Merge the 2020-05-05 SPL branch from AOSP-Partner

* security-aosp-pi-release:
  Revert "HAL3: Fix a use-after-free bug"
  HAL3: Fix a use-after-free bug

Change-Id: I36b5ae11b6b862d780ef59c97488670b8bb0e4c7
diff --git a/Android.mk b/Android.mk
index 1e8e1db..b4fda56 100644
--- a/Android.mk
+++ b/Android.mk
@@ -6,6 +6,9 @@
         ifneq ($(filter msm8998,$(TARGET_BOARD_PLATFORM)),)
           include $(call all-makefiles-under,$(call my-dir)/msm8998)
         endif
+        ifneq ($(filter msm8974,$(TARGET_BOARD_PLATFORM)),)
+          include $(call all-makefiles-under,$(call my-dir)/msm8974)
+        endif
       endif
     endif
   endif
diff --git a/msm8974/Android.mk b/msm8974/Android.mk
new file mode 100644
index 0000000..5053e7d
--- /dev/null
+++ b/msm8974/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/msm8974/CleanSpec.mk b/msm8974/CleanSpec.mk
new file mode 100644
index 0000000..e9231a7
--- /dev/null
+++ b/msm8974/CleanSpec.mk
@@ -0,0 +1,47 @@
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list.  These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+#     $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+#     $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list.  E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+
+$(call add-clean-step, find $(OUT_DIR) -name "camera.msm8974*" -print0 | xargs -0 rm -rf)
diff --git a/msm8974/MODULE_LICENSE_BSD b/msm8974/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/msm8974/MODULE_LICENSE_BSD
diff --git a/msm8974/QCamera2/Android.mk b/msm8974/QCamera2/Android.mk
new file mode 100644
index 0000000..5053e7d
--- /dev/null
+++ b/msm8974/QCamera2/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/msm8974/QCamera2/HAL/Android.mk b/msm8974/QCamera2/HAL/Android.mk
new file mode 100644
index 0000000..d0f615b
--- /dev/null
+++ b/msm8974/QCamera2/HAL/Android.mk
@@ -0,0 +1,65 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+    QCamera2Factory.cpp \
+    QCamera2Hal.cpp \
+    QCamera2HWI.cpp \
+    QCameraMem.cpp \
+    ../util/QCameraQueue.cpp \
+    ../util/QCameraCmdThread.cpp \
+    QCameraStateMachine.cpp \
+    QCameraChannel.cpp \
+    QCameraStream.cpp \
+    QCameraPostProc.cpp \
+    QCamera2HWICallbacks.cpp \
+    QCameraParameters.cpp \
+    QCameraThermalAdapter.cpp \
+    wrapper/QualcommCamera.cpp
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-variable -Wno-unused-parameter
+
+LOCAL_CFLAGS += -DHAS_MULTIMEDIA_HINTS
+
+LOCAL_CFLAGS += -DDEFAULT_DENOISE_MODE_ON
+
+#use media extension
+ifeq ($(TARGET_USES_MEDIA_EXTENSIONS), true)
+LOCAL_CFLAGS += -DUSE_MEDIA_EXTENSIONS
+endif
+
+# Debug logs are disabled
+LOCAL_CFLAGS += -DDISABLE_DEBUG_LOG
+
+#ifeq ($(TARGET_USES_AOSP),true)
+LOCAL_CFLAGS += -DVANILLA_HAL
+#endif
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/../stack/common \
+    frameworks/native/include/media/openmax \
+    $(call project-path-for,qcom-display)/libgralloc \
+    $(call project-path-for,qcom-media)/libstagefrighthw \
+    $(LOCAL_PATH)/../../mm-image-codec/qexif \
+    $(LOCAL_PATH)/../../mm-image-codec/qomx_core \
+    $(LOCAL_PATH)/../util \
+    $(LOCAL_PATH)/wrapper \
+    system/media/camera/include
+
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_SHARED_LIBRARIES := libcamera_client liblog libhardware libutils libcutils libdl libgui libsensor
+LOCAL_SHARED_LIBRARIES += libmmcamera_interface libmmjpeg_interface
+LOCAL_SHARED_LIBRARIES += android.hidl.token@1.0-utils android.hardware.graphics.bufferqueue@1.0
+LOCAL_STATIC_LIBRARIES := libarect
+LOCAL_HEADER_LIBRARIES := libnativebase_headers
+
+LOCAL_MODULE_RELATIVE_PATH := hw
+LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS := optional
+LOCAL_VENDOR_MODULE := true
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/msm8974/QCamera2/HAL/QCamera2Factory.cpp b/msm8974/QCamera2/HAL/QCamera2Factory.cpp
new file mode 100644
index 0000000..07f6da2
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2Factory.cpp
@@ -0,0 +1,229 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCamera2Factory"
+
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <hardware/camera.h>
+#include <sensor/SensorManager.h>
+
+#include "QCamera2Factory.h"
+
+namespace qcamera {
+
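+// Process-wide factory instance; the static HAL entry points below delegate to it.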
+QCamera2Factory gQCamera2Factory;
+
+/*===========================================================================
+ * FUNCTION   : QCamera2Factory
+ *
+ * DESCRIPTION: default constructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::QCamera2Factory()
+{
+    mNumOfCameras = get_num_of_cameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2Factory
+ *
+ * DESCRIPTION: destructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::~QCamera2Factory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : get_number_of_cameras
+ *
+ * DESCRIPTION: static function to query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::get_number_of_cameras()
+{
+    return gQCamera2Factory.getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : get_camera_info
+ *
+ * DESCRIPTION: static function to query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::get_camera_info(int camera_id, struct camera_info *info)
+{
+    return gQCamera2Factory.getCameraInfo(camera_id, info);
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::getNumberOfCameras()
+{
+    return mNumOfCameras;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::getCameraInfo(int camera_id, struct camera_info *info)
+{
+    int rc;
+    ALOGV("%s: E, camera_id = %d", __func__, camera_id);
+
+    if (!mNumOfCameras || camera_id >= mNumOfCameras || !info) {
+        return INVALID_OPERATION;
+    }
+
+    rc = QCamera2HardwareInterface::getCapabilities((uint32_t)camera_id, info);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
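+// Returns true once sensorservice is reachable and a sensor list can be fetched.
+// Used by cameraDeviceOpen() below to delay HAL open until sensorservice is up.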
+bool QCamera2Factory::can_talk_to_sensormanager()
+{
+    android::SensorManager& sensorManager(
+            android::SensorManager::getInstanceForPackage(android::String16("camera")));
+    android::Sensor const * const * sensorList;
+    return sensorManager.getSensorList(&sensorList) >= 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::cameraDeviceOpen(int camera_id,
+                    struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+
+    android::Mutex::Autolock lock(gCameraWrapperLock);
+
+    if (camera_id < 0 || camera_id >= mNumOfCameras)
+        return BAD_VALUE;
+
+    // mm-qcamera-daemon blocks until initialization of sensorservice
+    // and might miss V4L events generated by the HAL during that time,
+    // causing HAL initialization failures. Avoid those failures by waiting
+    // for sensorservice initialization before opening the HAL.
+    if (!can_talk_to_sensormanager()) {
+        ALOGE("Waiting for sensor service failed.");
+        return android::NO_INIT;
+    }
+
+    QCamera2HardwareInterface *hw = new QCamera2HardwareInterface((uint32_t)camera_id);
+    if (!hw) {
+        ALOGE("Allocation of hardware interface failed");
+        return NO_MEMORY;
+    }
+    rc = hw->openCamera(hw_device);
+    if (rc != NO_ERROR) {
+        delete hw;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::camera_device_open(
+    const struct hw_module_t *module, const char *id,
+    struct hw_device_t **hw_device)
+{
+    if (module != &HAL_MODULE_INFO_SYM.common) {
+        ALOGE("Invalid module. Trying to open %p, expect %p",
+            module, &HAL_MODULE_INFO_SYM.common);
+        return INVALID_OPERATION;
+    }
+    if (!id) {
+        ALOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+    return gQCamera2Factory.cameraDeviceOpen(atoi(id), hw_device);
+}
+
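+// Exported to the camera service through HAL_MODULE_INFO_SYM (declared extern in
+// QCamera2Factory.h, presumably defined alongside the module struct in QCamera2Hal.cpp),
+// which is resolved via hw_get_module().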
+struct hw_module_methods_t QCamera2Factory::mModuleMethods = {
+    .open = QCamera2Factory::camera_device_open,
+};
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCamera2Factory.h b/msm8974/QCamera2/HAL/QCamera2Factory.h
new file mode 100644
index 0000000..ce5d905
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2Factory.h
@@ -0,0 +1,70 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2FACTORY_H__
+#define __QCAMERA2FACTORY_H__
+
+#include <hardware/camera.h>
+#include <system/camera.h>
+#include <media/msmb_camera.h>
+
+#include "QCamera2HWI.h"
+
+namespace qcamera {
+
+class QCamera2Factory
+{
+public:
+    QCamera2Factory();
+    virtual ~QCamera2Factory();
+
+    static int get_number_of_cameras();
+    static int get_camera_info(int camera_id, struct camera_info *info);
+
+private:
+    int getNumberOfCameras();
+    int getCameraInfo(int camera_id, struct camera_info *info);
+    int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
+    static int camera_device_open(const struct hw_module_t *module, const char *id,
+                struct hw_device_t **hw_device);
+    bool can_talk_to_sensormanager();
+
+public:
+    static struct hw_module_methods_t mModuleMethods;
+
+private:
+    int mNumOfCameras;
+    android::Mutex gCameraWrapperLock;
+};
+
+}; /*namespace qcamera*/
+
+extern camera_module_t HAL_MODULE_INFO_SYM;
+
+#endif /* __QCAMERA2FACTORY_H__ */
diff --git a/msm8974/QCamera2/HAL/QCamera2HWI.cpp b/msm8974/QCamera2/HAL/QCamera2HWI.cpp
new file mode 100644
index 0000000..384817f
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2HWI.cpp
@@ -0,0 +1,7121 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+#include <utils/Log.h>
+#include <cutils/properties.h>
+#include <hardware/camera.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <utils/Trace.h>
+#include <gralloc_priv.h>
+#include <gui/Surface.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+
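+// MAP_TO_DRIVER_COORDINATE linearly rescales 'val' from an app-facing range of size
+// 'base' into a driver range of size 'scale', then shifts the result by 'offset'.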
+#define MAP_TO_DRIVER_COORDINATE(val, base, scale, offset) \
+  ((int32_t)val * (int32_t)scale / (int32_t)base + (int32_t)offset)
+#define CAMERA_MIN_STREAMING_BUFFERS     3
+#define EXTRA_ZSL_PREVIEW_STREAM_BUF     2
+#define CAMERA_MIN_JPEG_ENCODING_BUFFERS 2
+#define CAMERA_MIN_VIDEO_BUFFERS         9
+#define CAMERA_LONGSHOT_STAGES           4
+
+#define HDR_CONFIDENCE_THRESHOLD 0.4
+
+namespace qcamera {
+
+cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+qcamera_saved_sizes_list savedSizes[MM_CAMERA_MAX_NUM_SENSORS];
+
+static pthread_mutex_t g_camlock = PTHREAD_MUTEX_INITIALIZER;
+volatile uint32_t gCamHalLogLevel = 0;
+
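+// HAL1 camera_device_ops table: each static entry point below validates the device
+// handle, posts an event to the HWI state machine via processAPI(), and blocks on
+// waitAPIResult() for the synchronous result.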
+camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
+    .set_preview_window =         QCamera2HardwareInterface::set_preview_window,
+    .set_callbacks =              QCamera2HardwareInterface::set_CallBacks,
+    .enable_msg_type =            QCamera2HardwareInterface::enable_msg_type,
+    .disable_msg_type =           QCamera2HardwareInterface::disable_msg_type,
+    .msg_type_enabled =           QCamera2HardwareInterface::msg_type_enabled,
+
+    .start_preview =              QCamera2HardwareInterface::start_preview,
+    .stop_preview =               QCamera2HardwareInterface::stop_preview,
+    .preview_enabled =            QCamera2HardwareInterface::preview_enabled,
+    .store_meta_data_in_buffers = QCamera2HardwareInterface::store_meta_data_in_buffers,
+
+    .start_recording =            QCamera2HardwareInterface::start_recording,
+    .stop_recording =             QCamera2HardwareInterface::stop_recording,
+    .recording_enabled =          QCamera2HardwareInterface::recording_enabled,
+    .release_recording_frame =    QCamera2HardwareInterface::release_recording_frame,
+
+    .auto_focus =                 QCamera2HardwareInterface::auto_focus,
+    .cancel_auto_focus =          QCamera2HardwareInterface::cancel_auto_focus,
+
+    .take_picture =               QCamera2HardwareInterface::take_picture,
+    .cancel_picture =             QCamera2HardwareInterface::cancel_picture,
+
+    .set_parameters =             QCamera2HardwareInterface::set_parameters,
+    .get_parameters =             QCamera2HardwareInterface::get_parameters,
+    .put_parameters =             QCamera2HardwareInterface::put_parameters,
+    .send_command =               QCamera2HardwareInterface::send_command,
+
+    .release =                    QCamera2HardwareInterface::release,
+    .dump =                       QCamera2HardwareInterface::dump,
+};
+
+/*===========================================================================
+ * FUNCTION   : set_preview_window
+ *
+ * DESCRIPTION: set preview window.
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @window  : window ops table
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
+        struct preview_stream_ops *window)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, &apiResult);
+        rc = apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_CallBacks
+ *
+ * DESCRIPTION: set callbacks for notify and data
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp  : video data cb with timestamp
+ *   @get_memory : ops table for requesting gralloc memory
+ *   @user       : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::set_CallBacks(struct camera_device *device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+
+    qcamera_sm_evt_setcb_payload_t payload;
+    payload.notify_cb = notify_cb;
+    payload.data_cb = data_cb;
+    payload.data_cb_timestamp = data_cb_timestamp;
+    payload.get_memory = get_memory;
+    payload.user = user;
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_SET_CALLBACKS, (void *)&payload);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_CALLBACKS, &apiResult);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : enable_msg_type
+ *
+ * DESCRIPTION: enable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, (void *)&msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, &apiResult);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : disable_msg_type
+ *
+ * DESCRIPTION: disable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, (void *)&msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, &apiResult);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : msg_type_enabled
+ *
+ * DESCRIPTION: check if a certain msg type is enabled
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : 1 -- enabled
+ *              0 -- not enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, (void *)&msg_type);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, &apiResult);
+        ret = apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_preview
+ *
+ * DESCRIPTION: start preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_preview(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_START_PREVIEW", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
+    if (hw->isNoDisplayMode()) {
+        evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
+    }
+    ret = hw->processAPI(evt, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(evt, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    hw->m_bPreviewStarted = true;
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_preview
+ *
+ * DESCRIPTION: stop preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_STOP_PREVIEW", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_PREVIEW, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_PREVIEW, &apiResult);
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_enabled
+ *
+ * DESCRIPTION: if preview is running
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_PREVIEW_ENABLED, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PREVIEW_ENABLED, &apiResult);
+        ret = apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: check whether meta data needs to be stored in buffers for video frames
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @enable  : flag if enable
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::store_meta_data_in_buffers(
+                struct camera_device *device, int enable)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, (void *)&enable);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_recording
+ *
+ * DESCRIPTION: start recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_recording(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_START_RECORDING", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_START_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_RECORDING, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    hw->m_bRecordStarted = true;
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_recording
+ *
+ * DESCRIPTION: stop recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_STOP_RECORDING", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_RECORDING, &apiResult);
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : recording_enabled
+ *
+ * DESCRIPTION: if recording is running
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_RECORDING_ENABLED, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RECORDING_ENABLED, &apiResult);
+        ret = apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : release_recording_frame
+ *
+ * DESCRIPTION: return recording frame back
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @opaque  : ptr to frame to be returned
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release_recording_frame(
+            struct camera_device *device, const void *opaque)
+{
+    ATRACE_CALL();
+    int32_t ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    if (opaque == NULL) {
+        ALOGE("%s: Error!! Frame info is NULL", __func__);
+        return;
+    }
+    CDBG_HIGH("%s: E", __func__);
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, &apiResult);
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : auto_focus
+ *
+ * DESCRIPTION: start auto focus
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::auto_focus(struct camera_device *device)
+{
+    ATRACE_INT("Camera:AutoFocus", 1);
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s : E PROFILE_AUTO_FOCUS", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_START_AUTO_FOCUS, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_AUTO_FOCUS, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s : X", __func__);
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s : E PROFILE_CANCEL_AUTO_FOCUS", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s : X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : take_picture
+ *
+ * DESCRIPTION: take picture
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::take_picture(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_TAKE_PICTURE", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+
+    /* Prepare snapshot in case LED needs to be flashed */
+    if (hw->mFlashNeeded == true || hw->mParameters.isChromaFlashEnabled()) {
+        ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+        if (ret == NO_ERROR) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
+            ret = apiResult.status;
+        }
+        hw->mPrepSnapRun = true;
+    }
+
+    /* Regardless of the prepare_snapshot result, go ahead with the capture
+     * anyway, just like the way autofocus is handled in the capture case. */
+
+    /* capture */
+    ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+        ret = apiResult.status;
+    }
+
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_picture
+ *
+ * DESCRIPTION: cancel current take picture request
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_CANCEL_PICTURE", __func__);
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_CANCEL_PICTURE, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_parameters
+ *
+ * DESCRIPTION: set camera parameters
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parms   : string of packed parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
+                                              const char *parms)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_parameters
+ *
+ * DESCRIPTION: query camera parameters
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : packed parameters in a string
+ *==========================================================================*/
+char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
+{
+    ATRACE_CALL();
+    char *ret = NULL;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return NULL;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_GET_PARAMS, NULL);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_GET_PARAMS, &apiResult);
+        ret = apiResult.params;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_parameters
+ *
+ * DESCRIPTION: return camera parameters string back to HAL
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parm    : ptr to parameter string to be returned
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
+                                               char *parm)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_PUT_PARAMS, (void *)parm);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PUT_PARAMS, &apiResult);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : send_command
+ *
+ * DESCRIPTION: command to be executed
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @cmd     : cmd to be executed
+ *   @arg1    : ptr to optional argument1
+ *   @arg2    : ptr to optional argument2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command(struct camera_device *device,
+                                            int32_t cmd,
+                                            int32_t arg1,
+                                            int32_t arg2)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    qcamera_sm_evt_command_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+    payload.cmd = cmd;
+    payload.arg1 = arg1;
+    payload.arg2 = arg2;
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resource
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE, &apiResult);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: dump camera status
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @fd      : fd for status to be dumped to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(struct camera_device *device, int fd)
+{
+    int ret = NO_ERROR;
+
+    //Log level property is read when "adb shell dumpsys media.camera" is
+    //called so that the log level can be controlled without restarting
+    //media server
+    getLogLevel();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_DUMP, (void *)&fd);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DUMP, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: close camera device
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    CDBG_HIGH("[KPI Perf] %s: E",__func__);
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(
+            reinterpret_cast<camera_device_t *>(hw_dev)->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return BAD_VALUE;
+    }
+    delete hw;
+    CDBG_HIGH("[KPI Perf] %s: X",__func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : register_face_image
+ *
+ * DESCRIPTION: register a face image into the imaging lib for face authentication/
+ *              face recognition
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @img_ptr : ptr to image buffer
+ *   @config  : ptr to config about the input image, i.e., format, dimension, etc.
+ *
+ * RETURN     : >=0 unique ID of face registered.
+ *              <0  failure.
+ *==========================================================================*/
+int QCamera2HardwareInterface::register_face_image(struct camera_device *device,
+                                                   void *img_ptr,
+                                                   cam_pp_offline_src_config_t *config)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    qcamera_sm_evt_reg_face_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_reg_face_payload_t));
+    payload.img_ptr = img_ptr;
+    payload.config = config;
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_REG_FACE_IMAGE, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_REG_FACE_IMAGE, &apiResult);
+        ret = apiResult.handle;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera2HardwareInterface
+ *
+ * DESCRIPTION: constructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera ID
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::QCamera2HardwareInterface(uint32_t cameraId)
+    : mCameraId(cameraId),
+      mCameraHandle(NULL),
+      mCameraOpened(false),
+      mPreviewWindow(NULL),
+      mMsgEnabled(0),
+      mStoreMetaDataInFrame(0),
+      m_stateMachine(this),
+      m_postprocessor(this),
+      m_thermalAdapter(QCameraThermalAdapter::getInstance()),
+      m_cbNotifier(this),
+      m_bShutterSoundPlayed(false),
+      m_bPreviewStarted(false),
+      m_bRecordStarted(false),
+      m_currentFocusState(CAM_AF_SCANNING),
+      m_pPowerModule(NULL),
+      mDumpFrmCnt(0U),
+      mDumpSkipCnt(0U),
+      mThermalLevel(QCAMERA_THERMAL_NO_ADJUSTMENT),
+      mCancelAutoFocus(false),
+      mActiveAF(false),
+      m_HDRSceneEnabled(false),
+      mLongshotEnabled(false),
+      m_max_pic_width(0),
+      m_max_pic_height(0),
+      mLiveSnapshotThread(0),
+      mIntPicThread(0),
+      mFlashNeeded(false),
+      mCaptureRotation(0U),
+      mIs3ALocked(false),
+      mPrepSnapRun(false),
+      mZoomLevel(0),
+      m_bIntEvtPending(false),
+      mSnapshotJob(-1),
+      mPostviewJob(-1),
+      mMetadataJob(-1),
+      mReprocJob(-1),
+      mRawdataJob(-1),
+      mPreviewFrameSkipValid(0),
+      mAdvancedCaptureConfigured(false),
+      mNumPreviewFaces(-1)
+{
+    getLogLevel();
+    ATRACE_CALL();
+    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+    mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+    mCameraDevice.common.close = close_camera_device;
+    mCameraDevice.ops = &mCameraOps;
+    mCameraDevice.priv = this;
+
+    pthread_mutex_init(&m_lock, NULL);
+    pthread_cond_init(&m_cond, NULL);
+
+    m_apiResultList = NULL;
+
+    pthread_mutex_init(&m_evtLock, NULL);
+    pthread_cond_init(&m_evtCond, NULL);
+    memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+
+    pthread_mutex_init(&m_parm_lock, NULL);
+
+    pthread_mutex_init(&m_int_lock, NULL);
+    pthread_cond_init(&m_int_cond, NULL);
+
+    memset(m_channels, 0, sizeof(m_channels));
+    memset(&mExifParams, 0, sizeof(mm_jpeg_exif_params_t));
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
+        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
+    }
+#endif
+
+    memset(mDeffOngoingJobs, 0, sizeof(mDeffOngoingJobs));
+
+    //reset preview frame skip
+    memset(&mPreviewFrameSkipIdxRange, 0, sizeof(cam_frame_idx_range_t));
+
+    mDefferedWorkThread.launch(defferedWorkRoutine, this);
+    mDefferedWorkThread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2HardwareInterface
+ *
+ * DESCRIPTION: destructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::~QCamera2HardwareInterface()
+{
+    CDBG_HIGH("%s: E", __func__);
+    mDefferedWorkThread.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+    mDefferedWorkThread.exit();
+
+    closeCamera();
+    m_stateMachine.releaseThread();
+    pthread_mutex_destroy(&m_lock);
+    pthread_cond_destroy(&m_cond);
+    pthread_mutex_destroy(&m_evtLock);
+    pthread_cond_destroy(&m_evtCond);
+    pthread_mutex_destroy(&m_parm_lock);
+    pthread_mutex_destroy(&m_int_lock);
+    pthread_cond_destroy(&m_int_cond);
+    CDBG_HIGH("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ *   @hw_device  : double ptr for camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    if (mCameraOpened) {
+        *hw_device = NULL;
+        return PERMISSION_DENIED;
+    }
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_OPEN_CAMERA camera id %d", __func__,mCameraId);
+    rc = openCamera();
+    if (rc == NO_ERROR){
+        *hw_device = &mCameraDevice.common;
+        if (m_thermalAdapter.init(this) != 0) {
+          ALOGE("Init thermal adapter failed");
+        }
+    }
+    else
+        *hw_device = NULL;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera()
+{
+    int32_t l_curr_width = 0;
+    int32_t l_curr_height = 0;
+    m_max_pic_width = 0;
+    m_max_pic_height = 0;
+    char value[PROPERTY_VALUE_MAX];
+    int enable_4k2k;
+    size_t i;
+
+    if (mCameraHandle) {
+        ALOGE("Failure: Camera already opened");
+        return ALREADY_EXISTS;
+    }
+    mCameraHandle = camera_open((uint8_t)mCameraId);
+    if (!mCameraHandle) {
+        ALOGE("camera_open failed.");
+        return UNKNOWN_ERROR;
+    }
+    if (NULL == gCamCapability[mCameraId])
+        initCapabilities(mCameraId,mCameraHandle);
+
+    mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+                                              camEvtHandle,
+                                              (void *) this);
+
+    /* get max pic size for jpeg work buf calculation*/
+    for(i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt - 1; i++)
+    {
+      l_curr_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
+      l_curr_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
+
+      if ((l_curr_width * l_curr_height) >
+        (m_max_pic_width * m_max_pic_height)) {
+        m_max_pic_width = l_curr_width;
+        m_max_pic_height = l_curr_height;
+      }
+    }
+    //reset the preview and video sizes tables in case they were changed earlier
+    copyList(savedSizes[mCameraId].all_preview_sizes, gCamCapability[mCameraId]->preview_sizes_tbl,
+             savedSizes[mCameraId].all_preview_sizes_cnt);
+    gCamCapability[mCameraId]->preview_sizes_tbl_cnt = savedSizes[mCameraId].all_preview_sizes_cnt;
+    copyList(savedSizes[mCameraId].all_video_sizes, gCamCapability[mCameraId]->video_sizes_tbl,
+             savedSizes[mCameraId].all_video_sizes_cnt);
+    gCamCapability[mCameraId]->video_sizes_tbl_cnt = savedSizes[mCameraId].all_video_sizes_cnt;
+
+    //check if video size 4k x 2k support is enabled
+    property_get("persist.camera.4k2k.enable", value, "0");
+    enable_4k2k = atoi(value) > 0 ? 1 : 0;
+    ALOGD("%s: enable_4k2k is %d", __func__, enable_4k2k);
+    if (!enable_4k2k) {
+       //if the 4kx2k size exists in the supported preview size or
+       //supported video size remove it
+       bool found;
+       cam_dimension_t true_size_4k_2k;
+       cam_dimension_t size_4k_2k;
+       true_size_4k_2k.width = 4096;
+       true_size_4k_2k.height = 2160;
+       size_4k_2k.width = 3840;
+       size_4k_2k.height = 2160;
+
+       found = removeSizeFromList(gCamCapability[mCameraId]->preview_sizes_tbl,
+                                  gCamCapability[mCameraId]->preview_sizes_tbl_cnt,
+                                  true_size_4k_2k);
+       if (found) {
+          gCamCapability[mCameraId]->preview_sizes_tbl_cnt--;
+       }
+
+       found = removeSizeFromList(gCamCapability[mCameraId]->preview_sizes_tbl,
+                                  gCamCapability[mCameraId]->preview_sizes_tbl_cnt,
+                                  size_4k_2k);
+       if (found) {
+          gCamCapability[mCameraId]->preview_sizes_tbl_cnt--;
+       }
+
+       found = removeSizeFromList(gCamCapability[mCameraId]->video_sizes_tbl,
+                                  gCamCapability[mCameraId]->video_sizes_tbl_cnt,
+                                  true_size_4k_2k);
+       if (found) {
+          gCamCapability[mCameraId]->video_sizes_tbl_cnt--;
+       }
+
+       found = removeSizeFromList(gCamCapability[mCameraId]->video_sizes_tbl,
+                                  gCamCapability[mCameraId]->video_sizes_tbl_cnt,
+                                  size_4k_2k);
+       if (found) {
+          gCamCapability[mCameraId]->video_sizes_tbl_cnt--;
+       }
+    }
+
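+    // Bring up the JPEG postprocessor; jpegEvtHandle receives the encoder callbacks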
+    int32_t rc = m_postprocessor.init(jpegEvtHandle, this);
+    if (rc != 0) {
+        ALOGE("Init Postprocessor failed");
+        mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+        mCameraHandle = NULL;
+        return UNKNOWN_ERROR;
+    }
+
+    // update padding info from jpeg
+    cam_padding_info_t padding_info;
+    m_postprocessor.getJpegPaddingReq(padding_info);
+    if (gCamCapability[mCameraId]->padding_info.width_padding < padding_info.width_padding) {
+        gCamCapability[mCameraId]->padding_info.width_padding = padding_info.width_padding;
+    }
+    if (gCamCapability[mCameraId]->padding_info.height_padding < padding_info.height_padding) {
+        gCamCapability[mCameraId]->padding_info.height_padding = padding_info.height_padding;
+    }
+    if (gCamCapability[mCameraId]->padding_info.plane_padding < padding_info.plane_padding) {
+        gCamCapability[mCameraId]->padding_info.plane_padding = padding_info.plane_padding;
+    }
+
+    mParameters.init(gCamCapability[mCameraId], mCameraHandle, this, this);
+
+    mCameraOpened = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::closeCamera()
+{
+    int rc = NO_ERROR;
+    int i;
+    CDBG_HIGH("%s: E", __func__);
+    if (!mCameraOpened) {
+        return NO_ERROR;
+    }
+
+    pthread_mutex_lock(&m_parm_lock);
+
+    // set open flag to false
+    mCameraOpened = false;
+
+    // deinit Parameters
+    mParameters.deinit();
+
+    pthread_mutex_unlock(&m_parm_lock);
+
+    // exit notifier
+    m_cbNotifier.exit();
+
+    // stop and deinit postprocessor
+    m_postprocessor.stop();
+    m_postprocessor.deinit();
+
+    //free all pending api results here
+    if(m_apiResultList != NULL) {
+        api_result_list *apiResultList = m_apiResultList;
+        api_result_list *apiResultListNext;
+        while (apiResultList != NULL) {
+            apiResultListNext = apiResultList->next;
+            free(apiResultList);
+            apiResultList = apiResultListNext;
+        }
+    }
+
+    m_thermalAdapter.deinit();
+
+    // delete all channels if not already deleted
+    for (i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            m_channels[i]->stop();
+            delete m_channels[i];
+            m_channels[i] = NULL;
+        }
+    }
+
+    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+    CDBG_HIGH("%s: X", __func__);
+    return rc;
+}
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+/*===========================================================================
+ * FUNCTION   : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::initCapabilities(uint32_t cameraId,
+        mm_camera_vtbl_t *cameraHandle)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    QCameraHeapMemory *capabilityHeap = NULL;
+
+    /* Allocate memory for capability buffer */
+    capabilityHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t));
+    if(rc != OK) {
+        ALOGE("%s: No memory for cappability", __func__);
+        goto allocate_failed;
+    }
+
+    /* Map memory for capability buffer */
+    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
+                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                capabilityHeap->getFd(0),
+                                sizeof(cam_capability_t));
+    if(rc < 0) {
+        ALOGE("%s: failed to map capability buffer", __func__);
+        goto map_failed;
+    }
+
+    /* Query Capability */
+    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+    if(rc < 0) {
+        ALOGE("%s: failed to query capability",__func__);
+        goto query_failed;
+    }
+    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
+    if (!gCamCapability[cameraId]) {
+        ALOGE("%s: out of memory", __func__);
+        goto query_failed;
+    }
+    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+                                        sizeof(cam_capability_t));
+
+    //copy the preview sizes and video sizes lists because they
+    //might be changed later
+    copyList(gCamCapability[cameraId]->preview_sizes_tbl, savedSizes[cameraId].all_preview_sizes,
+             gCamCapability[cameraId]->preview_sizes_tbl_cnt);
+    savedSizes[cameraId].all_preview_sizes_cnt = gCamCapability[cameraId]->preview_sizes_tbl_cnt;
+    copyList(gCamCapability[cameraId]->video_sizes_tbl, savedSizes[cameraId].all_video_sizes,
+             gCamCapability[cameraId]->video_sizes_tbl_cnt);
+    savedSizes[cameraId].all_video_sizes_cnt = gCamCapability[cameraId]->video_sizes_tbl_cnt;
+
+    rc = NO_ERROR;
+
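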
+query_failed:
+    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+    capabilityHeap->deallocate();
+    delete capabilityHeap;
+allocate_failed:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCapabilities
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *   @info      : camera info struct to be filled in with camera capabilities
+ *
+ * RETURN     : int type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCapabilities(uint32_t cameraId,
+        struct camera_info *info)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    struct  camera_info *p_info;
+    pthread_mutex_lock(&g_camlock);
+    p_info = get_cam_info(cameraId);
+    memcpy(info, p_info, sizeof (struct camera_info));
+    pthread_mutex_unlock(&g_camlock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : prepareTorchCamera
+ *
+ * DESCRIPTION: initializes the camera ( if needed )
+ *              so torch can be configured.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::prepareTorchCamera()
+{
+    int rc = NO_ERROR;
+
+    if ( ( !m_stateMachine.isPreviewRunning() ) &&
+            !m_stateMachine.isPreviewReady() &&
+            ( m_channels[QCAMERA_CH_TYPE_PREVIEW] == NULL ) ) {
+        rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseTorchCamera
+ *
+ * DESCRIPTION: releases all previously acquired camera resources ( if any )
+ *              needed for torch configuration.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::releaseTorchCamera()
+{
+    if ( !m_stateMachine.isPreviewRunning() &&
+            !m_stateMachine.isPreviewReady() &&
+            ( m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL ) ) {
+        delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
+        m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufNumRequired
+ *
+ * DESCRIPTION: return number of stream buffers needed for given stream type
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : number of buffers needed
+ *==========================================================================*/
+uint8_t QCamera2HardwareInterface::getBufNumRequired(cam_stream_type_t stream_type)
+{
+    int bufferCnt = 0;
+    int minCaptureBuffers = mParameters.getNumOfSnapshots();
+
+    int zslQBuffers = mParameters.getZSLQueueDepth();
+
+    int minCircularBufNum = mParameters.getMaxUnmatchedFramesInQueue() +
+                            CAMERA_MIN_JPEG_ENCODING_BUFFERS;
+
+    int minUndequeCount = 0;
+    int minPPBufs = mParameters.getMinPPBufs();
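+    // maxStreamBuf is an upper bound used to clamp the per-stream counts computed below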
+    int maxStreamBuf = zslQBuffers + minCircularBufNum +
+        mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+        mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+        mParameters.getNumOfExtraBuffersForImageProc() +
+        EXTRA_ZSL_PREVIEW_STREAM_BUF;
+
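+    // Query how many buffers the preview window keeps dequeued (skipped in no-display mode)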
+    if (!isNoDisplayMode()) {
+        if(mPreviewWindow != NULL) {
+            if (mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,&minUndequeCount)
+                != 0) {
+                ALOGE("get_min_undequeued_buffer_count  failed");
+            }
+        } else {
+            //preview window might not be set at this point.
+            minUndequeCount = 2;
+        }
+    }
+
+    // Get buffer count for the particular stream type
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
+            if (mParameters.isZSLMode()) {
+                /* We need to add two extra streaming buffers to add
+                  flexibility in forming a matched super buf in the ZSL queue.
+                  With the count being only 'zslQBuffers + minCircularBufNum',
+                  we see preview buffers sometimes get dropped at CPP
+                  and the super buf does not form in the ZSL Q for a long time. */
+
+                bufferCnt = zslQBuffers + minCircularBufNum +
+                        mParameters.getNumOfExtraBuffersForImageProc() +
+                        EXTRA_ZSL_PREVIEW_STREAM_BUF +
+                        mParameters.getNumOfExtraBuffersForPreview();
+            } else {
+                bufferCnt = CAMERA_MIN_STREAMING_BUFFERS +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraBuffersForPreview();
+            }
+            bufferCnt += minUndequeCount;
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                        mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                        minPPBufs +
+                        mParameters.getNumOfExtraBuffersForImageProc();
+
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+            bufferCnt += minUndequeCount;
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        {
+            if (mParameters.isZSLMode() || mLongshotEnabled) {
+                if (minCaptureBuffers == 1 && !mLongshotEnabled) {
+                    // Single ZSL snapshot case
+                    bufferCnt = zslQBuffers + CAMERA_MIN_STREAMING_BUFFERS +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+                }
+                else {
+                    // ZSL Burst or Longshot case
+                    bufferCnt = zslQBuffers + minCircularBufNum +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+                }
+            } else {
+                bufferCnt = minCaptureBuffers +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+
+                if (bufferCnt > maxStreamBuf) {
+                    bufferCnt = maxStreamBuf;
+                }
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        if (mParameters.isZSLMode()) {
+            bufferCnt = zslQBuffers + minCircularBufNum;
+        } else {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                        mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                        mParameters.getNumOfExtraBuffersForImageProc();
+
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            bufferCnt = CAMERA_MIN_VIDEO_BUFFERS +
+                    mParameters.getNumOfExtraBuffersForVideo();
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        {
+            if (mParameters.isZSLMode()) {
+                // MetaData buffers should be >= (Preview buffers-minUndequeCount)
+                bufferCnt = zslQBuffers + minCircularBufNum +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getNumOfExtraBuffersForImageProc() +
+                            EXTRA_ZSL_PREVIEW_STREAM_BUF;
+            } else {
+                bufferCnt = minCaptureBuffers +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getMaxUnmatchedFramesInQueue() +
+                            CAMERA_MIN_STREAMING_BUFFERS +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+            }
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+            bufferCnt += minUndequeCount;
+        }
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        {
+            bufferCnt = minCaptureBuffers;
+            if (mLongshotEnabled) {
+                char prop[PROPERTY_VALUE_MAX];
+                memset(prop, 0, sizeof(prop));
+                property_get("persist.camera.longshot.stages", prop, "0");
+                int longshotStages = atoi(prop);
+                if (longshotStages > 0 && longshotStages < CAMERA_LONGSHOT_STAGES) {
+                    bufferCnt = longshotStages;
+                }
+                else {
+                    bufferCnt = CAMERA_LONGSHOT_STAGES;
+                }
+            }
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        bufferCnt = 0;
+        break;
+    }
+
+    ALOGD("%s: Allocating %d buffers for streamtype %d",__func__,bufferCnt,stream_type);
+    return (uint8_t)bufferCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamBuf
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *   @size         : size of buffer
+ *   @stride       : stride of buffer
+ *   @scanline     : scanline of buffer
+ *   @bufferCnt    : [IN/OUT] minimum num of buffers to be allocated.
+ *                   could be modified during allocation if more buffers needed
+ *
+ * RETURN     : ptr to a memory obj that holds stream buffers.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamBuf(
+        cam_stream_type_t stream_type, size_t size, int stride, int scanline,
+        uint8_t &bufferCnt)
+{
+    int rc = NO_ERROR;
+    QCameraMemory *mem = NULL;
+    bool bCachedMem = QCAMERA_ION_USE_CACHE;
+    bool bPoolMem = false;
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.mem.usepool", value, "1");
+    if (atoi(value) == 1) {
+        bPoolMem = true;
+    }
+
+    // Allocate stream buffer memory object
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
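+            // No-display mode uses plain stream (heap/pool) memory; otherwise
+            // preview buffers come from gralloc and are tied to the preview window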
+            if (isNoDisplayMode()) {
+                mem = new QCameraStreamMemory(mGetMemory,
+                        mCallbackCookie,
+                        bCachedMem,
+                        (bPoolMem) ? &m_memoryPool : NULL,
+                        stream_type);
+            } else {
+                cam_dimension_t dim;
+                QCameraGrallocMemory *grallocMemory =
+                    new QCameraGrallocMemory(mGetMemory, mCallbackCookie);
+
+                mParameters.getStreamDimension(stream_type, dim);
+                if (grallocMemory) {
+                    grallocMemory->setWindowInfo(mPreviewWindow, dim.width,
+                        dim.height, stride, scanline,
+                        mParameters.getPreviewHalPixelFormat());
+                }
+                mem = grallocMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            if (isPreviewRestartEnabled() || isNoDisplayMode()) {
+                mem = new QCameraStreamMemory(mGetMemory, mCallbackCookie, bCachedMem);
+            } else {
+                cam_dimension_t dim;
+                QCameraGrallocMemory *grallocMemory =
+                    new QCameraGrallocMemory(mGetMemory, mCallbackCookie);
+
+                mParameters.getStreamDimension(stream_type, dim);
+                if (grallocMemory) {
+                    grallocMemory->setWindowInfo(mPreviewWindow,
+                        dim.width,
+                        dim.height,
+                        stride,
+                        scanline,
+                        mParameters.getPreviewHalPixelFormat());
+                }
+                mem = grallocMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        mem = new QCameraStreamMemory(mGetMemory,
+                mCallbackCookie,
+                bCachedMem,
+                (bPoolMem) ? &m_memoryPool : NULL,
+                stream_type);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            //Use uncached allocation by default
+            bCachedMem = QCAMERA_ION_USE_NOCACHE;
+            char value[PROPERTY_VALUE_MAX];
+            property_get("persist.camera.mem.usecache", value, "0");
+            //LLV needs cached video buffers
+            if ((atoi(value) == 1) || mParameters.isSeeMoreEnabled()) {
+                bCachedMem = QCAMERA_ION_USE_CACHE;
+            }
+            ALOGD("%s: vidoe buf using cached memory = %d", __func__, bCachedMem);
+            QCameraVideoMemory *videoMemory = new QCameraVideoMemory(mGetMemory, mCallbackCookie, bCachedMem);
+            int usage = 0;
+            cam_format_t fmt;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_VIDEO,fmt);
+            videoMemory->setVideoInfo(usage, fmt);
+
+            mem = videoMemory;
+        }
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        break;
+    }
+    if (!mem) {
+        return NULL;
+    }
+
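+    // Allocate the requested buffers and report back the count the memory object actually provides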
+    if (bufferCnt > 0) {
+        rc = mem->allocate(bufferCnt, size);
+        if (rc < 0) {
+            delete mem;
+            return NULL;
+        }
+        bufferCnt = mem->getCnt();
+    }
+    return mem;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMoreStreamBuf
+ *
+ * DESCRIPTION: allocate more stream buffers from the memory object
+ *
+ * PARAMETERS :
+ *   @mem_obj      : memory object ptr
+ *   @size         : size of buffer
+ *   @bufferCnt    : [IN/OUT] additional number of buffers to be allocated.
+ *                   output will be the number of total buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::allocateMoreStreamBuf(
+        QCameraMemory *mem_obj, size_t size, uint8_t &bufferCnt)
+{
+    int rc = NO_ERROR;
+
+    if (bufferCnt > 0) {
+        rc = mem_obj->allocateMore(bufferCnt, size);
+        bufferCnt = mem_obj->getCnt();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamInfoBuf
+ *
+ * DESCRIPTION: allocate stream info buffer
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : ptr to a memory obj that holds stream info buffer.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateStreamInfoBuf(
+    cam_stream_type_t stream_type)
+{
+    int rc = NO_ERROR;
+    QCameraHeapMemory *streamInfoBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (!streamInfoBuf) {
+        ALOGE("allocateStreamInfoBuf: Unable to allocate streamInfo object");
+        return NULL;
+    }
+
+    rc = streamInfoBuf->allocate(1, sizeof(cam_stream_info_t));
+    if (rc < 0) {
+        ALOGE("allocateStreamInfoBuf: Failed to allocate stream info memory");
+        delete streamInfoBuf;
+        return NULL;
+    }
+
+    cam_stream_info_t *streamInfo = (cam_stream_info_t *)streamInfoBuf->getPtr(0);
+    memset(streamInfo, 0, sizeof(cam_stream_info_t));
+    streamInfo->stream_type = stream_type;
+    rc = mParameters.getStreamFormat(stream_type, streamInfo->fmt);
+    rc = mParameters.getStreamDimension(stream_type, streamInfo->dim);
+    rc = mParameters.getStreamRotation(stream_type, streamInfo->pp_config, streamInfo->dim);
+    streamInfo->num_bufs = getBufNumRequired(stream_type);
+    streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    ALOGD("%s: stream_type %d, stream format %d,stream dimension %dx%d, num_bufs %d\n",
+           __func__, stream_type, streamInfo->fmt, streamInfo->dim.width,
+           streamInfo->dim.height, streamInfo->num_bufs);
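+    // Adjust streaming mode, burst count and post-processing flags per stream type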
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+        if ((mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) ||
+                 mLongshotEnabled) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = (uint8_t)
+                    (mParameters.getNumOfSnapshots()
+                        + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+                        - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+                        + mParameters.getNumOfExtraBuffersForImageProc());
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        if (mLongshotEnabled) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = (uint8_t)(mParameters.getNumOfSnapshots()
+                + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+                - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+                + mParameters.getNumOfExtraBuffersForImageProc());
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        streamInfo->dis_enable = mParameters.isDISEnabled();
+        if (mParameters.isSeeMoreEnabled()) {
+            streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_LLVD;
+        }
+
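+    // fall through: CAM_STREAM_TYPE_VIDEO also runs the preview-case IS/SeeMore setup below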
+    case CAM_STREAM_TYPE_PREVIEW:
+        if (mParameters.getRecordingHintValue()) {
+            const char* dis_param = mParameters.get(QCameraParameters::KEY_QC_DIS);
+            bool disEnabled = (dis_param != NULL)
+                    && !strcmp(dis_param,QCameraParameters::VALUE_ENABLE);
+            if(disEnabled) {
+                char value[PROPERTY_VALUE_MAX];
+                property_get("persist.camera.is_type", value, "0");
+                streamInfo->is_type = static_cast<cam_is_type_t>(atoi(value));
+            } else {
+                streamInfo->is_type = IS_TYPE_NONE;
+            }
+            if (mParameters.isSeeMoreEnabled()) {
+                streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_LLVD;
+            }
+        }
+        break;
+    default:
+        break;
+    }
+
+    if ((!isZSLMode() ||
+        (isZSLMode() && (stream_type != CAM_STREAM_TYPE_SNAPSHOT))) &&
+        !mParameters.isHDREnabled()) {
+        //set flip mode based on Stream type;
+        int flipMode = mParameters.getFlipMode(stream_type);
+        if (flipMode > 0) {
+            streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_FLIP;
+            streamInfo->pp_config.flip = (uint32_t)flipMode;
+        }
+    }
+
+    return streamInfoBuf;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewWindow
+ *
+ * DESCRIPTION: set preview window impl
+ *
+ * PARAMETERS :
+ *   @window  : ptr to window ops table struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setPreviewWindow(
+        struct preview_stream_ops *window)
+{
+    mPreviewWindow = window;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallBacks
+ *
+ * DESCRIPTION: set callbacks impl
+ *
+ * PARAMETERS :
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp : data cb with time stamp
+ *   @get_memory : request memory ops table
+ *   @user       : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,
+                                            camera_data_callback data_cb,
+                                            camera_data_timestamp_callback data_cb_timestamp,
+                                            camera_request_memory get_memory,
+                                            void *user)
+{
+    mNotifyCb        = notify_cb;
+    mDataCb          = data_cb;
+    mDataCbTimestamp = data_cb_timestamp;
+    mGetMemory       = get_memory;
+    mCallbackCookie  = user;
+    m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : enableMsgType
+ *
+ * DESCRIPTION: enable msg type impl
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::enableMsgType(int32_t msg_type)
+{
+    mMsgEnabled |= msg_type;
+    CDBG_HIGH("%s (0x%x) : mMsgEnabled = 0x%x", __func__, msg_type , mMsgEnabled );
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : disableMsgType
+ *
+ * DESCRIPTION: disable msg type impl
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be disabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::disableMsgType(int32_t msg_type)
+{
+    mMsgEnabled &= ~msg_type;
+    CDBG_HIGH("%s (0x%x) : mMsgEnabled = 0x%x", __func__, msg_type , mMsgEnabled );
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabled
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabled(int32_t msg_type)
+{
+    return (mMsgEnabled & msg_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabledWithLock
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled with lock
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabledWithLock(int32_t msg_type)
+{
+    int enabled = 0;
+    lockAPI();
+    enabled = mMsgEnabled & msg_type;
+    unlockAPI();
+    return enabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPreview
+ *
+ * DESCRIPTION: start preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startPreview()
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+    CDBG_HIGH("%s: E", __func__);
+    // start preview stream
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() !=true) {
+        rc = startChannel(QCAMERA_CH_TYPE_ZSL);
+    } else {
+        rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+        /*
+          CAF needs cancel auto focus to resume after snapshot.
+          Focus should be locked till take picture is done.
+          In Non-zsl case if focus mode is CAF then calling cancel auto focus
+          to resume CAF.
+        */
+        cam_focus_mode_type focusMode = mParameters.getFocusMode();
+        if (focusMode == CAM_FOCUS_MODE_CONTINOUS_PICTURE) {
+            mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
+        }
+    }
+    CDBG_HIGH("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopPreview
+ *
+ * DESCRIPTION: stop preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopPreview()
+{
+    ATRACE_CALL();
+    CDBG_HIGH("%s: E", __func__);
+    mActiveAF = false;
+    mNumPreviewFaces = -1;
+    // stop preview stream
+    stopChannel(QCAMERA_CH_TYPE_ZSL);
+    stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+    //reset preview frame skip
+    mPreviewFrameSkipValid = 0;
+    memset(&mPreviewFrameSkipIdxRange, 0, sizeof(cam_frame_idx_range_t));
+
+    m_cbNotifier.flushPreviewNotifications();
+    // delete all channels from preparePreview
+    unpreparePreview();
+    CDBG_HIGH("%s: X", __func__);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : storeMetaDataInBuffers
+ *
+ * DESCRIPTION: enable store meta data in buffers for video frames impl
+ *
+ * PARAMETERS :
+ *   @enable  : flag indicating whether to enable storing meta data in video buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::storeMetaDataInBuffers(int enable)
+{
+    mStoreMetaDataInFrame = enable;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startRecording
+ *
+ * DESCRIPTION: start recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startRecording()
+{
+    int32_t rc = NO_ERROR;
+    CDBG_HIGH("%s: E", __func__);
+    if (mParameters.getRecordingHintValue() == false) {
+        CDBG_HIGH("%s: start recording when hint is false, stop preview first", __func__);
+        stopPreview();
+
+        // Set recording hint to TRUE
+        mParameters.updateRecordingHintValue(TRUE);
+        rc = preparePreview();
+        if (rc == NO_ERROR) {
+            rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+        }
+    }
+
+    if (rc == NO_ERROR) {
+        rc = startChannel(QCAMERA_CH_TYPE_VIDEO);
+    }
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (rc == NO_ERROR) {
+        if (m_pPowerModule) {
+            if (m_pPowerModule->powerHint) {
+                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
+            }
+        }
+    }
+#endif
+    CDBG_HIGH("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopRecording
+ *
+ * DESCRIPTION: stop recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopRecording()
+{
+    CDBG_HIGH("%s: E", __func__);
+    int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
+    m_cbNotifier.flushVideoNotifications();
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (m_pPowerModule) {
+        if (m_pPowerModule->powerHint) {
+            m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
+        }
+    }
+#endif
+    CDBG_HIGH("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseRecordingFrame
+ *
+ * DESCRIPTION: return video frame impl
+ *
+ * PARAMETERS :
+ *   @opaque  : ptr to video frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::releaseRecordingFrame(const void * opaque)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    QCameraVideoChannel *pChannel =
+        (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
+    CDBG_HIGH("%s: opaque data = %p", __func__,opaque);
+    if(pChannel != NULL) {
+        rc = pChannel->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : autoFocus
+ *
+ * DESCRIPTION: start auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::autoFocus()
+{
+    int rc = NO_ERROR;
+    setCancelAutoFocus(false);
+    mActiveAF = true;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+    CDBG_HIGH("[AF_DBG] %s: focusMode=%d, m_currentFocusState=%d, m_bAFRunning=%d",
+          __func__, focusMode, m_currentFocusState, isAFRunning());
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        rc = mCameraHandle->ops->do_auto_focus(mCameraHandle->camera_handle);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        ALOGE("%s: No ops in focusMode (%d)", __func__, focusMode);
+        rc = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelAutoFocus
+ *
+ * DESCRIPTION: cancel auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelAutoFocus()
+{
+    int rc = NO_ERROR;
+    setCancelAutoFocus(true);
+    mActiveAF = false;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        rc = mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        CDBG("%s: No ops in focusMode (%d)", __func__, focusMode);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processUFDumps
+ *
+ * DESCRIPTION: process UF jpeg dumps for refocus support
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : true  -- done (last/all-focus image dumped, or refocus not active)
+ *              false -- more refocus output images are still expected
+ *
+ * NOTE       : none
+ *==========================================================================*/
+bool QCamera2HardwareInterface::processUFDumps(qcamera_jpeg_evt_payload_t *evt)
+{
+   bool ret = true;
+   if (mParameters.isUbiRefocus()) {
+       int index = (int)getOutputImageCount();
+       bool allFocusImage = (index == ((int)mParameters.UfOutputCount()-1));
+       char name[CAM_FN_CNT];
+
+       camera_memory_t *jpeg_mem = NULL;
+       omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+       size_t dataLen;
+       uint8_t *dataPtr;
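+       // With JPEG memory optimization enabled, buf_vaddr carries an OMX output
+       // buffer handle rather than the raw JPEG data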
+       if (!m_postprocessor.getJpegMemOpt()) {
+           dataLen = evt->out_data.buf_filled_len;
+           dataPtr = evt->out_data.buf_vaddr;
+       } else {
+           jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+           jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+           dataPtr = (uint8_t *)jpeg_mem->data;
+           dataLen = jpeg_mem->size;
+       }
+
+       if (allFocusImage)  {
+           snprintf(name, CAM_FN_CNT, "AllFocusImage");
+           index = -1;
+       } else {
+           snprintf(name, CAM_FN_CNT, "%d", 0);
+       }
+       CAM_DUMP_TO_FILE("/data/misc/camera/ubifocus", name, index, "jpg",
+           dataPtr, dataLen);
+       CDBG_HIGH("%s:%d] Dump the image %d %d allFocusImage %d", __func__, __LINE__,
+           getOutputImageCount(), index, allFocusImage);
+       setOutputImageCount(getOutputImageCount() + 1);
+       if (!allFocusImage) {
+           ret = false;
+       }
+   }
+   return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processMTFDumps
+ *
+ * DESCRIPTION: process MTF jpeg dumps for refocus support
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : true  -- done (last/all-focus image dumped, or MTF refocus not active)
+ *              false -- more refocus output images are still expected
+ *
+ * NOTE       : none
+ *==========================================================================*/
+bool QCamera2HardwareInterface::processMTFDumps(qcamera_jpeg_evt_payload_t *evt)
+{
+   bool ret = true;
+   if (mParameters.isMTFRefocus()) {
+       int index = (int) getOutputImageCount();
+       bool allFocusImage = (index == ((int)mParameters.MTFOutputCount()-1));
+       char name[CAM_FN_CNT];
+
+       camera_memory_t *jpeg_mem = NULL;
+       omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+       size_t dataLen;
+       uint8_t *dataPtr;
+       if (!m_postprocessor.getJpegMemOpt()) {
+           dataLen = evt->out_data.buf_filled_len;
+           dataPtr = evt->out_data.buf_vaddr;
+       } else {
+           jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+           jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+           dataPtr = (uint8_t *)jpeg_mem->data;
+           dataLen = jpeg_mem->size;
+       }
+
+       if (allFocusImage)  {
+           strncpy(name, "AllFocusImage", CAM_FN_CNT - 1);
+           index = -1;
+       } else {
+           strncpy(name, "0", CAM_FN_CNT - 1);
+       }
+       CAM_DUMP_TO_FILE("/data/misc/camera/multiTouchFocus", name, index, "jpg",
+               dataPtr, dataLen);
+       CDBG("%s:%d] Dump the image %d %d allFocusImage %d", __func__, __LINE__,
+               getOutputImageCount(), index, allFocusImage);
+       setOutputImageCount(getOutputImageCount() + 1);
+       if (!allFocusImage) {
+           ret = false;
+       }
+   }
+   return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : unconfigureAdvancedCapture
+ *
+ * DESCRIPTION: unconfigure Advanced Capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::unconfigureAdvancedCapture()
+{
+    int32_t rc = NO_ERROR;
+
+    if (mAdvancedCaptureConfigured) {
+
+        mAdvancedCaptureConfigured = false;
+
+        if(mIs3ALocked) {
+            mParameters.set3ALock(QCameraParameters::VALUE_FALSE);
+            mIs3ALocked = false;
+        }
+        if ( mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
+            rc = mParameters.stopAEBracket();
+        } else if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+            rc = configureAFBracketing(false);
+        } else if (mParameters.isChromaFlashEnabled()) {
+            rc = configureFlashBracketing(false);
+        } else  if (mParameters.isOptiZoomEnabled() ||
+                mParameters.isfssrEnabled()) {
+            rc = mParameters.setAndCommitZoom(mZoomLevel);
+        } else if (mParameters.isMultiTouchFocusEnabled()) {
+            configureMTFBracketing(false);
+        } else {
+            ALOGE("%s: No Advanced Capture feature enabled!! ", __func__);
+            rc = BAD_VALUE;
+        }
+        if (mParameters.isMultiTouchFocusSelected()) {
+            mParameters.resetMultiTouchFocusParam();
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAdvancedCapture
+ *
+ * DESCRIPTION: configure Advanced Capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAdvancedCapture()
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+
+    setOutputImageCount(0);
+    mParameters.setDisplayFrame(FALSE);
+    if (mParameters.isUbiFocusEnabled()) {
+        rc = configureAFBracketing();
+    } else if (mParameters.isMultiTouchFocusEnabled()) {
+        rc = configureMTFBracketing();
+    } else if (mParameters.isOptiZoomEnabled()) {
+        rc = configureOptiZoom();
+    } else if (mParameters.isfssrEnabled()) {
+        rc = configureFssr();
+    } else if (mParameters.isChromaFlashEnabled()) {
+        rc = configureFlashBracketing();
+    } else if (mParameters.isHDREnabled()) {
+        rc = configureZSLHDRBracketing();
+    } else if (mParameters.isAEBracketEnabled()) {
+        rc = configureAEBracketing();
+    } else {
+        ALOGE("%s: No Advanced Capture feature enabled!! ", __func__);
+        rc = BAD_VALUE;
+    }
+
+    if (NO_ERROR == rc) {
+        mAdvancedCaptureConfigured = true;
+    } else {
+        mAdvancedCaptureConfigured = false;
+    }
+
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAFBracketing
+ *
+ * DESCRIPTION: configure AF Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAFBracketing(bool enable)
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+    cam_af_bracketing_t *af_bracketing_need;
+    af_bracketing_need =
+        &gCamCapability[mCameraId]->ubifocus_af_bracketing_need;
+
+    //Enable AF Bracketing.
+    cam_af_bracketing_t afBracket;
+    memset(&afBracket, 0, sizeof(cam_af_bracketing_t));
+    afBracket.enable = enable;
+    afBracket.burst_count = af_bracketing_need->burst_count;
+
+    for(int8_t i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+        afBracket.focus_steps[i] = af_bracketing_need->focus_steps[i];
+        CDBG_HIGH("%s: focus_step[%d] = %d", __func__, i, afBracket.focus_steps[i]);
+    }
+    //Send cmd to backend to set AF Bracketing for Ubi Focus.
+    rc = mParameters.commitAFBracket(afBracket);
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: cannot configure AF bracketing", __func__);
+        return rc;
+    }
+    if (enable) {
+        mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
+        mIs3ALocked = true;
+    }
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureMTFBracketing
+ *
+ * DESCRIPTION: configure multi-touch focus AF Bracketing.
+ *
+ * PARAMETERS :
+ *   @enable  : bool flag if MTF should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureMTFBracketing(bool enable)
+{
+    int32_t rc = NO_ERROR;
+    cam_af_bracketing_t *mtf_bracketing_need;
+    mtf_bracketing_need = &mParameters.m_MTFBracketInfo;
+
+    //Enable AF Bracketing.
+    cam_af_bracketing_t afBracket;
+    memset(&afBracket, 0, sizeof(cam_af_bracketing_t));
+    afBracket.enable = enable;
+    afBracket.burst_count = mtf_bracketing_need->burst_count;
+
+    for(int8_t i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+        if (mtf_bracketing_need->focus_steps[i] != -1) {
+           afBracket.focus_steps[i] = mtf_bracketing_need->focus_steps[i];
+        }
+        CDBG_HIGH("%s: MTF focus_step[%d] = %d",
+                  __func__, i, afBracket.focus_steps[i]);
+    }
+    //Send cmd to backend to set AF Bracketing for MTF.
+    rc = mParameters.commitMTFBracket(afBracket);
+    mParameters.m_currNumBufMTF = afBracket.burst_count;
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: cannot configure MTF bracketing", __func__);
+        return rc;
+    }
+    if (enable) {
+        mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
+        mIs3ALocked = true;
+    }
+    if (!enable) {
+        mParameters.m_currNumBufMTF = 0;
+    }
+    //reset multi-touch focus parameters for next use.
+    mParameters.resetMultiTouchFocusParam();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureFlashBracketing
+ *
+ * DESCRIPTION: configure Flash Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureFlashBracketing(bool enable)
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+
+    cam_flash_bracketing_t flashBracket;
+    memset(&flashBracket, 0, sizeof(cam_flash_bracketing_t));
+    flashBracket.enable = enable;
+    //TODO: Hardcoded value.
+    flashBracket.burst_count = 2;
+    //Send cmd to backend to set Flash Bracketing for chroma flash.
+    rc = mParameters.commitFlashBracket(flashBracket);
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: cannot configure AF bracketing", __func__);
+    }
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureZSLHDRBracketing
+ *
+ * DESCRIPTION: configure HDR Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureZSLHDRBracketing()
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+
+    // 'values' should be in "idx1,idx2,idx3,..." format
+    uint32_t hdrFrameCount = gCamCapability[mCameraId]->hdr_bracketing_setting.num_frames;
+    CDBG_HIGH("%s : HDR values %d, %d frame count: %u",
+          __func__,
+          (int) gCamCapability[mCameraId]->hdr_bracketing_setting.exp_val.values[0],
+          (int) gCamCapability[mCameraId]->hdr_bracketing_setting.exp_val.values[1],
+          hdrFrameCount);
+
+    // Enable AE Bracketing for HDR
+    cam_exp_bracketing_t aeBracket;
+    memset(&aeBracket, 0, sizeof(cam_exp_bracketing_t));
+    aeBracket.mode =
+        gCamCapability[mCameraId]->hdr_bracketing_setting.exp_val.mode;
+    String8 tmp;
+    for (uint32_t i = 0; i < hdrFrameCount; i++) {
+        tmp.appendFormat("%d",
+            (int8_t) gCamCapability[mCameraId]->hdr_bracketing_setting.exp_val.values[i]);
+        tmp.append(",");
+    }
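+    // Append an extra 0 EV entry when the normal (1x) HDR frame needs its own buffer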
+    if (mParameters.isHDR1xFrameEnabled()
+        && mParameters.isHDR1xExtraBufferNeeded()) {
+            tmp.appendFormat("%d", 0);
+            tmp.append(",");
+    }
+
+    if( !tmp.isEmpty() &&
+        ( MAX_EXP_BRACKETING_LENGTH > tmp.length() ) ) {
+        //Trim last comma
+        memset(aeBracket.values, '\0', MAX_EXP_BRACKETING_LENGTH);
+        memcpy(aeBracket.values, tmp.string(), tmp.length() - 1);
+    }
+
+    CDBG_HIGH("%s : HDR config values %s",
+          __func__,
+          aeBracket.values);
+    rc = mParameters.setHDRAEBracket(aeBracket);
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: cannot configure HDR bracketing", __func__);
+        return rc;
+    }
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAEBracketing
+ *
+ * DESCRIPTION: configure AE Bracketing.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAEBracketing()
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+
+    rc = mParameters.setAEBracketing();
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: cannot configure AE bracketing", __func__);
+        return rc;
+    }
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureOptiZoom
+ *
+ * DESCRIPTION: configure Opti Zoom.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureOptiZoom()
+{
+    int32_t rc = NO_ERROR;
+
+    //store current zoom level.
+    mZoomLevel = (uint8_t) mParameters.getInt(CameraParameters::KEY_ZOOM);
+
+    //set zoom level to 1x;
+    mParameters.setAndCommitZoom(0);
+
+    mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
+    mIs3ALocked = true;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureFssr
+ *
+ * DESCRIPTION: configure fssr.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+//TODO_fssr: placeholder - check the requirements for FSSR
+int32_t QCamera2HardwareInterface::configureFssr()
+{
+    int32_t rc = NO_ERROR;
+
+    //store current zoom level.
+    mZoomLevel = (uint8_t) mParameters.getInt(CameraParameters::KEY_ZOOM);
+
+    //set zoom level to 1x;
+    mParameters.setAndCommitZoom(0);
+
+    mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
+    mIs3ALocked = true;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startAdvancedCapture
+ *
+ * DESCRIPTION: starts advanced capture based on capture type
+ *
+ * PARAMETERS :
+ *   @pChannel : channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startAdvancedCapture(
+    QCameraPicChannel *pChannel)
+{
+    CDBG_HIGH("%s: Start bracketing", __func__);
+    int32_t rc = NO_ERROR;
+
+    if(mParameters.isUbiFocusEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_AF_BRACKETING);
+    } else if (mParameters.isMultiTouchFocusEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_MTF_BRACKETING);
+    } else if (mParameters.isChromaFlashEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_FLASH_BRACKETING);
+    } else if (mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_AE_BRACKETING);
+    } else if (mParameters.isOptiZoomEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_ZOOM_1X);
+    } else if (mParameters.isfssrEnabled()) {
+        rc = pChannel->startAdvancedCapture(MM_CAMERA_ZOOM_1X);
+    } else {
+        ALOGE("%s: No Advanced Capture feature enabled!",__func__);
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: take picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePicture()
+{
+    int rc = NO_ERROR;
+    uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+
+    if (mParameters.isUbiFocusEnabled() ||
+            mParameters.isMultiTouchFocusEnabled() ||
+            mParameters.isOptiZoomEnabled() ||
+            mParameters.isfssrEnabled() ||
+            mParameters.isHDREnabled() ||
+            mParameters.isChromaFlashEnabled() ||
+            mParameters.isAEBracketEnabled()) {
+        rc = configureAdvancedCapture();
+        if (rc == NO_ERROR) {
+            numSnapshots = mParameters.getBurstCountForAdvancedCapture();
+        }
+    }
+    CDBG_HIGH("%s: numSnapshot = %d",__func__, numSnapshots);
+
+    getOrientation();
+    CDBG_HIGH("%s: E", __func__);
+    if (mParameters.isZSLMode()) {
+        QCameraPicChannel *pZSLChannel =
+            (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+        if (NULL != pZSLChannel) {
+            // start postprocessor
+            rc = m_postprocessor.start(pZSLChannel);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: cannot start postprocessor", __func__);
+                return rc;
+            }
+            if (mParameters.isUbiFocusEnabled() ||
+                    mParameters.isMultiTouchFocusEnabled() ||
+                    mParameters.isOptiZoomEnabled() ||
+                    mParameters.isHDREnabled() ||
+                    mParameters.isfssrEnabled() ||
+                    mParameters.isChromaFlashEnabled() ||
+                    mParameters.isAEBracketEnabled()) {
+                rc = startAdvancedCapture(pZSLChannel);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start zsl advanced capture", __func__);
+                    return rc;
+                }
+            }
+            if (mLongshotEnabled && mPrepSnapRun) {
+                mCameraHandle->ops->start_zsl_snapshot(
+                        mCameraHandle->camera_handle,
+                        pZSLChannel->getMyHandle());
+            }
+            rc = pZSLChannel->takePicture(numSnapshots);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: cannot take ZSL picture", __func__);
+                m_postprocessor.stop();
+                return rc;
+            }
+        } else {
+            ALOGE("%s: ZSL channel is NULL", __func__);
+            return UNKNOWN_ERROR;
+        }
+    } else {
+
+        // start snapshot
+        if (mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ||
+            mParameters.isNV21PictureFormat()) {
+
+            if (!isLongshotEnabled()) {
+                rc = addCaptureChannel();
+
+                // normal capture case
+                // need to stop preview channel
+                stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+                delChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+                waitDefferedWork(mSnapshotJob);
+                waitDefferedWork(mMetadataJob);
+                waitDefferedWork(mRawdataJob);
+
+                {
+                    DefferWorkArgs args;
+                    DefferAllocBuffArgs allocArgs;
+
+                    memset(&args, 0, sizeof(DefferWorkArgs));
+                    memset(&allocArgs, 0, sizeof(DefferAllocBuffArgs));
+
+                    allocArgs.ch = m_channels[QCAMERA_CH_TYPE_CAPTURE];
+                    allocArgs.type = CAM_STREAM_TYPE_POSTVIEW;
+                    args.allocArgs = allocArgs;
+
+                    mPostviewJob = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF,
+                            args);
+
+                    if ( mPostviewJob == -1)
+                        rc = UNKNOWN_ERROR;
+                }
+
+                waitDefferedWork(mPostviewJob);
+            } else {
+                // normal capture case
+                // need to stop preview channel
+                stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+                delChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+                rc = addCaptureChannel();
+            }
+
+            if ((rc == NO_ERROR) &&
+                (NULL != m_channels[QCAMERA_CH_TYPE_CAPTURE])) {
+
+                // configure capture channel
+                rc = m_channels[QCAMERA_CH_TYPE_CAPTURE]->config();
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot configure capture channel", __func__);
+                    delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                    return rc;
+                }
+
+                DefferWorkArgs args;
+                memset(&args, 0, sizeof(DefferWorkArgs));
+
+                args.pprocArgs = m_channels[QCAMERA_CH_TYPE_CAPTURE];
+                mReprocJob = queueDefferedWork(CMD_DEFF_PPROC_START,
+                        args);
+
+                // start capture channel
+                rc =  m_channels[QCAMERA_CH_TYPE_CAPTURE]->start();
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start capture channel", __func__);
+                    delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                    return rc;
+                }
+
+                QCameraPicChannel *pCapChannel =
+                    (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+                if (NULL != pCapChannel) {
+                    if (mParameters.isUbiFocusEnabled() ||
+                        mParameters.isChromaFlashEnabled()) {
+                        rc = startAdvancedCapture(pCapChannel);
+                        if (rc != NO_ERROR) {
+                            ALOGE("%s: cannot start advanced capture", __func__);
+                            return rc;
+                        }
+                    }
+                }
+                if ( mLongshotEnabled ) {
+                    rc = longShot();
+                    if (NO_ERROR != rc) {
+                        delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                        return rc;
+                    }
+                }
+            } else {
+                ALOGE("%s: cannot add capture channel", __func__);
+                return rc;
+            }
+        } else {
+
+            stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+            delChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+            rc = addRawChannel();
+            if (rc == NO_ERROR) {
+                // start postprocessor
+                rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start postprocessor", __func__);
+                    delChannel(QCAMERA_CH_TYPE_RAW);
+                    return rc;
+                }
+
+                rc = startChannel(QCAMERA_CH_TYPE_RAW);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start raw channel", __func__);
+                    m_postprocessor.stop();
+                    delChannel(QCAMERA_CH_TYPE_RAW);
+                    return rc;
+                }
+            } else {
+                ALOGE("%s: cannot add raw channel", __func__);
+                return rc;
+            }
+        }
+    }
+    CDBG_HIGH("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : longShot
+ *
+ * DESCRIPTION: Queue one more ZSL frame
+ *              in the longshot pipe.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::longShot()
+{
+    int32_t rc = NO_ERROR;
+    uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+    QCameraPicChannel *pChannel = NULL;
+
+    if (mParameters.isZSLMode()) {
+        pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+    } else {
+        pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+    }
+
+    if (NULL != pChannel) {
+        rc = pChannel->takePicture(numSnapshots);
+    } else {
+        ALOGE(" %s : Capture channel not initialized!", __func__);
+        rc = NO_INIT;
+        goto end;
+    }
+
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopCaptureChannel
+ *
+ * DESCRIPTION: Stops capture channel
+ *
+ * PARAMETERS :
+ *   @destroy : Set to true to stop and delete camera channel.
+ *              Set to false to only stop capture channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopCaptureChannel(bool destroy)
+{
+    if (mParameters.isJpegPictureFormat() ||
+        mParameters.isNV16PictureFormat() ||
+        mParameters.isNV21PictureFormat()) {
+        stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+        if (destroy) {
+            // Destroy camera channel but don't release context
+            delChannel(QCAMERA_CH_TYPE_CAPTURE, false);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelPicture()
+{
+    CDBG_HIGH("%s:%d] ",__func__, __LINE__);
+    waitDefferedWork(mReprocJob);
+
+    //stop post processor
+    m_postprocessor.stop();
+
+    unconfigureAdvancedCapture();
+
+    mParameters.setDisplayFrame(TRUE);
+
+    if (mParameters.isZSLMode()) {
+        QCameraPicChannel *pZSLChannel =
+            (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+        if (NULL != pZSLChannel) {
+            pZSLChannel->cancelPicture();
+        }
+    } else {
+
+        // normal capture case
+        if (mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ||
+            mParameters.isNV21PictureFormat()) {
+            stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+            delChannel(QCAMERA_CH_TYPE_CAPTURE);
+        } else {
+            stopChannel(QCAMERA_CH_TYPE_RAW);
+            delChannel(QCAMERA_CH_TYPE_RAW);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : captureDone
+ *
+ * DESCRIPTION: Function called when the capture is completed before encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::captureDone()
+{
+    if (++mOutputCount >= mParameters.getBurstCountForAdvancedCapture()) {
+        unconfigureAdvancedCapture();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : Live_Snapshot_thread
+ *
+ * DESCRIPTION: Separate thread for taking live snapshot during recording
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void* Live_Snapshot_thread (void* data)
+{
+
+    QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(data);
+    if (!hw) {
+        ALOGE("Live_Snapshot_thread: NULL camera device");
+        return (void *)BAD_VALUE;
+    }
+    hw->takeLiveSnapshot_internal();
+    return (void* )NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : Int_Pic_thread
+ *
+ * DESCRIPTION: Separate thread for taking snapshot triggered by camera backend
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void* Int_Pic_thread (void* data)
+{
+
+    QCamera2HardwareInterface *hw = reinterpret_cast<QCamera2HardwareInterface *>(data);
+
+    if (!hw) {
+        ALOGE("Int_Pic_thread: NULL camera device");
+        return (void *)BAD_VALUE;
+    }
+
+    bool JpegMemOpt = false;
+
+    hw->takeBackendPic_internal(&JpegMemOpt);
+    hw->checkIntPicPending(JpegMemOpt);
+
+    return (void* )NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeLiveSnapshot
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot()
+{
+    int rc = NO_ERROR;
+    rc= pthread_create(&mLiveSnapshotThread, NULL, Live_Snapshot_thread, (void *) this);
+    return rc;
+}
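+
+/*
+ * Note (illustrative): takeLiveSnapshot() only spawns Live_Snapshot_thread,
+ * which runs takeLiveSnapshot_internal() asynchronously; the thread is later
+ * joined in cancelLiveSnapshot(). A minimal caller-side sketch, assuming a
+ * valid QCamera2HardwareInterface pointer `hw` (hypothetical name):
+ *
+ *     if (hw->takeLiveSnapshot() != NO_ERROR) {
+ *         ALOGE("could not create live snapshot thread");
+ *     }
+ *     // ... recording continues; later:
+ *     hw->cancelLiveSnapshot(); // joins mLiveSnapshotThread, stops postproc
+ */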
+
+/*===========================================================================
+ * FUNCTION   : takePictureInternal
+ *
+ * DESCRIPTION: take snapshot triggered by backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePictureInternal()
+{
+    int rc = NO_ERROR;
+    rc= pthread_create(&mIntPicThread, NULL, Int_Pic_thread, (void *) this);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkIntPicPending
+ *
+ * DESCRIPTION: timed wait for jpeg completion event, and send
+ *                        back completion event to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::checkIntPicPending(bool JpegMemOpt)
+{
+    cam_int_evt_params_t params;
+    int rc = NO_ERROR;
+
+    struct timespec ts;
+    struct timeval tp;
+    gettimeofday(&tp, NULL);
+    // build an absolute deadline ~1 second from now; tv_nsec must stay below 1e9
+    ts.tv_sec  = tp.tv_sec + 1;
+    ts.tv_nsec = tp.tv_usec * 1000;
+
+    if (true == m_bIntEvtPending) {
+        //wait on the eztune condition variable
+        pthread_mutex_lock(&m_int_lock);
+        rc = pthread_cond_timedwait(&m_int_cond, &m_int_lock, &ts);
+        m_bIntEvtPending = false;
+        pthread_mutex_unlock(&m_int_lock);
+        if (ETIMEDOUT == rc) {
+            return;
+        }
+
+        params.dim = m_postprocessor.m_dst_dim;
+        //send event back to server with the file path
+        memcpy(&params.path[0], &m_BackendFileName[0], 50);
+        params.size = mBackendFileSize;
+        pthread_mutex_lock(&m_parm_lock);
+        rc = mParameters.setIntEvent(params);
+        pthread_mutex_unlock(&m_parm_lock);
+
+        lockAPI();
+        rc = processAPI(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+        unlockAPI();
+        if (false == mParameters.isZSLMode()) {
+            lockAPI();
+            rc = processAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
+            unlockAPI();
+        }
+
+        m_postprocessor.setJpegMemOpt(JpegMemOpt);
+    }
+
+    return;
+}
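+
+/*
+ * Note (illustrative): the backend-triggered snapshot flow in this file is:
+ * camEvtHandle() receives CAM_EVENT_TYPE_INT_TAKE_PIC, sets m_bIntEvtPending
+ * and spawns Int_Pic_thread; that thread calls takeBackendPic_internal()
+ * (below) to issue QCAMERA_SM_EVT_TAKE_PICTURE, and then checkIntPicPending()
+ * above, which waits (bounded) on m_int_cond for the jpeg-done signal and
+ * reports the output file path back to the backend via setIntEvent().
+ */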
+
+/*===========================================================================
+ * FUNCTION   : takeBackendPic_internal
+ *
+ * DESCRIPTION: take snapshot triggered by backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeBackendPic_internal(bool *JpegMemOpt)
+{
+    int rc;
+
+    *JpegMemOpt = m_postprocessor.getJpegMemOpt();
+    m_postprocessor.setJpegMemOpt(false);
+
+    lockAPI();
+    rc = processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+    if (rc == NO_ERROR) {
+        qcamera_api_result_t apiResult;
+        waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+    }
+    unlockAPI();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeLiveSnapshot_internal
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot_internal()
+{
+    int rc = NO_ERROR;
+    getOrientation();
+    QCameraChannel *pChannel = NULL;
+
+    // start post processor
+    rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_SNAPSHOT]);
+    if (NO_ERROR != rc) {
+        ALOGE("%s: Post-processor start failed %d", __func__, rc);
+        goto end;
+    }
+
+    pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+    if (NULL == pChannel) {
+        ALOGE("%s: Snapshot channel not initialized", __func__);
+        rc = NO_INIT;
+        goto end;
+    }
+
+    // start snapshot channel
+    if ((rc == NO_ERROR) && (NULL != pChannel)) {
+
+        // Find and try to link a metadata stream from preview channel
+        QCameraChannel *pMetaChannel = NULL;
+        QCameraStream *pMetaStream = NULL;
+
+        if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+            pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+            uint32_t streamNum = pMetaChannel->getNumOfStreams();
+            QCameraStream *pStream = NULL;
+            for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+                pStream = pMetaChannel->getStreamByIndex(i);
+                if ((NULL != pStream) &&
+                        (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
+                    pMetaStream = pStream;
+                    break;
+                }
+            }
+        }
+
+        if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+            rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+            if (NO_ERROR != rc) {
+                ALOGE("%s : Metadata stream link failed %d", __func__, rc);
+            }
+        }
+
+        rc = pChannel->start();
+    }
+
+end:
+    if (rc != NO_ERROR) {
+        rc = processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+        rc = sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelLiveSnapshot
+ *
+ * DESCRIPTION: cancel current live snapshot request
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot()
+{
+    int rc = NO_ERROR;
+    if (mLiveSnapshotThread != 0) {
+        pthread_join(mLiveSnapshotThread,NULL);
+        mLiveSnapshotThread = 0;
+    }
+    //stop post processor
+    m_postprocessor.stop();
+
+    // stop snapshot channel
+    rc = stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getParameters
+ *
+ * DESCRIPTION: get parameters impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : a string containing parameter pairs
+ *==========================================================================*/
+char* QCamera2HardwareInterface::getParameters()
+{
+    char* strParams = NULL;
+    String8 str;
+
+    int cur_width, cur_height;
+    pthread_mutex_lock(&m_parm_lock);
+    //Need take care Scale picture size
+    if(mParameters.m_reprocScaleParam.isScaleEnabled() &&
+        mParameters.m_reprocScaleParam.isUnderScaling()){
+        int scale_width, scale_height;
+
+        mParameters.m_reprocScaleParam.getPicSizeFromAPK(scale_width,scale_height);
+        mParameters.getPictureSize(&cur_width, &cur_height);
+
+        String8 pic_size;
+        char buffer[32];
+        snprintf(buffer, sizeof(buffer), "%dx%d", scale_width, scale_height);
+        pic_size.append(buffer);
+        mParameters.set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+    }
+
+    str = mParameters.flatten( );
+    strParams = (char *)malloc(sizeof(char)*(str.length()+1));
+    if(strParams != NULL){
+        memset(strParams, 0, sizeof(char)*(str.length()+1));
+        strncpy(strParams, str.string(), str.length());
+        strParams[str.length()] = 0;
+    }
+
+    if(mParameters.m_reprocScaleParam.isScaleEnabled() &&
+        mParameters.m_reprocScaleParam.isUnderScaling()){
+        //need set back picture size
+        String8 pic_size;
+        char buffer[32];
+        snprintf(buffer, sizeof(buffer), "%dx%d", cur_width, cur_height);
+        pic_size.append(buffer);
+        mParameters.set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+    }
+    pthread_mutex_unlock(&m_parm_lock);
+    return strParams;
+}
+
+/*===========================================================================
+ * FUNCTION   : putParameters
+ *
+ * DESCRIPTION: put parameters string impl
+ *
+ * PARAMETERS :
+ *   @parms   : parameters string to be released
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::putParameters(char *parms)
+{
+    free(parms);
+    return NO_ERROR;
+}
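+
+/*
+ * Note (illustrative): getParameters() returns a heap string allocated with
+ * malloc(), so every successful call must be balanced by putParameters() to
+ * avoid a leak. A minimal caller-side sketch, assuming a valid instance
+ * pointer `hw` (hypothetical name):
+ *
+ *     char *params = hw->getParameters();
+ *     if (params != NULL) {
+ *         String8 flat(params);      // copy out whatever is needed
+ *         hw->putParameters(params); // frees the malloc'd string
+ *     }
+ */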
+
+/*===========================================================================
+ * FUNCTION   : sendCommand
+ *
+ * DESCRIPTION: send command impl
+ *
+ * PARAMETERS :
+ *   @command : command to be executed
+ *   @arg1    : optional argument 1
+ *   @arg2    : optional argument 2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::sendCommand(int32_t command,
+        int32_t &arg1, int32_t &/*arg2*/)
+{
+    int rc = NO_ERROR;
+
+    switch (command) {
+#ifndef VANILLA_HAL
+    case CAMERA_CMD_LONGSHOT_ON:
+        arg1 = 0;
+        // Longshot can only be enabled when image capture
+        // is not active.
+        if ( !m_stateMachine.isCaptureRunning() ) {
+            CDBG_HIGH("%s: Longshot Enabled", __func__);
+            mLongshotEnabled = true;
+
+            // Due to recent buffer count optimizations,
+            // ZSL might run with considerably fewer buffers
+            // when not in longshot mode. Preview needs to
+            // restart in this case.
+            if (isZSLMode() && m_stateMachine.isPreviewRunning()) {
+                QCameraChannel *pChannel = NULL;
+                QCameraStream *pSnapStream = NULL;
+                pChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+                if (NULL != pChannel) {
+                    QCameraStream *pStream = NULL;
+                    for (uint32_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+                        pStream = pChannel->getStreamByIndex(i);
+                        if (pStream != NULL) {
+                            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                                pSnapStream = pStream;
+                                break;
+                            }
+                        }
+                    }
+                    if (NULL != pSnapStream) {
+                        uint8_t required = 0;
+                        required = getBufNumRequired(CAM_STREAM_TYPE_SNAPSHOT);
+                        if (pSnapStream->getBufferCount() < required) {
+                            // We restart here to reset the FPS and the number
+                            // of buffers as required by the longshot use case.
+                            arg1 = QCAMERA_SM_EVT_RESTART_PERVIEW;
+                        }
+                    }
+                }
+            }
+            rc = mParameters.setLongshotEnable(mLongshotEnabled);
+            mPrepSnapRun = false;
+        } else {
+            rc = NO_INIT;
+        }
+        break;
+    case CAMERA_CMD_LONGSHOT_OFF:
+        if ( mLongshotEnabled && m_stateMachine.isCaptureRunning() ) {
+            cancelPicture();
+            processEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+            QCameraChannel *pZSLChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+            if (isZSLMode() && (NULL != pZSLChannel) && mPrepSnapRun) {
+                mCameraHandle->ops->stop_zsl_snapshot(
+                        mCameraHandle->camera_handle,
+                        pZSLChannel->getMyHandle());
+            }
+        }
+        CDBG_HIGH("%s: Longshot Disabled", __func__);
+        mPrepSnapRun = false;
+        mLongshotEnabled = false;
+        rc = mParameters.setLongshotEnable(mLongshotEnabled);
+        break;
+    case CAMERA_CMD_HISTOGRAM_ON:
+    case CAMERA_CMD_HISTOGRAM_OFF:
+        rc = setHistogram(command == CAMERA_CMD_HISTOGRAM_ON? true : false);
+        CDBG_HIGH("%s: Histogram -> %s", __func__,
+              mParameters.isHistogramEnabled() ? "Enabled" : "Disabled");
+        break;
+#endif
+    case CAMERA_CMD_START_FACE_DETECTION:
+    case CAMERA_CMD_STOP_FACE_DETECTION:
+        rc = setFaceDetection(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+        CDBG_HIGH("%s: FaceDetection -> %s", __func__,
+              mParameters.isFaceDetectionEnabled() ? "Enabled" : "Disabled");
+        break;
+#ifndef VANILLA_HAL
+    case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+#endif
+    default:
+        rc = NO_ERROR;
+        break;
+    }
+    return rc;
+}
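+
+/*
+ * Note (illustrative): for CAMERA_CMD_LONGSHOT_ON, arg1 doubles as an output.
+ * It stays 0 in the common case, but is set to QCAMERA_SM_EVT_RESTART_PERVIEW
+ * when the ZSL snapshot stream holds fewer buffers than longshot requires,
+ * telling the caller to restart preview. A hedged caller sketch, assuming a
+ * valid instance pointer `hw` (hypothetical name):
+ *
+ *     int32_t arg1 = 0, arg2 = 0;
+ *     if (hw->sendCommand(CAMERA_CMD_LONGSHOT_ON, arg1, arg2) == NO_ERROR &&
+ *             arg1 == QCAMERA_SM_EVT_RESTART_PERVIEW) {
+ *         // restart preview so the snapshot stream can be re-allocated
+ *     }
+ */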
+
+/*===========================================================================
+ * FUNCTION   : registerFaceImage
+ *
+ * DESCRIPTION: register face image impl
+ *
+ * PARAMETERS :
+ *   @img_ptr : ptr to image buffer
+ *   @config  : ptr to config struct about input image info
+ *   @faceID  : [OUT] face ID to uniquely identify the registered face image
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::registerFaceImage(void *img_ptr,
+                                                 cam_pp_offline_src_config_t *config,
+                                                 int32_t &faceID)
+{
+    int rc = NO_ERROR;
+    faceID = -1;
+
+    if (img_ptr == NULL || config == NULL) {
+        ALOGE("%s: img_ptr or config is NULL", __func__);
+        return BAD_VALUE;
+    }
+
+    // allocate ion memory for source image
+    QCameraHeapMemory *imgBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (imgBuf == NULL) {
+        ALOGE("%s: Unable to new heap memory obj for image buf", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = imgBuf->allocate(1, config->input_buf_planes.plane_info.frame_len);
+    if (rc < 0) {
+        ALOGE("%s: Unable to allocate heap memory for image buf", __func__);
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+
+    void *pBufPtr = imgBuf->getPtr(0);
+    if (pBufPtr == NULL) {
+        ALOGE("%s: image buf is NULL", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+    memcpy(pBufPtr, img_ptr, config->input_buf_planes.plane_info.frame_len);
+
+    cam_pp_feature_config_t pp_feature;
+    memset(&pp_feature, 0, sizeof(cam_pp_feature_config_t));
+    pp_feature.feature_mask = CAM_QCOM_FEATURE_REGISTER_FACE;
+    QCameraReprocessChannel *pChannel =
+        addOfflineReprocChannel(*config, pp_feature, NULL, NULL);
+
+    if (pChannel == NULL) {
+        ALOGE("%s: fail to add offline reprocess channel", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        return UNKNOWN_ERROR;
+    }
+
+    rc = pChannel->start();
+    if (rc != NO_ERROR) {
+        ALOGE("%s: Cannot start reprocess channel", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        delete pChannel;
+        return rc;
+    }
+
+    ssize_t bufSize = imgBuf->getSize(0);
+    if (BAD_INDEX != bufSize) {
+        rc = pChannel->doReprocess(imgBuf->getFd(0), (size_t)bufSize, faceID);
+    } else {
+        ALOGE("%s: Failed to retrieve buffer size (bad index)", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        pChannel->stop();
+        delete pChannel;
+        return UNKNOWN_ERROR;
+    }
+
+    // done with register face image, free imgbuf and delete reprocess channel
+    imgBuf->deallocate();
+    delete imgBuf;
+    imgBuf = NULL;
+    pChannel->stop();
+    delete pChannel;
+    pChannel = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resource impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::release()
+{
+    // stop and delete all channels
+    for (int i = 0; i <QCAMERA_CH_TYPE_MAX ; i++) {
+        if (m_channels[i] != NULL) {
+            stopChannel((qcamera_ch_type_enum_t)i);
+            delChannel((qcamera_ch_type_enum_t)i);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: camera status dump impl
+ *
+ * PARAMETERS :
+ *   @fd      : fd for the buffer to be dumped with camera status
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(int /*fd*/)
+{
+    ALOGE("%s: not supported yet", __func__);
+    return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION   : processAPI
+ *
+ * DESCRIPTION: process API calls from upper layer
+ *
+ * PARAMETERS :
+ *   @api         : API to be processed
+ *   @api_payload : ptr to API payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
+{
+    return m_stateMachine.procAPI(api, api_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processEvt
+ *
+ * DESCRIPTION: process Evt from backend via mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    return m_stateMachine.procEvt(evt, evt_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processSyncEvt
+ *
+ * DESCRIPTION: process synchronous Evt from backend
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    int rc = NO_ERROR;
+
+    pthread_mutex_lock(&m_evtLock);
+    rc =  processEvt(evt, evt_payload);
+    if (rc == NO_ERROR) {
+        memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+        while (m_evtResult.request_api != evt) {
+            pthread_cond_wait(&m_evtCond, &m_evtLock);
+        }
+        rc =  m_evtResult.status;
+    }
+    pthread_mutex_unlock(&m_evtLock);
+
+    return rc;
+}
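+
+/*
+ * Note: processSyncEvt() blocks on m_evtCond until the matching result
+ * arrives; the producer side is signalEvtResult() further below, which copies
+ * the result into m_evtResult and signals m_evtCond under m_evtLock.
+ */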
+
+/*===========================================================================
+ * FUNCTION   : evtHandle
+ *
+ * DESCRIPTION: Function registered to mm-camera-interface to handle backend events
+ *
+ * PARAMETERS :
+ *   @camera_handle : handle of the camera that generated the event
+ *   @evt           : ptr to event
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
+                                          mm_camera_event_t *evt,
+                                          void *user_data)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)user_data;
+    if (obj && evt) {
+        mm_camera_event_t *payload =
+            (mm_camera_event_t *)malloc(sizeof(mm_camera_event_t));
+        if (NULL != payload) {
+            *payload = *evt;
+            // peek into the event: if this is an eztune event from the server,
+            // we don't need to post it to the SM queues; we should directly
+            // spawn a thread and get the job done (jpeg or raw snapshot)
+            if (CAM_EVENT_TYPE_INT_TAKE_PIC == payload->server_event_type) {
+                pthread_mutex_lock(&obj->m_int_lock);
+                obj->m_bIntEvtPending = true;
+                pthread_mutex_unlock(&obj->m_int_lock);
+                obj->takePictureInternal();
+                free(payload);
+            } else {
+                obj->processEvt(QCAMERA_SM_EVT_EVT_NOTIFY, payload);
+            }
+        }
+    } else {
+        ALOGE("%s: NULL user_data", __func__);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events
+ *
+ * PARAMETERS :
+ *   @status    : status of jpeg job
+ *   @client_hdl: jpeg client handle
+ *   @jobId     : jpeg job Id
+ *   @p_output  : ptr to jpeg output result struct
+ *   @userdata  : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::jpegEvtHandle(jpeg_job_status_t status,
+                                              uint32_t /*client_hdl*/,
+                                              uint32_t jobId,
+                                              mm_jpeg_output_t *p_output,
+                                              void *userdata)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)userdata;
+    if (obj) {
+        qcamera_jpeg_evt_payload_t *payload =
+            (qcamera_jpeg_evt_payload_t *)malloc(sizeof(qcamera_jpeg_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_jpeg_evt_payload_t));
+            payload->status = status;
+            payload->jobId = jobId;
+            if (p_output != NULL) {
+                payload->out_data = *p_output;
+            }
+            obj->processUFDumps(payload);
+            obj->processMTFDumps(payload);
+            obj->processEvt(QCAMERA_SM_EVT_JPEG_EVT_NOTIFY, payload);
+        }
+    } else {
+        ALOGE("%s: NULL user_data", __func__);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : thermalEvtHandle
+ *
+ * DESCRIPTION: routine to handle thermal event notification
+ *
+ * PARAMETERS :
+ *   @level      : thermal level
+ *   @userdata   : userdata passed in during registration
+ *   @data       : opaque data from thermal client
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::thermalEvtHandle(
+        qcamera_thermal_level_enum_t level, void *userdata, void *data)
+{
+    if (!mCameraOpened) {
+        CDBG("%s: Camera is not opened, no need to handle thermal evt", __func__);
+        return NO_ERROR;
+    }
+
+    // Make sure thermal events are logged
+    CDBG("%s: level = %d, userdata = %p, data = %p",
+        __func__, level, userdata, data);
+    // We don't need to lockAPI/waitAPI here; QCAMERA_SM_EVT_THERMAL_NOTIFY
+    // becomes an async call. This also means we can only pass the payload
+    // by value, not by address.
+    return processAPI(QCAMERA_SM_EVT_THERMAL_NOTIFY, (void *)&level);
+}
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to notify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type to be sent
+ *   @ext1    : optional extension1
+ *   @ext2    : optional extension2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendEvtNotify(int32_t msg_type,
+                                                 int32_t ext1,
+                                                 int32_t ext2)
+{
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.ext1 = ext1;
+    cbArg.ext2 = ext2;
+    return m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION   : processAutoFocusEvent
+ *
+ * DESCRIPTION: process auto focus event
+ *
+ * PARAMETERS :
+ *   @focus_data: struct containing auto focus result info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAutoFocusEvent(cam_auto_focus_data_t &focus_data)
+{
+    int32_t ret = NO_ERROR;
+    CDBG_HIGH("%s: E",__func__);
+
+    m_currentFocusState = focus_data.focus_state;
+
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+    CDBG_HIGH("[AF_DBG] %s: focusMode=%d, m_currentFocusState=%d, m_bAFRunning=%d",
+         __func__, focusMode, m_currentFocusState, isAFRunning());
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+        if (getCancelAutoFocus()) {
+            // auto focus has been canceled, just ignore it
+            break;
+        }
+
+        if (focus_data.focus_state == CAM_AF_PASSIVE_SCANNING ||
+            focus_data.focus_state == CAM_AF_PASSIVE_FOCUSED ||
+            focus_data.focus_state == CAM_AF_PASSIVE_UNFOCUSED) {
+            //ignore passive(CAF) events in Auto/Macro AF modes
+            break;
+        }
+
+        if (focus_data.focus_state == CAM_AF_SCANNING ||
+            focus_data.focus_state == CAM_AF_INACTIVE) {
+            // in the middle of focusing, just ignore it
+            break;
+        }
+
+        // update focus distance
+        mParameters.updateFocusDistances(&focus_data.focus_dist);
+        if (mParameters.isZSLMode()) {
+          QCameraPicChannel *pZSLChannel =
+            (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+          if (NULL != pZSLChannel) {
+            //flush the zsl-buffer
+            uint32_t flush_frame_idx = focus_data.focused_frame_idx;
+            ALOGD("%s, flush the zsl-buffer before frame = %d.", __func__, flush_frame_idx);
+            pZSLChannel->flushSuperbuffer(flush_frame_idx);
+          }
+        }
+        ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                            (focus_data.focus_state == CAM_AF_FOCUSED)? true : false,
+                            0);
+        // multi-touch focus feature, record current lens position when focused.
+        if (mParameters.isTouchFocusing() &&
+                focus_data.focus_state == CAM_AF_FOCUSED &&
+                mParameters.isMultiTouchFocusSelected()) {
+            mParameters.updateMTFInfo(focus_data.focus_pos);
+        }
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        if (mActiveAF &&
+            (focus_data.focus_state == CAM_AF_PASSIVE_FOCUSED ||
+            focus_data.focus_state == CAM_AF_PASSIVE_UNFOCUSED)) {
+            //ignore passive(CAF) events during AF triggered by app/HAL
+            break;
+        }
+
+        if (focus_data.focus_state == CAM_AF_PASSIVE_FOCUSED ||
+            focus_data.focus_state == CAM_AF_PASSIVE_UNFOCUSED ||
+            focus_data.focus_state == CAM_AF_FOCUSED ||
+            focus_data.focus_state == CAM_AF_NOT_FOCUSED) {
+
+            // update focus distance
+            mParameters.updateFocusDistances(&focus_data.focus_dist);
+
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                  (focus_data.focus_state == CAM_AF_PASSIVE_FOCUSED ||
+                   focus_data.focus_state == CAM_AF_FOCUSED)? true : false,
+                  0);
+        }
+        ret = sendEvtNotify(CAMERA_MSG_FOCUS_MOVE,
+                (focus_data.focus_state == CAM_AF_PASSIVE_SCANNING)? true : false,
+                0);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        CDBG_HIGH("%s: no ops for autofocus event in focusmode %d", __func__, focusMode);
+        break;
+    }
+
+    //Reset mActiveAF once we receive focus done event
+    if (focus_data.focus_state == CAM_AF_FOCUSED ||
+        focus_data.focus_state == CAM_AF_NOT_FOCUSED) {
+        mActiveAF = false;
+    }
+
+    // we save cam_auto_focus_data_t.focus_pos to parameters,
+    // in any focus mode.
+    CDBG_HIGH("%s, update focus position: %d", __func__, focus_data.focus_pos);
+    mParameters.updateCurrentFocusPosition(focus_data.focus_pos);
+
+    CDBG_HIGH("%s: X",__func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomEvent
+ *
+ * DESCRIPTION: process zoom event
+ *
+ * PARAMETERS :
+ *   @crop_info : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZoomEvent(cam_crop_data_t &crop_info)
+{
+    int32_t ret = NO_ERROR;
+
+    for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            ret = m_channels[i]->processZoomDone(mPreviewWindow, crop_info);
+        }
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processHDRData
+ *
+ * DESCRIPTION: process HDR scene events
+ *
+ * PARAMETERS :
+ *   @hdr_scene : HDR scene event data
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHDRData(cam_asd_hdr_scene_data_t hdr_scene)
+{
+    int rc = NO_ERROR;
+
+#ifndef VANILLA_HAL
+    if (hdr_scene.is_hdr_scene &&
+      (hdr_scene.hdr_confidence > HDR_CONFIDENCE_THRESHOLD) &&
+      mParameters.isAutoHDREnabled()) {
+        m_HDRSceneEnabled = true;
+    } else {
+        m_HDRSceneEnabled = false;
+    }
+    pthread_mutex_lock(&m_parm_lock);
+    mParameters.setHDRSceneEnable(m_HDRSceneEnabled);
+    pthread_mutex_unlock(&m_parm_lock);
+
+    if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+
+        size_t data_len = sizeof(int);
+        size_t buffer_len = 1 *sizeof(int)       //meta type
+                          + 1 *sizeof(int)       //data len
+                          + 1 *sizeof(int);      //data
+        camera_memory_t *hdrBuffer = mGetMemory(-1,
+                                                 buffer_len,
+                                                 1,
+                                                 mCallbackCookie);
+        if ( NULL == hdrBuffer ) {
+            ALOGE("%s: Not enough memory for auto HDR data",
+                  __func__);
+            return NO_MEMORY;
+        }
+
+        int *pHDRData = (int *)hdrBuffer->data;
+        if (pHDRData == NULL) {
+            ALOGE("%s: memory data ptr is NULL", __func__);
+            hdrBuffer->release(hdrBuffer);
+            return UNKNOWN_ERROR;
+        }
+
+        pHDRData[0] = CAMERA_META_DATA_HDR;
+        pHDRData[1] = (int)data_len;
+        pHDRData[2] = m_HDRSceneEnabled;
+
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+        cbArg.data = hdrBuffer;
+        cbArg.user_data = hdrBuffer;
+        cbArg.cookie = this;
+        cbArg.release_cb = releaseCameraMemory;
+        rc = m_cbNotifier.notifyCallback(cbArg);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: fail sending auto HDR notification", __func__);
+            hdrBuffer->release(hdrBuffer);
+        }
+    }
+
+    CDBG("%s : hdr_scene_data: processHDRData: %d %f",
+          __func__,
+          hdr_scene.is_hdr_scene,
+          hdr_scene.hdr_confidence);
+
+#endif
+  return rc;
+}
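+
+/*
+ * Note (illustrative): the CAMERA_MSG_META_DATA payload built above is three
+ * packed ints: {meta type, data length, data}. A hypothetical framework-side
+ * consumer could unpack it roughly as follows, where `mem` stands for the
+ * camera_memory_t handed to the data callback (sketch only):
+ *
+ *     int *p = (int *)mem->data;
+ *     if (p[0] == CAMERA_META_DATA_HDR) {
+ *         bool hdrSceneEnabled = (p[2] != 0); // p[1] holds the data length
+ *     }
+ */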
+
+/*===========================================================================
+ * FUNCTION   : transAwbMetaToParams
+ *
+ * DESCRIPTION: translate awb params from metadata callback to QCameraParameters
+ *
+ * PARAMETERS :
+ *   @awb_params : awb params from metadata callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::transAwbMetaToParams(cam_awb_params_t &awb_params)
+{
+    CDBG("%s, cct value: %d", __func__, awb_params.cct_value);
+
+    return mParameters.updateCCTValue(awb_params.cct_value);
+}
+
+/*===========================================================================
+ * FUNCTION   : processPrepSnapshotDoneEvent
+ *
+ * DESCRIPTION: process prep snapshot done event
+ *
+ * PARAMETERS :
+ *   @prep_snapshot_state  : state of prepare snapshot done, i.e. whether
+ *                           future frames are needed for capture.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processPrepSnapshotDoneEvent(
+                        cam_prep_snapshot_state_t prep_snapshot_state)
+{
+    int32_t ret = NO_ERROR;
+
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] &&
+        prep_snapshot_state == NEED_FUTURE_FRAME) {
+        CDBG_HIGH("%s: already handled in mm-camera-intf, no ops here", __func__);
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processASDUpdate
+ *
+ * DESCRIPTION: process ASD update event
+ *
+ * PARAMETERS :
+ *   @scene: selected scene mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processASDUpdate(cam_auto_scene_t scene)
+{
+#ifndef VANILLA_HAL
+    //set ASD parameter
+    mParameters.set(QCameraParameters::KEY_SELECTED_AUTO_SCENE, mParameters.getASDStateString(scene));
+
+    size_t data_len = sizeof(cam_auto_scene_t);
+    size_t buffer_len = 1 *sizeof(int)       //meta type
+                      + 1 *sizeof(int)       //data len
+                      + data_len;            //data
+    camera_memory_t *asdBuffer = mGetMemory(-1,
+                                             buffer_len,
+                                             1,
+                                             mCallbackCookie);
+    if ( NULL == asdBuffer ) {
+        ALOGE("%s: Not enough memory for ASD metadata", __func__);
+        return NO_MEMORY;
+    }
+
+    int *pASDData = (int *)asdBuffer->data;
+    if (pASDData == NULL) {
+        ALOGE("%s: memory data ptr is NULL", __func__);
+        asdBuffer->release(asdBuffer);
+        return UNKNOWN_ERROR;
+    }
+
+    pASDData[0] = CAMERA_META_DATA_ASD;
+    pASDData[1] = (int)data_len;
+    pASDData[2] = scene;
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_META_DATA;
+    cbArg.data = asdBuffer;
+    cbArg.user_data = asdBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: fail sending notification", __func__);
+        asdBuffer->release(asdBuffer);
+    }
+#endif
+    return NO_ERROR;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : processAWBUpdate
+ *
+ * DESCRIPTION: process AWB update event
+ *
+ * PARAMETERS :
+ *   @awb_params: current awb parameters from back-end.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAWBUpdate(cam_awb_params_t &awb_params)
+{
+    return transAwbMetaToParams(awb_params);
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegNotify
+ *
+ * DESCRIPTION: process jpeg event
+ *
+ * PARAMETERS :
+ *   @jpeg_evt: ptr to jpeg event payload
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_evt)
+{
+    return m_postprocessor.processJpegEvt(jpeg_evt);
+}
+
+/*===========================================================================
+ * FUNCTION   : lockAPI
+ *
+ * DESCRIPTION: lock to process API
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::lockAPI()
+{
+    pthread_mutex_lock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : waitAPIResult
+ *
+ * DESCRIPTION: wait for the API result to come back. This is a blocking call; it
+ *              returns only when the specified API event type arrives
+ *
+ * PARAMETERS :
+ *   @api_evt : API event type
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitAPIResult(qcamera_sm_evt_enum_t api_evt,
+        qcamera_api_result_t *apiResult)
+{
+    CDBG("%s: wait for API result of evt (%d)", __func__, api_evt);
+    int resultReceived = 0;
+    while  (!resultReceived) {
+        pthread_cond_wait(&m_cond, &m_lock);
+        if (m_apiResultList != NULL) {
+            api_result_list *apiResultList = m_apiResultList;
+            api_result_list *apiResultListPrevious = m_apiResultList;
+            while (apiResultList != NULL) {
+                if (apiResultList->result.request_api == api_evt) {
+                    resultReceived = 1;
+                    *apiResult = apiResultList->result;
+                    apiResultListPrevious->next = apiResultList->next;
+                    if (apiResultList == m_apiResultList) {
+                        m_apiResultList = apiResultList->next;
+                    }
+                    free(apiResultList);
+                    break;
+                }
+                else {
+                    apiResultListPrevious = apiResultList;
+                    apiResultList = apiResultList->next;
+                }
+            }
+        }
+    }
+    CDBG("%s: return (%d) from API result wait for evt (%d)",
+          __func__, apiResult->status, api_evt);
+}
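+
+/*
+ * Note (illustrative): the synchronous API-call pattern used throughout this
+ * file is lockAPI() -> processAPI() -> waitAPIResult() -> unlockAPI(), e.g.
+ * as in takeBackendPic_internal() above:
+ *
+ *     lockAPI();
+ *     rc = processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+ *     if (rc == NO_ERROR) {
+ *         qcamera_api_result_t apiResult;
+ *         waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+ *     }
+ *     unlockAPI();
+ */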
+
+
+/*===========================================================================
+ * FUNCTION   : unlockAPI
+ *
+ * DESCRIPTION: API processing is done, unlock
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unlockAPI()
+{
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalAPIResult
+ *
+ * DESCRIPTION: signal the condition variable that a certain API event type has arrived
+ *
+ * PARAMETERS :
+ *   @result  : API result
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalAPIResult(qcamera_api_result_t *result)
+{
+
+    pthread_mutex_lock(&m_lock);
+    api_result_list *apiResult = (api_result_list *)malloc(sizeof(api_result_list));
+    if (apiResult == NULL) {
+        ALOGE("%s: ERROR: malloc for api result failed", __func__);
+        ALOGE("%s: ERROR: api thread will wait forever for this lost result", __func__);
+        goto malloc_failed;
+    }
+    apiResult->result = *result;
+    apiResult->next = NULL;
+    if (m_apiResultList == NULL) m_apiResultList = apiResult;
+    else {
+        api_result_list *apiResultList = m_apiResultList;
+        while(apiResultList->next != NULL) apiResultList = apiResultList->next;
+        apiResultList->next = apiResult;
+    }
+malloc_failed:
+    pthread_cond_broadcast(&m_cond);
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalEvtResult
+ *
+ * DESCRIPTION: signal condition variable that certain event was processed
+ *
+ * PARAMETERS :
+ *   @result  : Event result
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalEvtResult(qcamera_api_result_t *result)
+{
+    pthread_mutex_lock(&m_evtLock);
+    m_evtResult = *result;
+    pthread_cond_signal(&m_evtCond);
+    pthread_mutex_unlock(&m_evtLock);
+}
+
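+/*===========================================================================
+ * FUNCTION   : prepareRawStream
+ *
+ * DESCRIPTION: find the maximum dimension among the streams of all active
+ *              channels (and of the channel passed in, skipping metadata
+ *              streams) and update the RAW dimension in parameters accordingly
+ *
+ * PARAMETERS :
+ *   @curChannel : channel whose RAW stream is about to be added
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/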
+int32_t QCamera2HardwareInterface::prepareRawStream(QCameraChannel *curChannel)
+{
+    int32_t rc = NO_ERROR;
+    cam_dimension_t str_dim,max_dim;
+    QCameraChannel *pChannel;
+
+    max_dim.width = 0;
+    max_dim.height = 0;
+
+    for (int j = 0; j < QCAMERA_CH_TYPE_MAX; j++) {
+        if (m_channels[j] != NULL) {
+            pChannel = m_channels[j];
+            for (uint8_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+                QCameraStream *pStream = pChannel->getStreamByIndex(i);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                        continue;
+                    }
+                    pStream->getFrameDimension(str_dim);
+                    if (str_dim.width > max_dim.width) {
+                        max_dim.width = str_dim.width;
+                    }
+                    if (str_dim.height > max_dim.height) {
+                        max_dim.height = str_dim.height;
+                    }
+                }
+            }
+        }
+    }
+
+    for (uint8_t i = 0; i < curChannel->getNumOfStreams(); i++) {
+        QCameraStream *pStream = curChannel->getStreamByIndex(i);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                continue;
+            }
+            pStream->getFrameDimension(str_dim);
+            if (str_dim.width > max_dim.width) {
+                max_dim.width = str_dim.width;
+            }
+            if (str_dim.height > max_dim.height) {
+                max_dim.height = str_dim.height;
+            }
+        }
+    }
+    rc = mParameters.updateRAW(max_dim);
+    return rc;
+}
+/*===========================================================================
+ * FUNCTION   : addStreamToChannel
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @pChannel   : ptr to channel obj
+ *   @streamType : type of stream to be added
+ *   @streamCB   : callback of stream
+ *   @userData   : user data ptr to callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addStreamToChannel(QCameraChannel *pChannel,
+                                                      cam_stream_type_t streamType,
+                                                      stream_cb_routine streamCB,
+                                                      void *userData)
+{
+    int32_t rc = NO_ERROR;
+
+    if (streamType == CAM_STREAM_TYPE_RAW) {
+        prepareRawStream(pChannel);
+    }
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(streamType);
+    if (pStreamInfo == NULL) {
+        ALOGE("%s: no mem for stream info buf", __func__);
+        return NO_MEMORY;
+    }
+    uint8_t minStreamBufNum = getBufNumRequired(streamType);
+    bool bDynAllocBuf = false;
+    if (isZSLMode() && streamType == CAM_STREAM_TYPE_SNAPSHOT) {
+        bDynAllocBuf = true;
+    }
+
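+    // For snapshot, postview, metadata and RAW streams in non-ZSL, non-longshot,
+    // non-recording use cases, pass true for the final (deferred allocation)
+    // argument to addStream and queue the actual buffer allocation as deferred
+    // work (CMD_DEFF_ALLOCATE_BUFF) below.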
+    if ( ( streamType == CAM_STREAM_TYPE_SNAPSHOT ||
+            streamType == CAM_STREAM_TYPE_POSTVIEW ||
+            streamType == CAM_STREAM_TYPE_METADATA ||
+            streamType == CAM_STREAM_TYPE_RAW) &&
+            !isZSLMode() &&
+            !isLongshotEnabled() &&
+            !mParameters.getRecordingHintValue()) {
+        rc = pChannel->addStream(*this,
+                pStreamInfo,
+                minStreamBufNum,
+                &gCamCapability[mCameraId]->padding_info,
+                streamCB, userData,
+                bDynAllocBuf,
+                true);
+
+        // Queue buffer allocation for Snapshot and Metadata streams
+        if ( !rc ) {
+            DefferWorkArgs args;
+            DefferAllocBuffArgs allocArgs;
+
+            memset(&args, 0, sizeof(DefferWorkArgs));
+            memset(&allocArgs, 0, sizeof(DefferAllocBuffArgs));
+            allocArgs.type = streamType;
+            allocArgs.ch = pChannel;
+            args.allocArgs = allocArgs;
+
+            if (streamType == CAM_STREAM_TYPE_SNAPSHOT) {
+                mSnapshotJob = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF,
+                        args);
+
+                if ( mSnapshotJob == -1) {
+                    rc = UNKNOWN_ERROR;
+                }
+            } else if (streamType == CAM_STREAM_TYPE_METADATA) {
+                mMetadataJob = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF,
+                        args);
+
+                if ( mMetadataJob == -1) {
+                    rc = UNKNOWN_ERROR;
+                }
+            } else if (streamType == CAM_STREAM_TYPE_RAW) {
+                mRawdataJob = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF,
+                        args);
+
+                if ( mRawdataJob == -1) {
+                    rc = UNKNOWN_ERROR;
+                }
+            }
+        }
+    } else {
+        rc = pChannel->addStream(*this,
+                pStreamInfo,
+                minStreamBufNum,
+                &gCamCapability[mCameraId]->padding_info,
+                streamCB, userData,
+                bDynAllocBuf,
+                false);
+    }
+
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add stream type (%d) failed, ret = %d",
+              __func__, streamType, rc);
+        pStreamInfo->deallocate();
+        delete pStreamInfo;
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addPreviewChannel
+ *
+ * DESCRIPTION: add a preview channel that contains a preview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addPreviewChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+        // With the no-preview torch workaround it is possible
+        // for a preview channel to already be present before
+        // start preview gets called.
+        CDBG_HIGH(" %s : Preview Channel already added!", __func__);
+        return NO_ERROR;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for preview channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // preview only channel, don't need bundle attr and cb
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init preview channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (isNoDisplayMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                nodisplay_preview_stream_cb_routine, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+    }
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add preview stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addVideoChannel
+ *
+ * DESCRIPTION: add a video channel that contains a video stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addVideoChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraVideoChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_VIDEO] != NULL) {
+        // if we had video channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_VIDEO];
+        m_channels[QCAMERA_CH_TYPE_VIDEO] = NULL;
+    }
+
+    pChannel = new QCameraVideoChannel(mCameraHandle->camera_handle,
+                                       mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for video channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // video only channel, don't need bundle attr and cb
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != 0) {
+        ALOGE("%s: init video channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_VIDEO,
+                            video_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add video stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_VIDEO] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addSnapshotChannel
+ *
+ * DESCRIPTION: add a snapshot channel that contains a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : Add this channel for live snapshot usecase. Regular capture will
+ *              use addCaptureChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addSnapshotChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_SNAPSHOT] != NULL) {
+        // if we had snapshot channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for snapshot channel", __func__);
+        return NO_MEMORY;
+    }
+
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.look_back = mParameters.getZSLBackLookCount();
+    attr.post_frame_skip = mParameters.getZSLBurstInterval();
+    attr.water_mark = mParameters.getZSLQueueDepth();
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr, snapshot_channel_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init snapshot channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+            NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addRawChannel
+ *
+ * DESCRIPTION: add a raw channel that contains a raw image stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addRawChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_RAW] != NULL) {
+        // if we had raw channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_RAW];
+        m_channels[QCAMERA_CH_TYPE_RAW] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for raw channel", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init raw channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with snapshot in regular RAW capture case
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+    waitDefferedWork(mMetadataJob);
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+                            raw_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add raw stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+    waitDefferedWork(mRawdataJob);
+    m_channels[QCAMERA_CH_TYPE_RAW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addZSLChannel
+ *
+ * DESCRIPTION: add a ZSL channel that contains a preview stream and
+ *              a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addZSLChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraPicChannel *pChannel = NULL;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] != NULL) {
+        // if we had ZSL channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_ZSL];
+        m_channels[QCAMERA_CH_TYPE_ZSL] = NULL;
+    }
+
+    if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+        // if we had preview channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
+        m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
+    }
+
+    pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+                                     mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for ZSL channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // ZSL channel, init with bundle attr and cb
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.look_back = mParameters.getZSLBackLookCount();
+    attr.post_frame_skip = mParameters.getZSLBurstInterval();
+    attr.water_mark = mParameters.getZSLQueueDepth();
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        zsl_channel_cb,
+                        this);
+    if (rc != 0) {
+        ALOGE("%s: init ZSL channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (isNoDisplayMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                nodisplay_preview_stream_cb_routine, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+    }
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add preview stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
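+    // Optionally add a RAW stream to the ZSL channel when the
+    // persist.camera.raw_yuv property is set to a non-zero value.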
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+    if ( raw_yuv ) {
+        rc = addStreamToChannel(pChannel,
+                                CAM_STREAM_TYPE_RAW,
+                                NULL,
+                                this);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: add raw stream failed, ret = %d", __func__, rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    m_channels[QCAMERA_CH_TYPE_ZSL] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addCaptureChannel
+ *
+ * DESCRIPTION: add a capture channel that contains a snapshot stream
+ *              and a postview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : Add this channel for regular capture usecase.
+ *              For Live snapshot usecase, use addSnapshotChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCaptureChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraPicChannel *pChannel = NULL;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+    if (m_channels[QCAMERA_CH_TYPE_CAPTURE] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_CAPTURE];
+        m_channels[QCAMERA_CH_TYPE_CAPTURE] = NULL;
+    }
+
+    pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for capture channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // Capture channel only needs the snapshot and postview streams to start together
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    if ( mLongshotEnabled ) {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+        attr.look_back = mParameters.getZSLBackLookCount();
+        attr.water_mark = mParameters.getZSLQueueDepth();
+    } else {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    }
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+
+    rc = pChannel->init(&attr,
+                        capture_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init capture channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with snapshot in regular capture case
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (!mLongshotEnabled) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_POSTVIEW,
+                                NULL, this);
+
+        if (rc != NO_ERROR) {
+            ALOGE("%s: add postview stream failed, ret = %d", __func__, rc);
+            delete pChannel;
+            return rc;
+        }
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+
+      if (rc != NO_ERROR) {
+          ALOGE("%s: add preview stream failed, ret = %d", __func__, rc);
+          delete pChannel;
+          return rc;
+      }
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+    if ( raw_yuv ) {
+        rc = addStreamToChannel(pChannel,
+                                CAM_STREAM_TYPE_RAW,
+                                snapshot_raw_stream_cb_routine,
+                                this);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: add raw stream failed, ret = %d", __func__, rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    m_channels[QCAMERA_CH_TYPE_CAPTURE] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addMetaDataChannel
+ *
+ * DESCRIPTION: add a meta data channel that contains a metadata stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addMetaDataChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_METADATA] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_METADATA];
+        m_channels[QCAMERA_CH_TYPE_METADATA] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for metadata channel", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pChannel->init(NULL,
+                        NULL,
+                        NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init metadata channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_METADATA] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocChannel
+ *
+ * DESCRIPTION: add a reprocess channel that will do reprocess on frames
+ *              coming from input channel
+ *
+ * PARAMETERS :
+ *   @pInputChannel : ptr to input channel whose frames will be post-processed
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addReprocChannel(
+                                                      QCameraChannel *pInputChannel)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    if (pInputChannel == NULL) {
+        ALOGE("%s: input channel obj is NULL", __func__);
+        return NULL;
+    }
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    // set up reprocess channel attributes
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        postproc_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    CDBG_HIGH("%s: Before pproc config check, min_required_pp_mask = %x", __func__, gCamCapability[mCameraId]->min_required_pp_mask);
+
+    // pp feature config
+    cam_pp_feature_config_t pp_config;
+    uint32_t required_mask = gCamCapability[mCameraId]->min_required_pp_mask;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+    if (mParameters.isZSLMode() || (required_mask & CAM_QCOM_FEATURE_CPP)) {
+        if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_EFFECT) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_EFFECT;
+            pp_config.effect = mParameters.getEffectValue();
+        }
+        if ((gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) &&
+                !mParameters.isOptiZoomEnabled()) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+            pp_config.sharpness = mParameters.getInt(QCameraParameters::KEY_QC_SHARPNESS);
+        }
+
+        if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_CROP) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+        }
+
+        if (mParameters.isWNREnabled()) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+            pp_config.denoise2d.denoise_enable = 1;
+            pp_config.denoise2d.process_plates = mParameters.getWaveletDenoiseProcessPlate();
+        }
+
+        if (required_mask & CAM_QCOM_FEATURE_CPP) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_CPP;
+        }
+
+    }
+
+    if (isCACEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
+    }
+
+    if (needRotationReprocess()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_CPP;
+        uint32_t rotation = getJpegRotation();
+        if (rotation == 0) {
+            pp_config.rotation = ROTATE_0;
+        } else if (rotation == 90) {
+            pp_config.rotation = ROTATE_90;
+        } else if (rotation == 180) {
+            pp_config.rotation = ROTATE_180;
+        } else if (rotation == 270) {
+            pp_config.rotation = ROTATE_270;
+        }
+    }
+
+    uint8_t minStreamBufNum = getBufNumRequired(CAM_STREAM_TYPE_OFFLINE_PROC);
+
+    if (mParameters.isHDREnabled()){
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
+        pp_config.hdr_param.hdr_enable = 1;
+        pp_config.hdr_param.hdr_need_1x = mParameters.isHDR1xFrameEnabled();
+        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+        pp_config.hdr_param.hdr_enable = 0;
+    }
+
+    if(needScaleReprocess()){
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+        mParameters.m_reprocScaleParam.getPicSizeFromAPK(
+              pp_config.scale_param.output_width, pp_config.scale_param.output_height);
+    }
+
+    CDBG_HIGH("%s: After pproc config check, feature_mask = %x", __func__, pp_config.feature_mask);
+
+    if(mParameters.isUbiFocusEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_UBIFOCUS;
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+    }
+
+    if(mParameters.isMultiTouchFocusEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS;
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS;
+    }
+
+    if(mParameters.isChromaFlashEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_CHROMA_FLASH;
+        //TODO: check flash value for captured image, then assign.
+        pp_config.flash_value = CAM_FLASH_ON;
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+    }
+
+    if(mParameters.isOptiZoomEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_OPTIZOOM;
+        pp_config.zoom_level =
+                (uint8_t) mParameters.getInt(CameraParameters::KEY_ZOOM);
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+    }
+
+    if (mParameters.isTruePortraitEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_TRUEPORTRAIT;
+        pp_config.tp_param.enable = mParameters.isTruePortraitEnabled();
+        pp_config.tp_param.meta_max_size = mParameters.TpMaxMetaSize();
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_TRUEPORTRAIT;
+        pp_config.tp_param.enable = 0;
+    }
+
+    //WNR and HDR happen inline. No extra buffers needed.
+    uint32_t temp_feature_mask = pp_config.feature_mask;
+    temp_feature_mask &= ~CAM_QCOM_FEATURE_DENOISE2D;
+    temp_feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+    if (temp_feature_mask && mParameters.isHDREnabled()) {
+        minStreamBufNum = (uint8_t)(1 + mParameters.getNumOfExtraHDRInBufsIfNeeded());
+    }
+
+    // Add non-inplace image lib buffers only when pproc is present,
+    // because pproc is non-inplace: the input buffers for imglib are the
+    // output buffers of pproc, so this number of extra buffers is required.
+    // If pproc is not there, input buffers for imglib come from the snapshot stream.
+    uint8_t imglib_extra_bufs = mParameters.getNumOfExtraBuffersForImageProc();
+    if (temp_feature_mask && imglib_extra_bufs) {
+        // 1 is added because getNumOfExtraBuffersForImageProc returns extra
+        // buffers assuming number of capture is already added
+        minStreamBufNum = (uint8_t)(minStreamBufNum + imglib_extra_bufs + 1);
+    }
+
+    CDBG_HIGH("%s: Allocating %d reproc buffers",__func__,minStreamBufNum);
+
+    bool offlineReproc = isRegularCapture();
+    rc = pChannel->addReprocStreamsFromSource(*this,
+                                              pp_config,
+                                              pInputChannel,
+                                              minStreamBufNum,
+                                              mParameters.getNumOfSnapshots(),
+                                              &gCamCapability[mCameraId]->padding_info,
+                                              mParameters,
+                                              mLongshotEnabled,
+                                              offlineReproc);
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add an offline reprocess channel that contains one reproc
+ *              stream and will do reprocess on frames coming from external
+ *              images
+ *
+ * PARAMETERS :
+ *   @img_config  : offline reprocess image info
+ *   @pp_feature  : pp feature config
+ *   @stream_cb   : callback routine of the reprocess stream
+ *   @userdata    : user data ptr to the stream callback
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addOfflineReprocChannel(
+                                            cam_pp_offline_src_config_t &img_config,
+                                            cam_pp_feature_config_t &pp_feature,
+                                            stream_cb_routine stream_cb,
+                                            void *userdata)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+    if (pStreamInfo == NULL) {
+        ALOGE("%s: no mem for stream info buf", __func__);
+        delete pChannel;
+        return NULL;
+    }
+
+    cam_stream_info_t *streamInfoBuf = (cam_stream_info_t *)pStreamInfo->getPtr(0);
+    memset(streamInfoBuf, 0, sizeof(cam_stream_info_t));
+    streamInfoBuf->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+    streamInfoBuf->fmt = img_config.input_fmt;
+    streamInfoBuf->dim = img_config.input_dim;
+    streamInfoBuf->buf_planes = img_config.input_buf_planes;
+    streamInfoBuf->streaming_mode = CAM_STREAMING_MODE_BURST;
+    streamInfoBuf->num_of_burst = img_config.num_of_bufs;
+
+    streamInfoBuf->reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+    streamInfoBuf->reprocess_config.offline = img_config;
+    streamInfoBuf->reprocess_config.pp_feature_config = pp_feature;
+
+    rc = pChannel->addStream(*this,
+                             pStreamInfo, img_config.num_of_bufs,
+                             &gCamCapability[mCameraId]->padding_info,
+                             stream_cb, userdata, false);
+
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add reprocess stream failed, ret = %d", __func__, rc);
+        pStreamInfo->deallocate();
+        delete pStreamInfo;
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addDualReprocChannel
+ *
+ * DESCRIPTION: add a second reprocess channel that will do reprocess on frames
+ *              coming from another reproc channel
+ *
+ * PARAMETERS :
+ *   @pInputChannel : ptr to input channel whose frames will be post-processed
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addDualReprocChannel(
+                                                      QCameraChannel *pInputChannel)
+
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    if (pInputChannel == NULL) {
+        ALOGE("%s: input channel obj is NULL", __func__);
+        return NULL;
+    }
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    // set up dual reprocess channel attributes
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        dual_reproc_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    // pp feature config
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+    if(mParameters.isfssrEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_FSSR;
+        pp_config.zoom_level =
+                (uint8_t) mParameters.getInt(CameraParameters::KEY_ZOOM);
+    } else {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_FSSR;
+    }
+
+    uint8_t minStreamBufNum = getBufNumRequired(CAM_STREAM_TYPE_OFFLINE_PROC);
+
+    CDBG_HIGH("%s: Allocating %d dual reproc buffers",__func__,minStreamBufNum);
+
+    bool offlineReproc = isRegularCapture();
+    rc = pChannel->addReprocStreamsFromSource(*this,
+                                              pp_config,
+                                              pInputChannel,
+                                              minStreamBufNum,
+                                              mParameters.getNumOfSnapshots(),
+                                              &gCamCapability[mCameraId]->padding_info,
+                                              mParameters,
+                                              mLongshotEnabled,
+                                              offlineReproc);
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : addChannel
+ *
+ * DESCRIPTION: add a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    switch (ch_type) {
+    case QCAMERA_CH_TYPE_ZSL:
+        rc = addZSLChannel();
+        break;
+    case QCAMERA_CH_TYPE_CAPTURE:
+        rc = addCaptureChannel();
+        break;
+    case QCAMERA_CH_TYPE_PREVIEW:
+        rc = addPreviewChannel();
+        break;
+    case QCAMERA_CH_TYPE_VIDEO:
+        rc = addVideoChannel();
+        break;
+    case QCAMERA_CH_TYPE_SNAPSHOT:
+        rc = addSnapshotChannel();
+        break;
+    case QCAMERA_CH_TYPE_RAW:
+        rc = addRawChannel();
+        break;
+    case QCAMERA_CH_TYPE_METADATA:
+        rc = addMetaDataChannel();
+        break;
+    default:
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : delChannel
+ *
+ * DESCRIPTION: delete a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *   @destroy : delete context as well
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::delChannel(qcamera_ch_type_enum_t ch_type,
+                                              bool destroy)
+{
+    if (m_channels[ch_type] != NULL) {
+        if (destroy) {
+            delete m_channels[ch_type];
+            m_channels[ch_type] = NULL;
+        } else {
+            m_channels[ch_type]->deleteChannel();
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startChannel
+ *
+ * DESCRIPTION: start a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    if (m_channels[ch_type] != NULL) {
+        rc = m_channels[ch_type]->config();
+        if (NO_ERROR == rc) {
+            rc = m_channels[ch_type]->start();
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopChannel
+ *
+ * DESCRIPTION: stop a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    if (m_channels[ch_type] != NULL) {
+        rc = m_channels[ch_type]->stop();
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : preparePreview
+ *
+ * DESCRIPTION: add channels needed for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::preparePreview()
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        rc = addChannel(QCAMERA_CH_TYPE_ZSL);
+        if (rc != NO_ERROR) {
+            return rc;
+        }
+    } else {
+        bool recordingHint = mParameters.getRecordingHintValue();
+        if(recordingHint) {
+            //stop face detection,longshot,etc if turned ON in Camera mode
+            int32_t arg; //dummy arg
+#ifndef VANILLA_HAL
+            if (isLongshotEnabled()) {
+                sendCommand(CAMERA_CMD_LONGSHOT_OFF, arg, arg);
+            }
+#endif
+            if (mParameters.isFaceDetectionEnabled()) {
+                sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, arg, arg);
+            }
+#ifndef VANILLA_HAL
+            if (mParameters.isHistogramEnabled()) {
+                sendCommand(CAMERA_CMD_HISTOGRAM_OFF, arg, arg);
+            }
+#endif
+
+            cam_dimension_t videoSize;
+            mParameters.getVideoSize(&videoSize.width, &videoSize.height);
+            if (!is4k2kResolution(&videoSize)) {
+               rc = addChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+               if (rc != NO_ERROR) {
+                   return rc;
+               }
+            }
+            rc = addChannel(QCAMERA_CH_TYPE_VIDEO);
+            if (rc != NO_ERROR) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                return rc;
+            }
+        }
+
+        rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
+        if (rc != NO_ERROR) {
+            if (recordingHint) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                delChannel(QCAMERA_CH_TYPE_VIDEO);
+            }
+            return rc;
+        }
+
+        if (!recordingHint) {
+            waitDefferedWork(mMetadataJob);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : unpreparePreview
+ *
+ * DESCRIPTION: delete channels for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unpreparePreview()
+{
+    delChannel(QCAMERA_CH_TYPE_ZSL);
+    delChannel(QCAMERA_CH_TYPE_PREVIEW);
+    delChannel(QCAMERA_CH_TYPE_VIDEO);
+    delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+}
+
+/*===========================================================================
+ * FUNCTION   : playShutter
+ *
+ * DESCRIPTION: send request to play shutter sound
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::playShutter()
+{
+    if (mNotifyCb == NULL ||
+        msgTypeEnabledWithLock(CAMERA_MSG_SHUTTER) == 0) {
+        CDBG("%s: shutter msg not enabled or NULL cb", __func__);
+        return;
+    }
+    CDBG_HIGH("%s: CAMERA_MSG_SHUTTER ", __func__);
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_SHUTTER;
+    cbArg.ext1 = 0;
+    cbArg.ext2 = false;
+    m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION   : getChannelByHandle
+ *
+ * DESCRIPTION: return a channel by its handle
+ *
+ * PARAMETERS :
+ *   @channelHandle : channel handle
+ *
+ * RETURN     : a channel obj if found, NULL if not found
+ *==========================================================================*/
+QCameraChannel *QCamera2HardwareInterface::getChannelByHandle(uint32_t channelHandle)
+{
+    for(int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL &&
+            m_channels[i]->getMyHandle() == channelHandle) {
+            return m_channels[i];
+        }
+    }
+
+    return NULL;
+}
+/*===========================================================================
+ * FUNCTION   : needPreviewFDCallback
+ *
+ * DESCRIPTION: decides whether a preview face detection callback is needed
+ *
+ * PARAMETERS :
+ *   @num_faces : number of faces detected in the current frame
+ *
+ * RETURN     : bool type of status
+ *              true  -- preview FD callback is needed
+ *              false -- preview FD callback can be skipped
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needPreviewFDCallback(uint8_t num_faces)
+{
+    if (num_faces == 0 && mNumPreviewFaces == 0) {
+        return false;
+    }
+
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : processFaceDetectionResult
+ *
+ * DESCRIPTION: process face detection result
+ *
+ * PARAMETERS :
+ *   @fd_data : ptr to face detection result struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFaceDetectionResult(cam_face_detection_data_t *fd_data)
+{
+    if (!mParameters.isFaceDetectionEnabled()) {
+        CDBG_HIGH("%s: FaceDetection not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    qcamera_face_detect_type_t fd_type = fd_data->fd_type;
+    if ((NULL == mDataCb) ||
+        (fd_type == QCAMERA_FD_PREVIEW && (!msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ||
+        (!needPreviewFDCallback(fd_data->num_faces_detected)))) ||
+#ifndef VANILLA_HAL
+        (fd_type == QCAMERA_FD_SNAPSHOT && !msgTypeEnabled(CAMERA_MSG_META_DATA))
+#else
+        (fd_type == QCAMERA_FD_SNAPSHOT)
+#endif
+        ) {
+        CDBG_HIGH("%s: metadata msgtype not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    cam_dimension_t display_dim;
+    mParameters.getStreamDimension(CAM_STREAM_TYPE_PREVIEW, display_dim);
+    if (display_dim.width <= 0 || display_dim.height <= 0) {
+        ALOGE("%s: Invalid preview width or height (%d x %d)",
+              __func__, display_dim.width, display_dim.height);
+        return UNKNOWN_ERROR;
+    }
+
+    // process face detection result
+    // need separate face detection in preview or snapshot type
+    size_t faceResultSize = 0;
+    size_t data_len = 0;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        //fd for preview frames
+        faceResultSize = sizeof(camera_frame_metadata_t);
+        faceResultSize += sizeof(camera_face_t) * MAX_ROI;
+    }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+        // fd for snapshot frames
+        //check if face is detected in this frame
+        if(fd_data->num_faces_detected > 0){
+            data_len = sizeof(camera_frame_metadata_t) +
+                         sizeof(camera_face_t) * fd_data->num_faces_detected;
+        }else{
+            //no face
+            data_len = 0;
+        }
+#endif
+        faceResultSize = 1 *sizeof(int)    //meta data type
+                       + 1 *sizeof(int)    // meta data len
+                       + data_len;         //data
+    }
+
+    camera_memory_t *faceResultBuffer = mGetMemory(-1,
+                                                   faceResultSize,
+                                                   1,
+                                                   mCallbackCookie);
+    if ( NULL == faceResultBuffer ) {
+        ALOGE("%s: Not enough memory for face result data",
+              __func__);
+        return NO_MEMORY;
+    }
+
+    unsigned char *pFaceResult = ( unsigned char * ) faceResultBuffer->data;
+    memset(pFaceResult, 0, faceResultSize);
+    unsigned char *faceData = NULL;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        faceData = pFaceResult;
+        mNumPreviewFaces = fd_data->num_faces_detected;
+    }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+        //need fill meta type and meta data len first
+        int *data_header = (int* )pFaceResult;
+        data_header[0] = CAMERA_META_DATA_FD;
+        data_header[1] = (int)data_len;
+
+        if(data_len <= 0){
+            //if face is not valid or do not have face, return
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_META_DATA;
+            cbArg.data = faceResultBuffer;
+            cbArg.user_data = faceResultBuffer;
+            cbArg.cookie = this;
+            cbArg.release_cb = releaseCameraMemory;
+            int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: fail sending notification", __func__);
+                faceResultBuffer->release(faceResultBuffer);
+            }
+            return rc;
+        }
+
+        faceData = pFaceResult + 2 *sizeof(int); //skip two int length
+#endif
+    }
+
+    camera_frame_metadata_t *roiData = (camera_frame_metadata_t * ) faceData;
+    camera_face_t *faces = (camera_face_t *) ( faceData + sizeof(camera_frame_metadata_t) );
+
+    roiData->number_of_faces = fd_data->num_faces_detected;
+    roiData->faces = faces;
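+    // Map each face's bounding box and landmark coordinates from the preview
+    // frame dimensions into the (-1000, 1000) coordinate space used by the
+    // Android camera_face_t structure.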
+    if (roiData->number_of_faces > 0) {
+        for (int i = 0; i < roiData->number_of_faces; i++) {
+            faces[i].id = fd_data->faces[i].face_id;
+            faces[i].score = fd_data->faces[i].score;
+
+            // left
+            faces[i].rect[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.left, display_dim.width, 2000, -1000);
+
+            // top
+            faces[i].rect[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.top, display_dim.height, 2000, -1000);
+
+            // right
+            faces[i].rect[2] = faces[i].rect[0] +
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.width, display_dim.width, 2000, 0);
+
+             // bottom
+            faces[i].rect[3] = faces[i].rect[1] +
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.height, display_dim.height, 2000, 0);
+
+            // Center of left eye
+            faces[i].left_eye[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].left_eye_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].left_eye[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].left_eye_center.y, display_dim.height, 2000, -1000);
+
+            // Center of right eye
+            faces[i].right_eye[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].right_eye_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].right_eye[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].right_eye_center.y, display_dim.height, 2000, -1000);
+
+            // Center of mouth
+            faces[i].mouth[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].mouth_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].mouth[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].mouth_center.y, display_dim.height, 2000, -1000);
+
+#ifndef VANILLA_HAL
+            faces[i].smile_degree = fd_data->faces[i].smile_degree;
+            faces[i].smile_score = fd_data->faces[i].smile_confidence;
+            faces[i].blink_detected = fd_data->faces[i].blink_detected;
+            faces[i].face_recognised = fd_data->faces[i].face_recognised;
+            faces[i].gaze_angle = fd_data->faces[i].gaze_angle;
+
+            // upscale by 2 to recover from downscaling
+            faces[i].updown_dir = fd_data->faces[i].updown_dir * 2;
+            faces[i].leftright_dir = fd_data->faces[i].leftright_dir * 2;
+            faces[i].roll_dir = fd_data->faces[i].roll_dir * 2;
+
+            faces[i].leye_blink = fd_data->faces[i].left_blink;
+            faces[i].reye_blink = fd_data->faces[i].right_blink;
+            faces[i].left_right_gaze = fd_data->faces[i].left_right_gaze;
+            faces[i].top_bottom_gaze = fd_data->faces[i].top_bottom_gaze;
+#endif
+
+        }
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        cbArg.msg_type = CAMERA_MSG_PREVIEW_METADATA;
+    }
+#ifndef VANILLA_HAL
+    else if(fd_type == QCAMERA_FD_SNAPSHOT){
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+    }
+#endif
+    cbArg.data = faceResultBuffer;
+    cbArg.metadata = roiData;
+    cbArg.user_data = faceResultBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: fail sending notification", __func__);
+        faceResultBuffer->release(faceResultBuffer);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseCameraMemory
+ *
+ * DESCRIPTION: releases camera memory objects
+ *
+ * PARAMETERS :
+ *   @data    : buffer to be released
+ *   @cookie  : context data
+ *   @cbStatus: callback status
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::releaseCameraMemory(void *data,
+                                                    void */*cookie*/,
+                                                    int32_t /*cbStatus*/)
+{
+    camera_memory_t *mem = ( camera_memory_t * ) data;
+    if ( NULL != mem ) {
+        mem->release(mem);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : returnStreamBuffer
+ *
+ * DESCRIPTION: returns back a stream buffer
+ *
+ * PARAMETERS :
+ *   @data    : buffer to be released
+ *   @cookie  : context data
+ *   @cbStatus: callback status
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::returnStreamBuffer(void *data,
+                                                   void *cookie,
+                                                   int32_t /*cbStatus*/)
+{
+    QCameraStream *stream = ( QCameraStream * ) cookie;
+    int idx = *((int *)data);
+    if ((NULL != stream) && (0 <= idx)) {
+        stream->bufDone((uint32_t)idx);
+    } else {
+        ALOGE("%s: Cannot return buffer %d %p", __func__, idx, cookie);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : processHistogramStats
+ *
+ * DESCRIPTION: process histogram stats
+ *
+ * PARAMETERS :
+ *   @hist_data : ptr to histogram stats struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHistogramStats(cam_hist_stats_t &stats_data)
+{
+#ifndef VANILLA_HAL
+    if (!mParameters.isHistogramEnabled()) {
+        CDBG("%s: Histogram not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    camera_memory_t *histBuffer = mGetMemory(-1,
+                                             sizeof(cam_histogram_data_t),
+                                             1,
+                                             mCallbackCookie);
+    if ( NULL == histBuffer ) {
+        ALOGE("%s: Not enough memory for histogram data",
+              __func__);
+        return NO_MEMORY;
+    }
+
+    cam_histogram_data_t *pHistData = (cam_histogram_data_t *)histBuffer->data;
+    if (pHistData == NULL) {
+        ALOGE("%s: memory data ptr is NULL", __func__);
+        histBuffer->release(histBuffer);
+        return UNKNOWN_ERROR;
+    }
+
+    switch (stats_data.type) {
+    case CAM_HISTOGRAM_TYPE_BAYER:
+        *pHistData = stats_data.bayer_stats.gb_stats;
+        break;
+    case CAM_HISTOGRAM_TYPE_YUV:
+        *pHistData = stats_data.yuv_stats;
+        break;
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_STATS_DATA;
+    cbArg.data = histBuffer;
+    cbArg.user_data = histBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: fail sending notification", __func__);
+        histBuffer->release(histBuffer);
+    }
+#endif
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcThermalLevel
+ *
+ * DESCRIPTION: Calculates the target fps range depending on
+ *              the thermal level.
+ *
+ * PARAMETERS :
+ *   @level    : received thermal level
+ *   @minFPS   : minimum configured fps range
+ *   @maxFPS   : maximum configured fps range
+ *   @adjustedRange : target fps range
+ *   @skipPattern : target skip pattern
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::calcThermalLevel(
+            qcamera_thermal_level_enum_t level,
+            const int minFPSi,
+            const int maxFPSi,
+            cam_fps_range_t &adjustedRange,
+            enum msm_vfe_frame_skip_pattern &skipPattern)
+{
+    const float minFPS = (float)minFPSi;
+    const float maxFPS = (float)maxFPSi;
+
+    // Initialize video fps to preview fps
+    float minVideoFps = minFPS, maxVideoFps = maxFPS;
+    cam_fps_range_t videoFps;
+    // If HFR mode, update video fps accordingly
+    if(isHFRMode()) {
+        mParameters.getHfrFps(videoFps);
+        minVideoFps = videoFps.video_min_fps;
+        maxVideoFps = videoFps.video_max_fps;
+    }
+
+    CDBG_HIGH("%s: level: %d, preview minfps %f, preview maxfps %f, "
+              "video minfps %f, video maxfps %f",
+            __func__, level, minFPS, maxFPS, minVideoFps, maxVideoFps);
+
+    switch(level) {
+    case QCAMERA_THERMAL_NO_ADJUSTMENT:
+        {
+            adjustedRange.min_fps = minFPS / 1000.0f;
+            adjustedRange.max_fps = maxFPS / 1000.0f;
+            adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+            adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+            skipPattern = NO_SKIP;
+        }
+        break;
+    case QCAMERA_THERMAL_SLIGHT_ADJUSTMENT:
+        {
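+            // Halve the configured fps (inputs are in fps*1000); clamped to at least 1 fps below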
+            adjustedRange.min_fps = (minFPS / 2) / 1000.0f;
+            adjustedRange.max_fps = (maxFPS / 2) / 1000.0f;
+            adjustedRange.video_min_fps = (minVideoFps / 2) / 1000.0f;
+            adjustedRange.video_max_fps = (maxVideoFps / 2 ) / 1000.0f;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            if ( adjustedRange.video_min_fps < 1 ) {
+                adjustedRange.video_min_fps = 1;
+            }
+            if ( adjustedRange.video_max_fps < 1 ) {
+                adjustedRange.video_max_fps = 1;
+            }
+            skipPattern = EVERY_2FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_BIG_ADJUSTMENT:
+        {
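+            // Quarter the configured fps (inputs are in fps*1000); clamped to at least 1 fps below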
+            adjustedRange.min_fps = (minFPS / 4) / 1000.0f;
+            adjustedRange.max_fps = (maxFPS / 4) / 1000.0f;
+            adjustedRange.video_min_fps = (minVideoFps / 4) / 1000.0f;
+            adjustedRange.video_max_fps = (maxVideoFps / 4 ) / 1000.0f;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            if ( adjustedRange.video_min_fps < 1 ) {
+                adjustedRange.video_min_fps = 1;
+            }
+            if ( adjustedRange.video_max_fps < 1 ) {
+                adjustedRange.video_max_fps = 1;
+            }
+            skipPattern = EVERY_4FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_SHUTDOWN:
+        {
+            // Stop Preview?
+            // Set lowest min FPS for now
+            adjustedRange.min_fps = minFPS/1000.0f;
+            adjustedRange.max_fps = minFPS/1000.0f;
+            for (size_t i = 0 ; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt ; i++) {
+                if (gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps < adjustedRange.min_fps) {
+                    adjustedRange.min_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
+                    adjustedRange.max_fps = adjustedRange.min_fps;
+                }
+            }
+            skipPattern = MAX_SKIP;
+            adjustedRange.video_min_fps = adjustedRange.min_fps;
+            adjustedRange.video_max_fps = adjustedRange.max_fps;
+        }
+        break;
+    default:
+        {
+            CDBG("%s: Invalid thermal level %d", __func__, level);
+            return BAD_VALUE;
+        }
+        break;
+    }
+    CDBG_HIGH("%s: Thermal level %d, FPS [%3.2f,%3.2f, %3.2f,%3.2f], frameskip %d",
+          __func__, level, adjustedRange.min_fps, adjustedRange.max_fps,
+          adjustedRange.video_min_fps, adjustedRange.video_max_fps,skipPattern);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : recalcFPSRange
+ *
+ * DESCRIPTION: adjust the configured fps range according to
+ *              the last thermal level.
+ *
+ * PARAMETERS :
+ *   @minFPS   : minimum configured fps range
+ *   @maxFPS   : maximum configured fps range
+ *   @adjustedRange : target fps range
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::recalcFPSRange(int &minFPS, int &maxFPS,
+        cam_fps_range_t &adjustedRange)
+{
+    enum msm_vfe_frame_skip_pattern skipPattern;
+    calcThermalLevel(mThermalLevel,
+                     minFPS,
+                     maxFPS,
+                     adjustedRange,
+                     skipPattern);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateThermalLevel
+ *
+ * DESCRIPTION: update thermal level depending on thermal events
+ *
+ * PARAMETERS :
+ *   @level   : thermal level
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateThermalLevel(
+            qcamera_thermal_level_enum_t level)
+{
+    int ret = NO_ERROR;
+    cam_fps_range_t adjustedRange;
+    int minFPS, maxFPS;
+    enum msm_vfe_frame_skip_pattern skipPattern;
+
+    pthread_mutex_lock(&m_parm_lock);
+
+    if (!mCameraOpened) {
+        CDBG("%s: Camera is not opened, no need to update camera parameters", __func__);
+        pthread_mutex_unlock(&m_parm_lock);
+        return NO_ERROR;
+    }
+
+    mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+    qcamera_thermal_mode thermalMode = mParameters.getThermalMode();
+    calcThermalLevel(level, minFPS, maxFPS, adjustedRange, skipPattern);
+    mThermalLevel = level;
+
+    if (thermalMode == QCAMERA_THERMAL_ADJUST_FPS)
+        ret = mParameters.adjustPreviewFpsRange(&adjustedRange);
+    else if (thermalMode == QCAMERA_THERMAL_ADJUST_FRAMESKIP)
+        ret = mParameters.setFrameSkip(skipPattern);
+    else
+        ALOGE("%s: Incorrect thermal mode %d", __func__, thermalMode);
+
+    pthread_mutex_unlock(&m_parm_lock);
+
+    return ret;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters
+ *
+ * PARAMETERS :
+ *   @parms       : input parameters string
+ *   @needRestart : output, flag to indicate if preview restart is needed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateParameters(const char *parms, bool &needRestart)
+{
+    int rc = NO_ERROR;
+    pthread_mutex_lock(&m_parm_lock);
+    String8 str = String8(parms);
+    QCameraParameters param(str);
+    rc =  mParameters.updateParameters(param, needRestart);
+
+    // update stream based parameter settings
+    for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            m_channels[i]->UpdateStreamBasedParameters(mParameters);
+        }
+    }
+    pthread_mutex_unlock(&m_parm_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParameterChanges
+ *
+ * DESCRIPTION: commit parameter changes to the backend to take effect
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : This function must be called after updateParameters.
+ *              Otherwise, no change will be passed to backend to take effect.
+ *==========================================================================*/
+int QCamera2HardwareInterface::commitParameterChanges()
+{
+    int rc = NO_ERROR;
+    pthread_mutex_lock(&m_parm_lock);
+    rc = mParameters.commitParameters();
+    if (rc == NO_ERROR) {
+        // update number of snapshot based on committed parameters setting
+        rc = mParameters.setNumOfSnapshot();
+    }
+    pthread_mutex_unlock(&m_parm_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : needDebugFps
+ *
+ * DESCRIPTION: check whether fps log info needs to be printed out
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: need to print out fps log
+ *              false: no need to print out fps log
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDebugFps()
+{
+    bool needFps = false;
+    pthread_mutex_lock(&m_parm_lock);
+    needFps = mParameters.isFpsDebugEnabled();
+    pthread_mutex_unlock(&m_parm_lock);
+    return needFps;
+}
+
+/*===========================================================================
+ * FUNCTION   : isCACEnabled
+ *
+ * DESCRIPTION: check if CAC is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: CAC is enabled
+ *              false: CAC is disabled
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCACEnabled()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.feature.cac", prop, "0");
+    int enableCAC = atoi(prop);
+    return enableCAC == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : is4k2kResolution
+ *
+ * DESCRIPTION: check if resolution is 4Kx2K (4096x2160) or UHD 4K (3840x2160)
+ *
+ * PARAMETERS :
+ *   @resolution : resolution to be checked
+ *
+ * RETURN     : true: resolution is 4Kx2K
+ *              false: otherwise
+ *==========================================================================*/
+bool QCamera2HardwareInterface::is4k2kResolution(cam_dimension_t* resolution)
+{
+   bool enabled = false;
+   if ((resolution->width == 4096 && resolution->height == 2160) ||
+       (resolution->width == 3840 && resolution->height == 2160) ) {
+      enabled = true;
+   }
+   return enabled;
+}
+
+
+/*===========================================================================
+ *
+ * FUNCTION   : isPreviewRestartEnabled
+ *
+ * DESCRIPTION: Check whether preview should be restarted automatically
+ *              during image capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isPreviewRestartEnabled()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.feature.restart", prop, "0");
+    int earlyRestart = atoi(prop);
+    return earlyRestart == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : isAFRunning
+ *
+ * DESCRIPTION: if AF is in progress while in Auto/Macro focus modes
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: AF in progress
+ *              false: AF not in progress
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isAFRunning()
+{
+    bool isAFInProgress = (m_currentFocusState == CAM_AF_SCANNING &&
+            (mParameters.getFocusMode() == CAM_FOCUS_MODE_AUTO ||
+            mParameters.getFocusMode() == CAM_FOCUS_MODE_MACRO));
+
+    return isAFInProgress;
+}
+
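+/*===========================================================================
+ * FUNCTION   : needDualReprocess
+ *
+ * DESCRIPTION: if a second (dual) reprocess pass is needed (e.g. for FSSR)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/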
+bool QCamera2HardwareInterface::needDualReprocess()
+{
+    bool ret = false;
+    pthread_mutex_lock(&m_parm_lock);
+    if (mParameters.isfssrEnabled()) {
+        CDBG_HIGH("%s: need do reprocess for FSSR", __func__);
+        ret = true;
+    }
+    pthread_mutex_unlock(&m_parm_lock);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : needReprocess
+ *
+ * DESCRIPTION: if reprocess is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needReprocess()
+{
+    pthread_mutex_lock(&m_parm_lock);
+    if (!mParameters.isJpegPictureFormat() &&
+        !mParameters.isNV21PictureFormat()) {
+        // RAW image, no need to reprocess
+        pthread_mutex_unlock(&m_parm_lock);
+        return false;
+    }
+
+    if (mParameters.isHDREnabled()) {
+        CDBG_HIGH("%s: need do reprocess for HDR", __func__);
+        pthread_mutex_unlock(&m_parm_lock);
+        return true;
+    }
+
+    uint32_t feature_mask = 0;
+    uint32_t required_mask = 0;
+    feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
+    required_mask = gCamCapability[mCameraId]->min_required_pp_mask;
+    if (((feature_mask & CAM_QCOM_FEATURE_CPP) > 0) &&
+        (getJpegRotation() > 0)) {
+            // current rotation is not zero, and pp has the capability to process rotation
+            CDBG_HIGH("%s: need to do reprocess for rotation=%d", __func__, getJpegRotation());
+            pthread_mutex_unlock(&m_parm_lock);
+            return true;
+    }
+
+    if (isZSLMode()) {
+        if (((gCamCapability[mCameraId]->min_required_pp_mask > 0) ||
+             mParameters.isWNREnabled() || isCACEnabled())) {
+            // TODO: add for ZSL HDR later
+            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
+            pthread_mutex_unlock(&m_parm_lock);
+            return true;
+        }
+
+        int snapshot_flipMode =
+            mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+        if (snapshot_flipMode > 0) {
+            CDBG_HIGH("%s: Need do flip for snapshot in ZSL mode", __func__);
+            pthread_mutex_unlock(&m_parm_lock);
+            return true;
+        }
+    } else {
+        if (required_mask & CAM_QCOM_FEATURE_CPP) {
+            CDBG_HIGH("%s: Need CPP in non-ZSL mode", __func__);
+            pthread_mutex_unlock(&m_parm_lock);
+            return true;
+        }
+    }
+
+    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SCALE) > 0 &&
+        mParameters.m_reprocScaleParam.isScaleEnabled() &&
+        mParameters.m_reprocScaleParam.isUnderScaling()) {
+        // Reproc scale is enabled and the current snapshot also needs scaling
+        CDBG_HIGH("%s: need do reprocess for scale", __func__);
+        pthread_mutex_unlock(&m_parm_lock);
+        return true;
+    }
+
+    if (mParameters.isUbiFocusEnabled() |
+        mParameters.isMultiTouchFocusEnabled() |
+        mParameters.isChromaFlashEnabled() |
+        mParameters.isHDREnabled() |
+        mParameters.isfssrEnabled() |
+        mParameters.isOptiZoomEnabled()) {
+        CDBG_HIGH("%s: need reprocess for |UbiFocus=%d|ChromaFlash=%d"
+                  "|OptiZoom=%d|fssr=%d|MultiTouchFocus=%d",__func__,
+                  mParameters.isUbiFocusEnabled(),
+                  mParameters.isChromaFlashEnabled(),
+                  mParameters.isOptiZoomEnabled(),
+                  mParameters.isfssrEnabled(),
+                  mParameters.isMultiTouchFocusEnabled());
+        pthread_mutex_unlock(&m_parm_lock);
+        return true;
+    }
+
+    pthread_mutex_unlock(&m_parm_lock);
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : needRotationReprocess
+ *
+ * DESCRIPTION: if rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needRotationReprocess()
+{
+    pthread_mutex_lock(&m_parm_lock);
+    if (!mParameters.isJpegPictureFormat() &&
+        !mParameters.isNV21PictureFormat()) {
+        // RAW image, no need to reprocess
+        pthread_mutex_unlock(&m_parm_lock);
+        return false;
+    }
+
+    uint32_t feature_mask = 0;
+    feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
+    if (((feature_mask & CAM_QCOM_FEATURE_CPP) > 0) &&
+        (getJpegRotation() > 0)) {
+        // current rotation is not zero
+        // and pp has the capability to process rotation
+        CDBG_HIGH("%s: need to do reprocess for rotation=%d",
+              __func__,
+              getJpegRotation());
+        pthread_mutex_unlock(&m_parm_lock);
+        return true;
+    }
+
+    pthread_mutex_unlock(&m_parm_lock);
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : needScaleReprocess
+ *
+ * DESCRIPTION: if scale needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needScaleReprocess()
+{
+    pthread_mutex_lock(&m_parm_lock);
+    if (!mParameters.isJpegPictureFormat() &&
+        !mParameters.isNV21PictureFormat()) {
+        // RAW image, no need to reprocess
+        pthread_mutex_unlock(&m_parm_lock);
+        return false;
+    }
+
+    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SCALE) > 0 &&
+        mParameters.m_reprocScaleParam.isScaleEnabled() &&
+        mParameters.m_reprocScaleParam.isUnderScaling()) {
+        // Reproc scale is enabled and the current snapshot also needs scaling
+        CDBG_HIGH("%s: need do reprocess for scale", __func__);
+        pthread_mutex_unlock(&m_parm_lock);
+        return true;
+    }
+
+    pthread_mutex_unlock(&m_parm_lock);
+    return false;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get user set thumbnail size
+ *
+ * PARAMETERS :
+ *   @dim     : output of thumbnail dimension
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::getThumbnailSize(cam_dimension_t &dim)
+{
+    pthread_mutex_lock(&m_parm_lock);
+    mParameters.getThumbnailSize(&dim.width, &dim.height);
+    pthread_mutex_unlock(&m_parm_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get user set jpeg quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg quality setting
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::getJpegQuality()
+{
+    uint32_t quality = 0;
+    pthread_mutex_lock(&m_parm_lock);
+    quality =  mParameters.getJpegQuality();
+    pthread_mutex_unlock(&m_parm_lock);
+    return quality;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegRotation
+ *
+ * DESCRIPTION: get rotation information to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation information
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::getJpegRotation() {
+    return mCaptureRotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : getOrientation
+ *
+ * DESCRIPTION: get rotation information from camera parameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation information
+ *==========================================================================*/
+void QCamera2HardwareInterface::getOrientation() {
+    pthread_mutex_lock(&m_parm_lock);
+    mCaptureRotation = mParameters.getJpegRotation();
+    pthread_mutex_unlock(&m_parm_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : exif data from user setting and GPS
+ *==========================================================================*/
+QCameraExif *QCamera2HardwareInterface::getExifData()
+{
+    QCameraExif *exif = new QCameraExif();
+    if (exif == NULL) {
+        ALOGE("%s: No memory for QCameraExif", __func__);
+        return NULL;
+    }
+
+    int32_t rc = NO_ERROR;
+
+    pthread_mutex_lock(&m_parm_lock);
+
+    //set flash value
+    mFlash = mParameters.getFlashValue();
+    mRedEye = mParameters.getRedEyeValue();
+    mFlashPresence = mParameters.getSupportedFlashModes();
+
+    // add exif entries
+    String8 dateTime;
+    String8 subsecTime;
+    rc = mParameters.getExifDateTime(dateTime,subsecTime);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+
+
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+
+    } else {
+        ALOGE("%s: getExifDateTime failed", __func__);
+    }
+
+    rat_t focalLength;
+    rc = mParameters.getExifFocalLength(&focalLength);
+    if (rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(focalLength));
+    } else {
+        ALOGE("%s: getExifFocalLength failed", __func__);
+    }
+
+    uint16_t isoSpeed = mParameters.getExifIsoSpeed();
+    if (getSensorType() != CAM_SENSOR_YUV) {
+        exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&(isoSpeed));
+    }
+
+    char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+    uint32_t count = 0;
+    rc = mParameters.getExifGpsProcessingMethod(gpsProcessingMethod, count);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+                       EXIF_ASCII,
+                       count,
+                       (void *)gpsProcessingMethod);
+    } else {
+        CDBG("%s: getExifGpsProcessingMethod failed", __func__);
+    }
+
+    rat_t latitude[3];
+    char latRef[2];
+    rc = mParameters.getExifLatitude(latitude, latRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)latitude);
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)latRef);
+    } else {
+        CDBG("%s: getExifLatitude failed", __func__);
+    }
+
+    rat_t longitude[3];
+    char lonRef[2];
+    rc = mParameters.getExifLongitude(longitude, lonRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)longitude);
+
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)lonRef);
+    } else {
+        CDBG("%s: getExifLongitude failed", __func__);
+    }
+
+    rat_t altitude;
+    char altRef;
+    rc = mParameters.getExifAltitude(&altitude, &altRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(altitude));
+
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+                       EXIF_BYTE,
+                       1,
+                       (void *)&altRef);
+    } else {
+        CDBG("%s: getExifAltitude failed", __func__);
+    }
+
+    char gpsDateStamp[20];
+    rat_t gpsTimeStamp[3];
+    rc = mParameters.getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
+                       EXIF_ASCII,
+                       (uint32_t)(strlen(gpsDateStamp) + 1),
+                       (void *)gpsDateStamp);
+
+        exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)gpsTimeStamp);
+    } else {
+        ALOGE("%s: getExifGpsDateTimeStamp failed", __func__);
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MAKE,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifMaker failed", __func__);
+    }
+
+    if (property_get("ro.product.model", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MODEL,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifModel failed", __func__);
+    }
+
+    pthread_mutex_unlock(&m_parm_lock);
+
+    return exif;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set if histogram should be enabled
+ *
+ * PARAMETERS :
+ *   @histogram_en : bool flag if histogram should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setHistogram(bool histogram_en)
+{
+    return mParameters.setHistogram(histogram_en);
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set if face detection should be enabled
+ *
+ * PARAMETERS :
+ *   @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFaceDetection(bool enabled)
+{
+    return mParameters.setFaceDetection(enabled);
+}
+
+/*===========================================================================
+ * FUNCTION   : needProcessPreviewFrame
+ *
+ * DESCRIPTION: returns whether preview frame need to be displayed
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true: preview frame needs to be processed and displayed
+ *              false: preview frame can be dropped
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needProcessPreviewFrame()
+{
+    return m_stateMachine.isPreviewRunning()
+            && mParameters.isDisplayFrameNeeded();
+};
+
+/*===========================================================================
+ * FUNCTION   : prepareHardwareForSnapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot, such as LED
+ *
+ * PARAMETERS :
+ *   @afNeeded: flag indicating if Auto Focus needs to be done during preparation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::prepareHardwareForSnapshot(int32_t afNeeded)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s: Prepare hardware such as LED",__func__);
+    return mCameraHandle->ops->prepare_snapshot(mCameraHandle->camera_handle,
+                                                afNeeded);
+}
+
+/*===========================================================================
+ * FUNCTION   : needFDMetadata
+ *
+ * DESCRIPTION: check whether we need to process Face Detection metadata in this channel
+ *
+ * PARAMETERS :
+ *   @channel_type: channel type
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needFDMetadata(qcamera_ch_type_enum_t channel_type)
+{
+    //Note: Currently we only process ZSL channel
+    bool value = false;
+    if(channel_type == QCAMERA_CH_TYPE_ZSL){
+        //check if FD requirement is enabled
+        if(mParameters.isSnapshotFDNeeded() &&
+           mParameters.isFaceDetectionEnabled()){
+            value = true;
+            CDBG_HIGH("%s: Face Detection metadata is required in ZSL mode.", __func__);
+        }
+    }
+
+    return value;
+}
+
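+/*===========================================================================
+ * FUNCTION   : removeSizeFromList
+ *
+ * DESCRIPTION: removes the given size from a dimension list, if present
+ *
+ * PARAMETERS :
+ *   @size_list : list of dimensions
+ *   @length    : number of entries in the list
+ *   @size      : dimension to remove
+ *
+ * RETURN     : true: size was found and removed
+ *              false: size not found
+ *==========================================================================*/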
+bool QCamera2HardwareInterface::removeSizeFromList(cam_dimension_t* size_list,
+        size_t length, cam_dimension_t size)
+{
+   bool found = false;
+   size_t index = 0;
+   for (size_t i = 0; i < length; i++) {
+      if ((size_list[i].width == size.width
+           && size_list[i].height == size.height)) {
+         found = true;
+         index = i;
+         break;
+      }
+
+   }
+   if (found) {
+      // Shift the remaining entries left; stop at length - 1 to stay in bounds
+      for (size_t i = index; i < length - 1; i++) {
+         size_list[i] = size_list[i+1];
+      }
+   }
+   return found;
+}
+
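+/*===========================================================================
+ * FUNCTION   : copyList
+ *
+ * DESCRIPTION: copies a list of dimensions from src to dst
+ *
+ * PARAMETERS :
+ *   @src_list : source list
+ *   @dst_list : destination list
+ *   @len      : number of entries to copy
+ *
+ * RETURN     : None
+ *==========================================================================*/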
+void QCamera2HardwareInterface::copyList(cam_dimension_t *src_list,
+        cam_dimension_t *dst_list, size_t len) {
+    for (size_t i = 0; i < len; i++) {
+        dst_list[i] = src_list[i];
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : defferedWorkRoutine
+ *
+ * DESCRIPTION: data process routine that executes deferred tasks
+ *
+ * PARAMETERS :
+ *   @obj     : user data ptr (QCamera2HardwareInterface)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCamera2HardwareInterface::defferedWorkRoutine(void *obj)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)obj;
+    QCameraCmdThread *cmdThread = &pme->mDefferedWorkThread;
+    cmdThread->setName("CAM_defrdWrk");
+
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                        __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            CDBG_HIGH("%s: start data proc", __func__);
+            is_active = TRUE;
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            CDBG_HIGH("%s: stop data proc", __func__);
+            is_active = FALSE;
+            // signal cmd is completed
+            cam_sem_post(&cmdThread->sync_sem);
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                DeffWork *dw =
+                    reinterpret_cast<DeffWork *>(pme->mCmdQueue.dequeue());
+
+                if ( NULL == dw ) {
+                    ALOGE("%s : Invalid deferred work", __func__);
+                    break;
+                }
+
+                switch( dw->cmd ) {
+                case CMD_DEFF_ALLOCATE_BUFF:
+                    {
+                        QCameraChannel * pChannel = dw->args.allocArgs.ch;
+
+                        if ( NULL == pChannel ) {
+                            ALOGE("%s : Invalid deferred work channel",
+                                    __func__);
+                            break;
+                        }
+
+                        cam_stream_type_t streamType = dw->args.allocArgs.type;
+
+                        uint32_t iNumOfStreams = pChannel->getNumOfStreams();
+                        QCameraStream *pStream = NULL;
+                        for ( uint32_t i = 0; i < iNumOfStreams; ++i) {
+                            pStream = pChannel->getStreamByIndex(i);
+
+                            if ( NULL == pStream ) {
+                                break;
+                            }
+
+                            if ( pStream->isTypeOf(streamType)) {
+                                if ( pStream->allocateBuffers() ) {
+                                    ALOGE("%s: Error allocating buffers !!!",
+                                            __func__);
+                                }
+                                break;
+                            }
+                        }
+                        {
+                            Mutex::Autolock l(pme->mDeffLock);
+                            pme->mDeffOngoingJobs[dw->id] = false;
+                            delete dw;
+                            pme->mDeffCond.signal();
+                        }
+
+                    }
+                    break;
+                case CMD_DEFF_PPROC_START:
+                    {
+                        QCameraChannel * pChannel = dw->args.pprocArgs;
+                        assert(pChannel);
+
+                        if (pme->m_postprocessor.start(pChannel) != NO_ERROR) {
+                            ALOGE("%s: cannot start postprocessor", __func__);
+                            pme->delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                        }
+                        {
+                            Mutex::Autolock l(pme->mDeffLock);
+                            pme->mDeffOngoingJobs[dw->id] = false;
+                            delete dw;
+                            pme->mDeffCond.signal();
+                        }
+                    }
+                    break;
+                default:
+                    ALOGE("%s[%d]:  Incorrect command : %d",
+                          __func__,
+                          __LINE__,
+                          dw->cmd);
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : isCaptureShutterEnabled
+ *
+ * DESCRIPTION: Check whether shutter should be triggered immediately after
+ *              capture
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - shutter is triggered immediately after capture
+ *              false - shutter is not triggered immediately
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCaptureShutterEnabled()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.feature.shutter", prop, "0");
+    int enableShutter = atoi(prop);
+    return enableShutter == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : queueDefferedWork
+ *
+ * DESCRIPTION: function which queues deferred tasks
+ *
+ * PARAMETERS :
+ *   @cmd     : deferred task
+ *   @args    : deferred task arguments
+ *
+ * RETURN     : int32_t job id (>= 0) of the queued task on success
+ *              -1 if no free job slot is available
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::queueDefferedWork(DefferedWorkCmd cmd,
+                                                     DefferWorkArgs args)
+{
+    Mutex::Autolock l(mDeffLock);
+    for (uint32_t i = 0; i < MAX_ONGOING_JOBS; ++i) {
+        if (!mDeffOngoingJobs[i]) {
+            mCmdQueue.enqueue(new DeffWork(cmd, i, args));
+            mDeffOngoingJobs[i] = true;
+            mDefferedWorkThread.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB,
+                    FALSE,
+                    FALSE);
+            return (int32_t)i;
+        }
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : waitDefferedWork
+ *
+ * DESCRIPTION: waits for a deferred task to finish
+ *
+ * PARAMETERS :
+ *   @job_id  : deferred task id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::waitDefferedWork(int32_t &job_id)
+{
+    Mutex::Autolock l(mDeffLock);
+
+    if ((MAX_ONGOING_JOBS <= job_id) || (0 > job_id)) {
+        return NO_ERROR;
+    }
+
+    while ( mDeffOngoingJobs[job_id] == true ) {
+        mDeffCond.wait(mDeffLock);
+    }
+
+    job_id = MAX_ONGOING_JOBS;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isRegularCapture
+ *
+ * DESCRIPTION: Check configuration for regular capture
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - regular capture
+ *              false - other type of capture
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isRegularCapture()
+{
+    bool ret = false;
+
+    if (numOfSnapshotsExpected() == 1 &&
+        !isLongshotEnabled() &&
+        !mParameters.getRecordingHintValue() &&
+        !isZSLMode() && !(mParameters.isHDREnabled())) {
+            ret = true;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getLogLevel
+ *
+ * DESCRIPTION: Reads the log level property into a variable
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     :
+ *   None
+ *==========================================================================*/
+void QCamera2HardwareInterface::getLogLevel()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+
+    /*  Higher 4 bits : Value of Debug log level (Default level is 1 to print all CDBG_HIGH)
+        Lower 28 bits : Control mode for sub module logging(Only 3 sub modules in HAL)
+                        0x1 for HAL
+                        0x10 for mm-camera-interface
+                        0x100 for mm-jpeg-interface  */
+    property_get("persist.camera.hal.debug.mask", prop, "268435463"); // 0x10000007=268435463
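+    // Illustrative example: a value of 536870913 (0x20000001) selects debug
+    // log level 2 in the upper 4 bits and enables HAL module logging (0x1).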
+    uint32_t temp = (uint32_t) atoi(prop);
+    uint32_t log_level = ((temp >> 28) & 0xF);
+    uint32_t debug_mask = (temp & HAL_DEBUG_MASK_HAL);
+    if (debug_mask > 0)
+        gCamHalLogLevel = log_level;
+    else
+        gCamHalLogLevel = 0; // Debug logs are not required if debug_mask is zero
+
+    ALOGI("%s gCamHalLogLevel=%d",__func__, gCamHalLogLevel);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSensorType
+ *
+ * DESCRIPTION: Returns the type of sensor being used, whether YUV or Bayer
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : Type of sensor - bayer or YUV
+ *
+ *==========================================================================*/
+cam_sensor_t QCamera2HardwareInterface::getSensorType()
+{
+    return gCamCapability[mCameraId]->sensor_type.sens_type;
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCamera2HWI.h b/msm8974/QCamera2/HAL/QCamera2HWI.h
new file mode 100644
index 0000000..861f329
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2HWI.h
@@ -0,0 +1,654 @@
+/* Copyright (c) 2012-2014,2016 The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HARDWAREINTERFACE_H__
+#define __QCAMERA2HARDWAREINTERFACE_H__
+
+#include <hardware/camera.h>
+#include <hardware/power.h>
+#include <utils/Log.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <QCameraParameters.h>
+
+#include "QCameraQueue.h"
+#include "QCameraCmdThread.h"
+#include "QCameraChannel.h"
+#include "QCameraStream.h"
+#include "QCameraStateMachine.h"
+#include "QCameraAllocator.h"
+#include "QCameraPostProc.h"
+#include "QCameraThermalAdapter.h"
+#include "QCameraMem.h"
+#include "cam_intf.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+#if DISABLE_DEBUG_LOG
+
+inline void __null_log(int, const char *, const char *, ...) {}
+
+#ifdef ALOGD
+#undef ALOGD
+#define ALOGD(...) do { __null_log(0, LOG_TAG,__VA_ARGS__); } while (0)
+#endif
+
+#ifdef ALOGI
+#undef ALOGI
+#define ALOGI(...) do { __null_log(0, LOG_TAG,__VA_ARGS__); } while (0)
+#endif
+
+#ifdef CDBG
+#undef CDBG
+#endif
+#define CDBG(...) do{} while(0)
+
+#else
+
+#ifdef CDBG
+#undef CDBG
+#endif //#ifdef CDBG
+#define CDBG(fmt, args...) ALOGD_IF(gCamHalLogLevel >= 2, fmt, ##args)
+
+#endif // DISABLE_DEBUG_LOG
+
+#ifdef CDBG_HIGH
+#undef CDBG_HIGH
+#endif //#ifdef CDBG_HIGH
+#define CDBG_HIGH(fmt, args...) ALOGD_IF(gCamHalLogLevel >= 1, fmt, ##args)
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+    QCAMERA_CH_TYPE_ZSL,
+    QCAMERA_CH_TYPE_CAPTURE,
+    QCAMERA_CH_TYPE_PREVIEW,
+    QCAMERA_CH_TYPE_VIDEO,
+    QCAMERA_CH_TYPE_SNAPSHOT,
+    QCAMERA_CH_TYPE_RAW,
+    QCAMERA_CH_TYPE_METADATA,
+    QCAMERA_CH_TYPE_MAX
+} qcamera_ch_type_enum_t;
+
+typedef struct {
+    int32_t msg_type;
+    int32_t ext1;
+    int32_t ext2;
+} qcamera_evt_argm_t;
+
+#define QCAMERA_DUMP_FRM_PREVIEW    1
+#define QCAMERA_DUMP_FRM_VIDEO      (1<<1)
+#define QCAMERA_DUMP_FRM_SNAPSHOT   (1<<2)
+#define QCAMERA_DUMP_FRM_THUMBNAIL  (1<<3)
+#define QCAMERA_DUMP_FRM_RAW        (1<<4)
+#define QCAMERA_DUMP_FRM_JPEG       (1<<5)
+
+#define QCAMERA_DUMP_FRM_MASK_ALL    0x000000ff
+
+#define QCAMERA_ION_USE_CACHE   true
+#define QCAMERA_ION_USE_NOCACHE false
+#define MAX_ONGOING_JOBS 25
+#define QCAMERA_MAX_FILEPATH_LENGTH 50
+
+extern volatile uint32_t gCamHalLogLevel;
+
+/** IMG_SWAP
+ *  @a: input a
+ *  @b: input b
+ *
+ *  Swaps the input values
+ **/
+#define IMG_SWAP(a, b) ({typeof(a) c; c=a; a=b; b=c;})
+
+typedef enum {
+    QCAMERA_NOTIFY_CALLBACK,
+    QCAMERA_DATA_CALLBACK,
+    QCAMERA_DATA_TIMESTAMP_CALLBACK,
+    QCAMERA_DATA_SNAPSHOT_CALLBACK
+} qcamera_callback_type_m;
+
+typedef void (*camera_release_callback)(void *user_data,
+                                        void *cookie,
+                                        int32_t cb_status);
+
+typedef struct {
+    qcamera_callback_type_m  cb_type;    // event type
+    int32_t                  msg_type;   // msg type
+    int32_t                  ext1;       // extended parameter
+    int32_t                  ext2;       // extended parameter
+    camera_memory_t *        data;       // ptr to data memory struct
+    unsigned int             index;      // index of the buf in the whole buffer
+    int64_t                  timestamp;  // buffer timestamp
+    camera_frame_metadata_t *metadata;   // meta data
+    void                    *user_data;  // any data needs to be released after callback
+    void                    *cookie;     // release callback cookie
+    camera_release_callback  release_cb; // release callback
+} qcamera_callback_argm_t;
+
+typedef struct {
+   cam_dimension_t all_preview_sizes[MAX_SIZES_CNT];
+   size_t all_preview_sizes_cnt;
+   cam_dimension_t all_video_sizes[MAX_SIZES_CNT];
+   size_t all_video_sizes_cnt;
+} qcamera_saved_sizes_list;
+
+class QCamera2HardwareInterface;
+
+class QCameraCbNotifier {
+public:
+    QCameraCbNotifier(QCamera2HardwareInterface *parent) :
+                          mNotifyCb (NULL),
+                          mDataCb (NULL),
+                          mDataCbTimestamp (NULL),
+                          mCallbackCookie (NULL),
+                          mParent (parent),
+                          mDataQ(releaseNotifications, this),
+                          mActive(false){}
+
+    virtual ~QCameraCbNotifier();
+
+    virtual int32_t notifyCallback(qcamera_callback_argm_t &cbArgs);
+    virtual void setCallbacks(camera_notify_callback notifyCb,
+                              camera_data_callback dataCb,
+                              camera_data_timestamp_callback dataCbTimestamp,
+                              void *callbackCookie);
+    virtual int32_t startSnapshots();
+    virtual void stopSnapshots();
+    virtual void exit();
+    static void * cbNotifyRoutine(void * data);
+    static void releaseNotifications(void *data, void *user_data);
+    static bool matchSnapshotNotifications(void *data, void *user_data);
+    static bool matchPreviewNotifications(void *data, void *user_data);
+    virtual int32_t flushPreviewNotifications();
+    static bool matchTimestampNotifications(void *data, void *user_data);
+    virtual int32_t flushVideoNotifications();
+
+private:
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    void                          *mCallbackCookie;
+    QCamera2HardwareInterface     *mParent;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh;
+    bool             mActive;
+};
+
+class QCamera2HardwareInterface : public QCameraAllocator,
+                                  public QCameraThermalCallback,
+                                  public QCameraAdjustFPS,
+                                  public QCameraTorchInterface
+{
+public:
+    /* static variable and functions accessed by camera service */
+    static camera_device_ops_t mCameraOps;
+
+    static int set_preview_window(struct camera_device *,
+        struct preview_stream_ops *window);
+    static void set_CallBacks(struct camera_device *,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    static void enable_msg_type(struct camera_device *, int32_t msg_type);
+    static void disable_msg_type(struct camera_device *, int32_t msg_type);
+    static int msg_type_enabled(struct camera_device *, int32_t msg_type);
+    static int start_preview(struct camera_device *);
+    static void stop_preview(struct camera_device *);
+    static int preview_enabled(struct camera_device *);
+    static int store_meta_data_in_buffers(struct camera_device *, int enable);
+    static int start_recording(struct camera_device *);
+    static void stop_recording(struct camera_device *);
+    static int recording_enabled(struct camera_device *);
+    static void release_recording_frame(struct camera_device *, const void *opaque);
+    static int auto_focus(struct camera_device *);
+    static int cancel_auto_focus(struct camera_device *);
+    static int take_picture(struct camera_device *);
+    int takeLiveSnapshot_internal();
+    int takeBackendPic_internal(bool *JpegMemOpt);
+    void checkIntPicPending(bool JpegMemOpt);
+    static int cancel_picture(struct camera_device *);
+    static int set_parameters(struct camera_device *, const char *parms);
+    static char* get_parameters(struct camera_device *);
+    static void put_parameters(struct camera_device *, char *);
+    static int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+    static void release(struct camera_device *);
+    static int dump(struct camera_device *, int fd);
+    static int close_camera_device(hw_device_t *);
+
+    static int register_face_image(struct camera_device *,
+                                   void *img_ptr,
+                                   cam_pp_offline_src_config_t *config);
+public:
+    QCamera2HardwareInterface(uint32_t cameraId);
+    virtual ~QCamera2HardwareInterface();
+    int openCamera(struct hw_device_t **hw_device);
+
+    static int getCapabilities(uint32_t cameraId, struct camera_info *info);
+    static int initCapabilities(uint32_t cameraId, mm_camera_vtbl_t *cameraHandle);
+
+    // Implementation of QCameraAllocator
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+            size_t size, int stride, int scanline, uint8_t &bufferCnt);
+    virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+            size_t size, uint8_t &bufferCnt);
+
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type);
+
+    // Implementation of QCameraThermalCallback
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t level,
+            void *userdata, void *data);
+
+    virtual int recalcFPSRange(int &minFPS, int &maxFPS,
+            cam_fps_range_t &adjustedRange);
+
+    // Implementation of QCameraTorchInterface
+    virtual int prepareTorchCamera();
+    virtual int releaseTorchCamera();
+
+    friend class QCameraStateMachine;
+    friend class QCameraPostProcessor;
+    friend class QCameraCbNotifier;
+
+private:
+    int setPreviewWindow(struct preview_stream_ops *window);
+    int setCallBacks(
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    int enableMsgType(int32_t msg_type);
+    int disableMsgType(int32_t msg_type);
+    int msgTypeEnabled(int32_t msg_type);
+    int msgTypeEnabledWithLock(int32_t msg_type);
+    int startPreview();
+    int stopPreview();
+    int storeMetaDataInBuffers(int enable);
+    int startRecording();
+    int stopRecording();
+    int releaseRecordingFrame(const void *opaque);
+    int autoFocus();
+    int cancelAutoFocus();
+    int takePicture();
+    int stopCaptureChannel(bool destroy);
+    int cancelPicture();
+    int takeLiveSnapshot();
+    int takePictureInternal();
+    int cancelLiveSnapshot();
+    char* getParameters();
+    int putParameters(char *);
+    int sendCommand(int32_t cmd, int32_t &arg1, int32_t &arg2);
+    int release();
+    int dump(int fd);
+    int registerFaceImage(void *img_ptr,
+                          cam_pp_offline_src_config_t *config,
+                          int32_t &faceID);
+    int32_t longShot();
+
+    int openCamera();
+    int closeCamera();
+
+    int processAPI(qcamera_sm_evt_enum_t api, void *api_payload);
+    int processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    int processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    void lockAPI();
+    void waitAPIResult(qcamera_sm_evt_enum_t api_evt, qcamera_api_result_t *apiResult);
+    void unlockAPI();
+    void signalAPIResult(qcamera_api_result_t *result);
+    void signalEvtResult(qcamera_api_result_t *result);
+
+    int calcThermalLevel(qcamera_thermal_level_enum_t level,
+            const int minFPSi, const int maxFPSi, cam_fps_range_t &adjustedRange,
+            enum msm_vfe_frame_skip_pattern &skipPattern);
+    int updateThermalLevel(qcamera_thermal_level_enum_t level);
+
+    // update entries to set parameters and check if restart is needed
+    int updateParameters(const char *parms, bool &needRestart);
+    // send request to server to set parameters
+    int commitParameterChanges();
+
+    bool needDebugFps();
+    bool isRegularCapture();
+    bool isCACEnabled();
+    bool isPreviewRestartEnabled();
+    bool is4k2kResolution(cam_dimension_t* resolution);
+    bool isCaptureShutterEnabled();
+    bool isAFRunning();
+    bool needReprocess();
+    bool needDualReprocess();
+    bool needRotationReprocess();
+    bool needScaleReprocess();
+    void debugShowVideoFPS();
+    void debugShowPreviewFPS();
+    void dumpJpegToFile(const void *data, size_t size, uint32_t index);
+    void dumpFrameToFile(QCameraStream *stream,
+            mm_camera_buf_def_t *frame, uint32_t dump_type);
+    void dumpMetadataToFile(QCameraStream *stream,
+                            mm_camera_buf_def_t *frame,char *type);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    void playShutter();
+    void getThumbnailSize(cam_dimension_t &dim);
+    uint32_t getJpegQuality();
+    uint32_t getJpegRotation();
+    void getOrientation();
+    inline int getFlash(){ return mFlash; }
+    inline int getFlashPresence(){ return mFlashPresence; }
+    inline int getRedeye(){ return mRedEye; }
+    inline bool getCancelAutoFocus(){ return mCancelAutoFocus; }
+    inline void setCancelAutoFocus(bool flag){ mCancelAutoFocus = flag; }
+    QCameraExif *getExifData();
+    cam_sensor_t getSensorType();
+
+    int32_t processAutoFocusEvent(cam_auto_focus_data_t &focus_data);
+    int32_t processZoomEvent(cam_crop_data_t &crop_info);
+    int32_t processPrepSnapshotDoneEvent(cam_prep_snapshot_state_t prep_snapshot_state);
+    int32_t processASDUpdate(cam_auto_scene_t scene);
+    int32_t processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_job);
+    int32_t processHDRData(cam_asd_hdr_scene_data_t hdr_scene);
+    int32_t transAwbMetaToParams(cam_awb_params_t &awb_params);
+    int32_t processAWBUpdate(cam_awb_params_t &awb_params);
+
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    int32_t sendDataNotify(int32_t msg_type,
+                           camera_memory_t *data,
+                           uint8_t index,
+                           camera_frame_metadata_t *metadata);
+
+    int32_t sendPreviewCallback(QCameraStream *stream,
+                                QCameraGrallocMemory *memory, int32_t idx);
+
+    int32_t addChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t startChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t stopChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t delChannel(qcamera_ch_type_enum_t ch_type, bool destroy = true);
+    int32_t addPreviewChannel();
+    int32_t addSnapshotChannel();
+    int32_t addVideoChannel();
+    int32_t addZSLChannel();
+    int32_t addCaptureChannel();
+    int32_t addRawChannel();
+    int32_t addMetaDataChannel();
+    QCameraReprocessChannel *addReprocChannel(QCameraChannel *pInputChannel);
+    QCameraReprocessChannel *addDualReprocChannel(QCameraChannel *pInputChannel);
+    QCameraReprocessChannel *addOfflineReprocChannel(
+                                                cam_pp_offline_src_config_t &img_config,
+                                                cam_pp_feature_config_t &pp_feature,
+                                                stream_cb_routine stream_cb,
+                                                void *userdata);
+    int32_t addStreamToChannel(QCameraChannel *pChannel,
+                               cam_stream_type_t streamType,
+                               stream_cb_routine streamCB,
+                               void *userData);
+    int32_t preparePreview();
+    void unpreparePreview();
+    int32_t prepareRawStream(QCameraChannel *pChannel);
+    QCameraChannel *getChannelByHandle(uint32_t channelHandle);
+    mm_camera_buf_def_t *getSnapshotFrame(mm_camera_super_buf_t *recvd_frame);
+    int32_t processFaceDetectionResult(cam_face_detection_data_t *fd_data);
+    bool needPreviewFDCallback(uint8_t num_faces);
+    int32_t processHistogramStats(cam_hist_stats_t &stats_data);
+    int32_t setHistogram(bool histogram_en);
+    int32_t setFaceDetection(bool enabled);
+    int32_t prepareHardwareForSnapshot(int32_t afNeeded);
+    bool needProcessPreviewFrame();
+    bool isNoDisplayMode() {return mParameters.isNoDisplayMode();};
+    bool isZSLMode() {return mParameters.isZSLMode();};
+    bool isHFRMode() {return mParameters.isHfrMode();};
+    uint8_t numOfSnapshotsExpected() {
+        return (uint8_t) ((mParameters.isUbiRefocus() ||
+                    mParameters.isMTFRefocus()) ?
+                1 : mParameters.getNumOfSnapshots());
+    }
+    bool isLongshotEnabled() { return mLongshotEnabled; };
+    uint8_t getBufNumRequired(cam_stream_type_t stream_type);
+    bool needFDMetadata(qcamera_ch_type_enum_t channel_type);
+    bool removeSizeFromList(cam_dimension_t *size_list, size_t length,
+            cam_dimension_t size);
+    int32_t unconfigureAdvancedCapture();
+    int32_t configureAdvancedCapture();
+    int32_t configureAFBracketing(bool enable = true);
+    int32_t configureMTFBracketing(bool enable = true);
+    int32_t configureFlashBracketing(bool enable = true);
+    int32_t startAdvancedCapture(QCameraPicChannel *pChannel);
+    int32_t configureZSLHDRBracketing();
+    int32_t startZslAdvancedCapture(QCameraPicChannel *pChannel);
+    int32_t configureOptiZoom();
+    int32_t configureFssr();
+    int32_t configureAEBracketing();
+    inline void setOutputImageCount(uint32_t aCount) {mOutputCount = aCount;}
+    inline uint32_t getOutputImageCount() {return mOutputCount;}
+    bool processUFDumps(qcamera_jpeg_evt_payload_t *evt);
+    bool processMTFDumps(qcamera_jpeg_evt_payload_t *evt);
+    void captureDone();
+    static void copyList(cam_dimension_t *src_list, cam_dimension_t *dst_list,
+            size_t len);
+    static void camEvtHandle(uint32_t camera_handle,
+                          mm_camera_event_t *evt,
+                          void *user_data);
+    static void jpegEvtHandle(jpeg_job_status_t status,
+                              uint32_t client_hdl,
+                              uint32_t jobId,
+                              mm_jpeg_output_t *p_buf,
+                              void *userdata);
+
+    static void *evtNotifyRoutine(void *data);
+
+    // functions for different data notify cb
+    static void zsl_channel_cb(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                           void *userdata);
+    static void postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                            void *userdata);
+    static void dual_reproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                            void *userdata);
+    static void nodisplay_preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                                    QCameraStream *stream,
+                                                    void *userdata);
+    static void preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                          QCameraStream *stream,
+                                          void *userdata);
+    static void postview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void video_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                        QCameraStream *stream,
+                                        void *userdata);
+    static void snapshot_channel_cb_routine(mm_camera_super_buf_t *frame,
+           void *userdata);
+    static void raw_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                      QCameraStream *stream,
+                                      void *userdata);
+    static void preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                              QCameraStream * stream,
+                                              void * userdata);
+    static void snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                               QCameraStream * stream,
+                                               void * userdata);
+    static void metadata_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void reprocess_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                            QCameraStream *stream,
+                                            void *userdata);
+
+    static void releaseCameraMemory(void *data,
+                                    void *cookie,
+                                    int32_t cbStatus);
+    static void returnStreamBuffer(void *data,
+                                   void *cookie,
+                                   int32_t cbStatus);
+    static void getLogLevel();
+
+private:
+    camera_device_t   mCameraDevice;
+    uint32_t          mCameraId;
+    mm_camera_vtbl_t *mCameraHandle;
+    bool mCameraOpened;
+
+    preview_stream_ops_t *mPreviewWindow;
+    QCameraParameters mParameters;
+    int32_t               mMsgEnabled;
+    int                   mStoreMetaDataInFrame;
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    camera_request_memory          mGetMemory;
+    void                          *mCallbackCookie;
+
+    QCameraStateMachine m_stateMachine;   // state machine
+    QCameraPostProcessor m_postprocessor; // post processor
+    QCameraThermalAdapter &m_thermalAdapter;
+    QCameraCbNotifier m_cbNotifier;
+    pthread_mutex_t m_lock;
+    pthread_cond_t m_cond;
+    api_result_list *m_apiResultList;
+    QCameraMemoryPool m_memoryPool;
+
+    pthread_mutex_t m_evtLock;
+    pthread_cond_t m_evtCond;
+    qcamera_api_result_t m_evtResult;
+
+    pthread_mutex_t m_parm_lock;
+
+    QCameraChannel *m_channels[QCAMERA_CH_TYPE_MAX]; // array holding channel ptr
+
+    bool m_bShutterSoundPlayed;         // if shutter sound had been played
+    bool m_bPreviewStarted;             // flag indicating the first preview frame callback has been received
+    bool m_bRecordStarted;              // flag indicating recording has started for the first time
+
+    // Set while auto focus is running, i.e. from when auto_focus is called by the
+    // service until any focus callback/cancel_focus happens. This flag is not an
+    // indication of whether the lens is moving or not.
+    bool m_bAutoFocusRunning;
+    cam_autofocus_state_t m_currentFocusState;
+
+    power_module_t *m_pPowerModule;   // power module
+
+    uint32_t mDumpFrmCnt;  // frame dump count
+    uint32_t mDumpSkipCnt; // frame skip count
+    mm_jpeg_exif_params_t mExifParams;
+    qcamera_thermal_level_enum_t mThermalLevel;
+    bool mCancelAutoFocus;
+    bool mActiveAF;
+    bool m_HDRSceneEnabled;
+    bool mLongshotEnabled;
+    int32_t m_max_pic_width;
+    int32_t m_max_pic_height;
+    pthread_t mLiveSnapshotThread;
+    pthread_t mIntPicThread;
+    bool mFlashNeeded;
+    uint32_t mCaptureRotation;
+    int32_t mFlash;
+    int32_t mRedEye;
+    int32_t mFlashPresence;
+    bool mIs3ALocked;
+    bool mPrepSnapRun;
+    int32_t mZoomLevel;
+
+    //eztune variables for communication with eztune server at backend
+    bool m_bIntEvtPending;
+    char m_BackendFileName[QCAMERA_MAX_FILEPATH_LENGTH];
+    size_t mBackendFileSize;
+    pthread_mutex_t m_int_lock;
+    pthread_cond_t m_int_cond;
+
+    enum DefferedWorkCmd {
+        CMD_DEFF_ALLOCATE_BUFF,
+        CMD_DEFF_PPROC_START,
+        CMD_DEFF_MAX
+    };
+
+    typedef struct {
+        QCameraChannel *ch;
+        cam_stream_type_t type;
+    } DefferAllocBuffArgs;
+
+    typedef union {
+        DefferAllocBuffArgs allocArgs;
+        QCameraChannel *pprocArgs;
+    } DefferWorkArgs;
+
+    bool mDeffOngoingJobs[MAX_ONGOING_JOBS];
+
+    struct DeffWork
+    {
+        DeffWork(DefferedWorkCmd cmd,
+                 uint32_t id,
+                 DefferWorkArgs args)
+            : cmd(cmd),
+              id(id),
+              args(args){};
+
+        DefferedWorkCmd cmd;
+        uint32_t id;
+        DefferWorkArgs args;
+    };
+
+    QCameraCmdThread      mDefferedWorkThread;
+    QCameraQueue          mCmdQueue;
+
+    Mutex                 mDeffLock;
+    Condition             mDeffCond;
+
+    int32_t queueDefferedWork(DefferedWorkCmd cmd,
+                              DefferWorkArgs args);
+    int32_t waitDefferedWork(int32_t &job_id);
+    static void *defferedWorkRoutine(void *obj);
+
+    int32_t mSnapshotJob;
+    int32_t mPostviewJob;
+    int32_t mMetadataJob;
+    int32_t mReprocJob;
+    int32_t mRawdataJob;
+    uint32_t mOutputCount;
+    bool mPreviewFrameSkipValid;
+    cam_frame_idx_range_t mPreviewFrameSkipIdxRange;
+    bool mAdvancedCaptureConfigured;
+    int32_t mNumPreviewFaces;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HARDWAREINTERFACE_H__ */
diff --git a/msm8974/QCamera2/HAL/QCamera2HWICallbacks.cpp b/msm8974/QCamera2/HAL/QCamera2HWICallbacks.cpp
new file mode 100644
index 0000000..a8de3ed
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -0,0 +1,2302 @@
+/* Copyright (c) 2012-2016, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+#include <time.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <utils/Errors.h>
+#include <utils/Trace.h>
+#include <utils/Timers.h>
+#include "QCamera2HWI.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : zsl_channel_cb
+ *
+ * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+ *==========================================================================*/
+void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
+                                               void *userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s: E",__func__);
+    char value[PROPERTY_VALUE_MAX];
+    bool dump_raw = false;
+    bool dump_yuv = false;
+    bool log_matching = false;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+       ALOGE("%s: camera obj not valid", __func__);
+       return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        ALOGE("%s: ZSL channel doesn't exist, return here", __func__);
+        return;
+    }
+
+    /* indicate the parent that capture is done */
+    pme->captureDone();
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
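+    // Note: this is a shallow copy of the superbuf descriptor only; the buffers it
+    // references are still owned by mm-camera-interface and must eventually be
+    // returned via bufDone once processing completes.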
+
+    if (recvd_frame->num_bufs > 0) {
+        CDBG_HIGH("[KPI Perf] %s: superbuf frame_idx %d", __func__,
+            recvd_frame->bufs[0]->frame_idx);
+    }
+
+    // DUMP RAW if available
+    property_get("persist.camera.zsl_raw", value, "0");
+    dump_raw = atoi(value) > 0 ? true : false;
+    if (dump_raw) {
+        for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+            if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+                mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
+                QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
+                if (NULL != pStream) {
+                    pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+                }
+                break;
+            }
+        }
+    }
+    //
+
+    // DUMP YUV before reprocess if needed
+    property_get("persist.camera.zsl_yuv", value, "0");
+    dump_yuv = atoi(value) > 0 ? true : false;
+    if (dump_yuv) {
+        for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+            if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+                mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+                QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+                if (NULL != pStream) {
+                    pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+                }
+                break;
+            }
+        }
+    }
+    //
+    // whether need FD Metadata along with Snapshot frame in ZSL mode
+    if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
+        //Need Face Detection result for snapshot frames
+        //Get the Meta Data frames
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i]; //find the metadata
+                    break;
+                }
+            }
+        }
+
+        if(pMetaFrame != NULL){
+            cam_metadata_info_t *pMetaData = (cam_metadata_info_t *)pMetaFrame->buffer;
+            pMetaData->faces_data.fd_type = QCAMERA_FD_SNAPSHOT; // hard-coded until MCT can support this
+            if(!pMetaData->is_faces_valid){
+                pMetaData->faces_data.num_faces_detected = 0;
+            }else if(pMetaData->faces_data.num_faces_detected > MAX_ROI){
+                ALOGE("%s: Invalid number of faces %d",
+                    __func__, pMetaData->faces_data.num_faces_detected);
+            }
+
+            qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+                payload->faces_data = pMetaData->faces_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: processEvt prep_snapshot failed", __func__);
+                    free(payload);
+                    payload = NULL;
+                }
+            } else {
+                ALOGE("%s: No memory for prep_snapshot qcamera_sm_internal_evt_payload_t", __func__);
+            }
+        }
+    }
+
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i];
+                    if (pMetaFrame != NULL &&
+                            ((cam_metadata_info_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    property_get("persist.camera.zsl_matching", value, "0");
+    log_matching = atoi(value) > 0 ? true : false;
+    if (log_matching) {
+        CDBG_HIGH("%s : ZSL super buffer contains:", __func__);
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL ) {
+                CDBG_HIGH("%s: Buffer with V4Lindex %d frame index %d of type %dTimestamp: %ld %ld",
+                        __func__,
+                        frame->bufs[i]->buf_idx,
+                        frame->bufs[i]->frame_idx,
+                        pStream->getMyType(),
+                        frame->bufs[i]->ts.tv_sec,
+                        frame->bufs[i]->ts.tv_nsec);
+            }
+        }
+    }
+
+    // send to postprocessor
+    pme->m_postprocessor.processData(frame);
+
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : capture_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+*==========================================================================*/
+void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                           void *userdata)
+{
+    ATRACE_CALL();
+    char value[PROPERTY_VALUE_MAX];
+    CDBG_HIGH("[KPI Perf] %s: E PROFILE_YUV_CB_TO_HAL", __func__);
+    bool dump_yuv = false;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        ALOGE("%s: Capture channel doesn't exist, return here", __func__);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // DUMP YUV before reprocess if needed
+    property_get("persist.camera.nonzsl.yuv", value, "0");
+    dump_yuv = atoi(value) > 0 ? true : false;
+    if (dump_yuv) {
+        for (uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++) {
+            if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+                mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+                QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+                if (NULL != pStream) {
+                    pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+                }
+                break;
+            }
+        }
+    }
+
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i]; //find the metadata
+                    if (pMetaFrame != NULL &&
+                            ((cam_metadata_info_t *)pMetaFrame->buffer)->is_tuning_params_valid){
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // Wait on Postproc initialization if needed
+    pme->waitDefferedWork(pme->mReprocJob);
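+    // Block until the deferred postproc/reprocess setup job (mReprocJob, presumably
+    // queued on the deferred-work thread during channel setup) has completed.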
+
+    // send to postprocessor
+    pme->m_postprocessor.processData(frame);
+
+/* START of test register face image for face authentication */
+#ifdef QCOM_TEST_FACE_REGISTER_FACE
+    static uint8_t bRunFaceReg = 1;
+
+    if (bRunFaceReg > 0) {
+        // find snapshot frame
+        QCameraStream *main_stream = NULL;
+        mm_camera_buf_def_t *main_frame = NULL;
+        for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+            QCameraStream *pStream =
+                pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                    main_stream = pStream;
+                    main_frame = recvd_frame->bufs[i];
+                    break;
+                }
+            }
+        }
+        if (main_stream != NULL && main_frame != NULL) {
+            int32_t faceId = -1;
+            cam_pp_offline_src_config_t config;
+            memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
+            config.num_of_bufs = 1;
+            main_stream->getFormat(config.input_fmt);
+            main_stream->getFrameDimension(config.input_dim);
+            main_stream->getFrameOffset(config.input_buf_planes.plane_info);
+            CDBG_HIGH("DEBUG: registerFaceImage E");
+            int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
+            CDBG_HIGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
+            bRunFaceReg = 0;
+        }
+    }
+
+#endif
+/* END of test register face image for face authentication */
+
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : postproc_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+*==========================================================================*/
+void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                            void *userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // send to postprocessor
+    if (pme->needDualReprocess()) {
+        //send for reprocess again
+        pme->m_postprocessor.processData(frame);
+    } else {
+        pme->m_postprocessor.processPPData(frame);
+    }
+
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : dual_reproc_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+*==========================================================================*/
+void QCamera2HardwareInterface::dual_reproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                            void *userdata)
+{
+    CDBG_HIGH("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // send to postprocessor
+    pme->m_postprocessor.processPPData(frame);
+
+    ATRACE_INT("Camera:Reprocess", 0);
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              normal case with display.
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. The new
+ *             preview frame will be sent to display, and an older frame
+ *             will be dequeued from display and needs to be returned back
+ *             to kernel for future use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream * stream,
+                                                          void *userdata)
+{
+    ATRACE_CALL();
+    CDBG("[KPI Perf] %s : BEGIN", __func__);
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+    if (pme == NULL) {
+        ALOGE("%s: Invalid hardware object", __func__);
+        free(super_frame);
+        return;
+    }
+    if (memory == NULL) {
+        ALOGE("%s: Invalid memory object", __func__);
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NLUL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    if (!pme->needProcessPreviewFrame()) {
+        ALOGE("%s: preview is not running, no need to process", __func__);
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    uint32_t idx = frame->buf_idx;
+    pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
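+    // mPreviewFrameSkipIdxRange is populated from metadata (see
+    // metadata_stream_cb_routine); preview frames whose frame_idx falls inside
+    // [min_frame_idx, max_frame_idx], e.g. during flash, are dropped below.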
+    if (pme->mPreviewFrameSkipValid) {
+        uint32_t min_frame_idx = pme->mPreviewFrameSkipIdxRange.min_frame_idx;
+        uint32_t max_frame_idx = pme->mPreviewFrameSkipIdxRange.max_frame_idx;
+        uint32_t current_frame_idx = frame->frame_idx;
+        if (current_frame_idx >= max_frame_idx) {
+            // Reset the flags when current frame ID >= max frame ID
+            pme->mPreviewFrameSkipValid = 0;
+            pme->mPreviewFrameSkipIdxRange.min_frame_idx = 0;
+            pme->mPreviewFrameSkipIdxRange.max_frame_idx = 0;
+        }
+        if (current_frame_idx >= min_frame_idx && current_frame_idx <= max_frame_idx) {
+            CDBG_HIGH("%s: Skip Preview frame ID %d during flash", __func__, current_frame_idx);
+            stream->bufDone(frame->buf_idx);
+            free(super_frame);
+            return;
+        }
+    }
+
+    if(pme->m_bPreviewStarted) {
+       CDBG_HIGH("[KPI Perf] %s : PROFILE_FIRST_PREVIEW_FRAME", __func__);
+       pme->m_bPreviewStarted = false ;
+    }
+
+    // Display the buffer.
+    CDBG("%p displayBuffer %d E", pme, idx);
+    int dequeuedIdx = memory->displayBuffer(idx);
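+    // displayBuffer() enqueues the new frame to the preview window and returns the
+    // index of an older buffer dequeued from display, which is handed back to the
+    // driver below via bufDone() (see NOTE above).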
+    if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
+        CDBG_HIGH("%s: Invalid dequeued buffer index %d from display",
+              __func__, dequeuedIdx);
+    } else {
+        // Return dequeued buffer back to driver
+        err = stream->bufDone((uint32_t)dequeuedIdx);
+        if ( err < 0) {
+            ALOGE("stream bufDone failed %d", err);
+        }
+    }
+
+    // Handle preview data callback
+    if (pme->mDataCb != NULL && pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+        int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: Preview callback was not sent succesfully", __func__);
+        }
+    }
+
+    free(super_frame);
+    CDBG("[KPI Perf] %s : END", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendPreviewCallback
+ *
+ * DESCRIPTION: helper function for triggering preview callbacks
+ *
+ * PARAMETERS :
+ *   @stream    : stream object
+ *   @memory    : Gralloc memory allocator
+ *   @idx       : buffer index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
+        QCameraGrallocMemory *memory, int32_t idx)
+{
+    camera_memory_t *previewMem = NULL;
+    camera_memory_t *data = NULL;
+    camera_memory_t *dataToApp = NULL;
+    size_t previewBufSize = 0;
+    size_t previewBufSizeFromCallback = 0;
+    cam_dimension_t preview_dim;
+    cam_format_t previewFmt;
+    int32_t rc = NO_ERROR;
+    int32_t yStride = 0;
+    int32_t yScanline = 0;
+    int32_t uvStride = 0;
+    int32_t uvScanline = 0;
+    int32_t uStride = 0;
+    int32_t uScanline = 0;
+    int32_t vStride = 0;
+    int32_t vScanline = 0;
+    int32_t yStrideToApp = 0;
+    int32_t uvStrideToApp = 0;
+    int32_t yScanlineToApp = 0;
+    int32_t uvScanlineToApp = 0;
+    int32_t srcOffset = 0;
+    int32_t dstOffset = 0;
+    int32_t srcBaseOffset = 0;
+    int32_t dstBaseOffset = 0;
+    int i;
+
+    if ((NULL == stream) || (NULL == memory)) {
+        ALOGE("%s: Invalid preview callback input", __func__);
+        return BAD_VALUE;
+    }
+
+    cam_stream_info_t *streamInfo =
+            reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
+    if (NULL == streamInfo) {
+        ALOGE("%s: Invalid streamInfo", __func__);
+        return BAD_VALUE;
+    }
+
+    stream->getFrameDimension(preview_dim);
+    stream->getFormat(previewFmt);
+
+    /* The preview buffer size in the callback should be
+     * (width * height * bytes_per_pixel). Since all preview formats we support
+     * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
+     * We need to add a check if other formats are supported in the future. */
+    if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
+        (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
+        (previewFmt == CAM_FORMAT_YUV_420_YV12)) {
+        if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
+            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+            uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+            uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+            vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
+            vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
+
+            previewBufSize = yStride * yScanline + uStride * uScanline + vStride * vScanline;
+            previewBufSizeFromCallback = previewBufSize;
+        } else {
+            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+            uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+            uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+
+            yStrideToApp = preview_dim.width;
+            yScanlineToApp = preview_dim.height;
+            uvStrideToApp = yStrideToApp;
+            uvScanlineToApp = yScanlineToApp / 2;
+
+            previewBufSize = (yStrideToApp * yScanlineToApp) +
+                    (uvStrideToApp * uvScanlineToApp);
+
+            previewBufSizeFromCallback = (yStride * yScanline) +
+                    (uvStride * uvScanline);
+        }
+        if(previewBufSize == previewBufSizeFromCallback) {
+            previewMem = mGetMemory(memory->getFd(idx),
+                       previewBufSize, 1, mCallbackCookie);
+            if (!previewMem || !previewMem->data) {
+                ALOGE("%s: mGetMemory failed.\n", __func__);
+                return NO_MEMORY;
+            } else {
+                data = previewMem;
+            }
+        } else {
+            data = memory->getMemory(idx, false);
+            dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+            if (!dataToApp || !dataToApp->data) {
+                ALOGE("%s: mGetMemory failed.\n", __func__);
+                return NO_MEMORY;
+            }
+
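+            // The stream buffer uses padded stride/scanline while the app expects a
+            // tightly packed width x height layout, so copy the Y and UV planes row
+            // by row into dataToApp, dropping the padding.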
+            for (i = 0; i < preview_dim.height; i++) {
+                srcOffset = i * yStride;
+                dstOffset = i * yStrideToApp;
+
+                memcpy((unsigned char *) dataToApp->data + dstOffset,
+                        (unsigned char *) data->data + srcOffset, yStrideToApp);
+            }
+
+            srcBaseOffset = yStride * yScanline;
+            dstBaseOffset = yStrideToApp * yScanlineToApp;
+
+            for (i = 0; i < preview_dim.height/2; i++) {
+                srcOffset = i * uvStride + srcBaseOffset;
+                dstOffset = i * uvStrideToApp + dstBaseOffset;
+
+                memcpy((unsigned char *) dataToApp->data + dstOffset,
+                        (unsigned char *) data->data + srcOffset,
+                        yStrideToApp);
+            }
+        }
+    } else {
+        data = memory->getMemory(idx, false);
+        ALOGE("%s: Invalid preview format, buffer size in preview callback may be wrong.",
+                __func__);
+    }
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+    if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
+            previewBufSize == previewBufSizeFromCallback) {
+        cbArg.data = data;
+    } else {
+        cbArg.data = dataToApp;
+    }
+    if ( previewMem ) {
+        cbArg.user_data = previewMem;
+        cbArg.release_cb = releaseCameraMemory;
+    } else if (dataToApp) {
+        cbArg.user_data = dataToApp;
+        cbArg.release_cb = releaseCameraMemory;
+    }
+    cbArg.cookie = this;
+    rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: fail sending notification", __func__);
+        if (previewMem) {
+            previewMem->release(previewMem);
+        } else if (dataToApp) {
+            dataToApp->release(dataToApp);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : nodisplay_preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              no-display case
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
+                                                          mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream *stream,
+                                                          void * userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s E",__func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NLUL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    if (!pme->needProcessPreviewFrame()) {
+        CDBG_HIGH("%s: preview is not running, no need to process", __func__);
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    if (pme->mPreviewFrameSkipValid) {
+        uint32_t min_frame_idx = pme->mPreviewFrameSkipIdxRange.min_frame_idx;
+        uint32_t max_frame_idx = pme->mPreviewFrameSkipIdxRange.max_frame_idx;
+        uint32_t current_frame_idx = frame->frame_idx;
+        if (current_frame_idx >= max_frame_idx) {
+            // Reset the flags when current frame ID >= max frame ID
+            pme->mPreviewFrameSkipValid = 0;
+            pme->mPreviewFrameSkipIdxRange.min_frame_idx = 0;
+            pme->mPreviewFrameSkipIdxRange.max_frame_idx = 0;
+        }
+        if (current_frame_idx >= min_frame_idx && current_frame_idx <= max_frame_idx) {
+            CDBG_HIGH("%s: Skip Preview frame ID %d during flash", __func__, current_frame_idx);
+            stream->bufDone(frame->buf_idx);
+            free(super_frame);
+            return;
+        }
+    }
+
+    QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+    camera_memory_t *preview_mem = NULL;
+    if (previewMemObj != NULL) {
+        preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
+    }
+    if (NULL != previewMemObj && NULL != preview_mem) {
+        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
+        if (pme->needProcessPreviewFrame() &&
+            pme->mDataCb != NULL &&
+            pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0 ) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+            cbArg.data = preview_mem;
+            cbArg.user_data = (void *) &frame->buf_idx;
+            cbArg.cookie = stream;
+            cbArg.release_cb = returnStreamBuffer;
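+            // The buffer is released asynchronously: returnStreamBuffer (with
+            // cookie = stream and user_data = buffer index) is invoked once the
+            // consumer is done with the memory, presumably calling bufDone.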
+            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: fail sending data notify", __func__);
+                stream->bufDone(frame->buf_idx);
+            }
+        } else {
+            stream->bufDone(frame->buf_idx);
+        }
+    }
+    free(super_frame);
+    CDBG_HIGH("[KPI Perf] %s X",__func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : postview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle post frame from postview stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                           QCameraStream *stream,
+                                                           void *userdata)
+{
+    ATRACE_CALL();
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+    if (pme == NULL) {
+        ALOGE("%s: Invalid hardware object", __func__);
+        free(super_frame);
+        return;
+    }
+    if (memory == NULL) {
+        ALOGE("%s: Invalid memory object", __func__);
+        free(super_frame);
+        return;
+    }
+
+    CDBG_HIGH("[KPI Perf] %s : BEGIN", __func__);
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NLUL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
+    if (NULL != memObj) {
+        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    // Return buffer back to driver
+    err = stream->bufDone(frame->buf_idx);
+    if ( err < 0) {
+        ALOGE("stream bufDone failed %d", err);
+    }
+
+    free(super_frame);
+    CDBG_HIGH("[KPI Perf] %s : END", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : video_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle video frame from video stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. video
+ *             frame will be sent to video encoder. Once video encoder is
+ *             done with the video frame, it will call another API
+ *             (release_recording_frame) to return the frame back
+ *==========================================================================*/
+void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                        QCameraStream *stream,
+                                                        void *userdata)
+{
+    ATRACE_CALL();
+    QCameraVideoMemory *videoMemObj = NULL;
+    CDBG("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+
+    if (pme->needDebugFps()) {
+        pme->debugShowVideoFPS();
+    }
+    if(pme->m_bRecordStarted) {
+       CDBG_HIGH("[KPI Perf] %s : PROFILE_FIRST_RECORD_FRAME", __func__);
+       pme->m_bRecordStarted = false ;
+    }
+    CDBG_HIGH("%s: Stream(%d), Timestamp: %ld %ld",
+          __func__,
+          frame->stream_id,
+          frame->ts.tv_sec,
+          frame->ts.tv_nsec);
+    nsecs_t timeStamp;
+    timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
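+    // Timestamp is converted to nanoseconds for the data-timestamp callback; the
+    // encoder holds the video buffer until it is returned through
+    // release_recording_frame (see NOTE above).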
+
+    CDBG_HIGH("Send Video frame to services/encoder TimeStamp : %lld", timeStamp);
+    videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+    camera_memory_t *video_mem = NULL;
+    if (NULL != videoMemObj) {
+        video_mem = videoMemObj->getMemory(frame->buf_idx, (pme->mStoreMetaDataInFrame > 0)? true : false);
+        videoMemObj->getNativeHandle(frame->buf_idx);
+    }
+    if (NULL != videoMemObj && NULL != video_mem) {
+        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
+        if ((pme->mDataCbTimestamp != NULL) &&
+            pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
+            cbArg.data = video_mem;
+            cbArg.timestamp = timeStamp;
+            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: fail sending data notify", __func__);
+                stream->bufDone(frame->buf_idx);
+            }
+        }
+    }
+    free(super_frame);
+    CDBG("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : snapshot_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
+       void *userdata)
+{
+    ATRACE_CALL();
+    char value[PROPERTY_VALUE_MAX];
+
+    CDBG_HIGH("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != super_frame->ch_id) {
+        ALOGE("%s: Snapshot channel doesn't exist, return here", __func__);
+        return;
+    }
+
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        if (pChannel == NULL ||
+            pChannel->getMyHandle() != super_frame->ch_id) {
+            ALOGE("%s: Capture channel doesn't exist, return here", __func__);
+            return;
+        }
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = super_frame->bufs[i]; //find the metadata
+                    if (pMetaFrame != NULL &&
+                            ((cam_metadata_info_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.",
+                __func__);
+        pChannel->bufDone(super_frame);
+        return;
+    }
+    *frame = *super_frame;
+
+    pme->m_postprocessor.processData(frame);
+
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw dump frame from raw stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. For raw
+ *             frame, there is no need to send to postprocessor for jpeg
+ *             encoding. this function will play shutter and send the data
+ *             callback to upper layer. Raw frame buffer will be returned
+ *             back to kernel, and frame will be free after use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                      QCameraStream * /*stream*/,
+                                                      void * userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    pme->m_postprocessor.processRawData(super_frame);
+    CDBG_HIGH("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard preview
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                              QCameraStream * stream,
+                                                              void * userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s : BEGIN", __func__);
+    char value[PROPERTY_VALUE_MAX];
+    bool dump_raw = false;
+
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    property_get("persist.camera.preview_raw", value, "0");
+    dump_raw = atoi(value) > 0 ? true : false;
+
+    for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+        if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+            mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
+            if (NULL != stream) {
+                if (dump_raw) {
+                    pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+                }
+                stream->bufDone(super_frame->bufs[i]->buf_idx);
+            }
+            break;
+        }
+    }
+
+    free(super_frame);
+
+    CDBG_HIGH("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : snapshot_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard capture
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                               QCameraStream * stream,
+                                                               void * userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s : BEGIN", __func__);
+    char value[PROPERTY_VALUE_MAX];
+    bool dump_raw = false;
+
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    property_get("persist.camera.snapshot_raw", value, "0");
+    dump_raw = atoi(value) > 0 ? true : false;
+
+    for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+        if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+            mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
+            if (NULL != stream) {
+                if (dump_raw) {
+                    pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+                }
+                stream->bufDone(super_frame->bufs[i]->buf_idx);
+            }
+            break;
+        }
+    }
+
+    free(super_frame);
+
+    CDBG_HIGH("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : metadata_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle metadata frame from metadata stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. Metadata
+ *             could have valid entries for face detection result or
+ *             histogram statistics information.
+ *==========================================================================*/
+void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                           QCameraStream * stream,
+                                                           void * userdata)
+{
+    ATRACE_CALL();
+    CDBG("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
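+    // The metadata payload (cam_metadata_info_t) is carried in the first buffer of the super frame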
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    cam_metadata_info_t *pMetaData = (cam_metadata_info_t *)frame->buffer;
+
+    if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
+       (pMetaData->is_meta_valid == 1) &&
+       !pme->mLongshotEnabled) {
+       //Make shutter callback in non-ZSL mode once raw frame is received from VFE.
+       pme->playShutter();
+    }
+
+    if (pMetaData->is_preview_frame_skip_valid) {
+        pme->mPreviewFrameSkipValid = 1;
+        pme->mPreviewFrameSkipIdxRange = pMetaData->preview_frame_skip_idx_range;
+        CDBG_HIGH("%s: Skip preview frame ID range min = %d max = %d", __func__,
+                   pme->mPreviewFrameSkipIdxRange.min_frame_idx,
+                   pme->mPreviewFrameSkipIdxRange.max_frame_idx);
+    }
+
+    if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
+        //Dump Tuning data for video
+        pme->dumpMetadataToFile(stream,frame,(char *)"Video");
+    }
+
+    if (pMetaData->is_faces_valid) {
+        if (pMetaData->faces_data.num_faces_detected > MAX_ROI) {
+            ALOGE("%s: Invalid number of faces %d",
+                __func__, pMetaData->faces_data.num_faces_detected);
+        } else {
+            // process face detection result
+            if (pMetaData->faces_data.num_faces_detected)
+                CDBG_HIGH("[KPI Perf] %s: PROFILE_NUMBER_OF_FACES_DETECTED %d",__func__,
+                           pMetaData->faces_data.num_faces_detected);
+            pMetaData->faces_data.fd_type = QCAMERA_FD_PREVIEW; //hard-coded here until MCT can support it
+            qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+                payload->faces_data = pMetaData->faces_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: processEvt face detection failed", __func__);
+                    free(payload);
+                    payload = NULL;
+
+                }
+            } else {
+                ALOGE("%s: No memory for face detect qcamera_sm_internal_evt_payload_t", __func__);
+            }
+        }
+    }
+
+    if (pMetaData->is_stats_valid) {
+        // process histogram statistics info
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
+            payload->stats_data = pMetaData->stats_data;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt histogram failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for histogram qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    if (pMetaData->is_focus_valid) {
+        // process focus info
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
+            payload->focus_data = pMetaData->focus_data;
+            payload->focus_data.focused_frame_idx = frame->frame_idx;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt focus failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for focus qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    if (pMetaData->is_crop_valid) {
+        if (pMetaData->crop_data.num_of_streams > MAX_NUM_STREAMS) {
+            ALOGE("%s: Invalid num_of_streams %d in crop_data", __func__,
+                pMetaData->crop_data.num_of_streams);
+        } else {
+            qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
+                payload->crop_data = pMetaData->crop_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: processEvt crop info failed", __func__);
+                    free(payload);
+                    payload = NULL;
+
+                }
+            } else {
+                ALOGE("%s: No memory for crop info qcamera_sm_internal_evt_payload_t", __func__);
+            }
+        }
+    }
+
+    if (pMetaData->is_prep_snapshot_done_valid) {
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
+            payload->prep_snapshot_state = pMetaData->prep_snapshot_done_state;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt prep_snapshot failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for prep_snapshot qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+    if (pMetaData->is_hdr_scene_data_valid) {
+        CDBG("%s: hdr_scene_data: %d %d %f\n",
+                   __func__,
+                   pMetaData->is_hdr_scene_data_valid,
+                   pMetaData->hdr_scene_data.is_hdr_scene,
+                   pMetaData->hdr_scene_data.hdr_confidence);
+    }
+    //Handle this HDR metadata only if capture is not in progress
+    if (pMetaData->is_hdr_scene_data_valid && !pme->m_stateMachine.isCaptureRunning()) {
+        int32_t rc = pme->processHDRData(pMetaData->hdr_scene_data);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: processHDRData failed", __func__);
+        }
+    }
+
+    /* Update 3a info */
+    if(pMetaData->is_ae_params_valid) {
+        pme->mExifParams.ae_params = pMetaData->ae_params;
+        pme->mFlashNeeded = pMetaData->ae_params.flash_needed ? true : false;
+    }
+    if(pMetaData->is_awb_params_valid) {
+        pme->mExifParams.awb_params = pMetaData->awb_params;
+    }
+    if(pMetaData->is_focus_valid) {
+        pme->mExifParams.af_params = pMetaData->focus_data;
+    }
+
+    if (pMetaData->is_awb_params_valid) {
+        CDBG("%s, metadata for awb params.", __func__);
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
+            payload->awb_data = pMetaData->awb_params;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt awb_update failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for awb_update qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    /* Update 3A debug info */
+    if (pMetaData->is_ae_exif_debug_valid) {
+        pme->mExifParams.ae_debug_params_valid = TRUE;
+        pme->mExifParams.ae_debug_params = pMetaData->ae_exif_debug_params;
+    }
+    if (pMetaData->is_awb_exif_debug_valid) {
+        pme->mExifParams.awb_debug_params_valid = TRUE;
+        pme->mExifParams.awb_debug_params = pMetaData->awb_exif_debug_params;
+    }
+    if (pMetaData->is_af_exif_debug_valid) {
+        pme->mExifParams.af_debug_params_valid = TRUE;
+        pme->mExifParams.af_debug_params = pMetaData->af_exif_debug_params;
+    }
+    if (pMetaData->is_asd_exif_debug_valid) {
+        pme->mExifParams.asd_debug_params_valid = TRUE;
+        pme->mExifParams.asd_debug_params = pMetaData->asd_exif_debug_params;
+    }
+    if (pMetaData->is_stats_buffer_exif_debug_valid) {
+        pme->mExifParams.stats_debug_params_valid = TRUE;
+        pme->mExifParams.stats_debug_params = pMetaData->stats_buffer_exif_debug_params;
+    }
+
+    /*Update Sensor info*/
+    if (pMetaData->is_sensor_params_valid) {
+        pme->mExifParams.sensor_params = pMetaData->sensor_params;
+    }
+
+    if (pMetaData->is_asd_decision_valid) {
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
+            payload->asd_data = pMetaData->scene;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt asd_update failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for asd_update qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    if (pMetaData->is_chromatix_mobicat_af_valid) {
+        memcpy(pme->mExifParams.af_mobicat_params,
+            pMetaData->chromatix_mobicat_af_data.private_mobicat_af_data,
+            sizeof(pme->mExifParams.af_mobicat_params));
+    }
+
+    stream->bufDone(frame->buf_idx);
+    free(super_frame);
+
+    CDBG("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocess_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
+ *              (after reprocess, e.g., ZSL snapshot frame after WNR if
+ *              WNR is enabled)
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. In this
+ *             case, reprocessed frame need to be passed to postprocessor
+ *             for jpeg encoding.
+ *==========================================================================*/
+void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                            QCameraStream * /*stream*/,
+                                                            void * userdata)
+{
+    ATRACE_CALL();
+    CDBG_HIGH("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    pme->m_postprocessor.processPPData(super_frame);
+
+    CDBG_HIGH("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpJpegToFile
+ *
+ * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
+ *
+ * PARAMETERS :
+ *    @data : data ptr
+ *    @size : length of data buffer
+ *    @index : identifier for data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
+        size_t size, uint32_t index)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpimg", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
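+    // persist.camera.dumpimg layout: bits[31:16] = number of frames to dump,
+    // bits[15:8] = skip interval, low bits = QCAMERA_DUMP_FRM_* type mask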
+    uint32_t frm_num = 0;
+    uint32_t skip_mode = 0;
+
+    char buf[32];
+    cam_dimension_t dim;
+    memset(buf, 0, sizeof(buf));
+    memset(&dim, 0, sizeof(dim));
+
+    if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
+        ((true == m_bIntEvtPending) && data)) {
+        frm_num = ((enabled & 0xffff0000) >> 16);
+        if(frm_num == 0) {
+            frm_num = 10; //default 10 frames
+        }
+        if(frm_num > 256) {
+            frm_num = 256; //256 buffers cycle around
+        }
+        skip_mode = ((enabled & 0x0000ff00) >> 8);
+        if(skip_mode == 0) {
+            skip_mode = 1; //no-skip
+        }
+
+        if( mDumpSkipCnt % skip_mode == 0) {
+            if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
+                // reset frame count if cycling
+                mDumpFrmCnt = 0;
+            }
+            if (mDumpFrmCnt <= frm_num) {
+                snprintf(buf, sizeof(buf), "/data/misc/camera/%d_%d.jpg", mDumpFrmCnt, index);
+                if (true == m_bIntEvtPending) {
+                    strncpy(m_BackendFileName, buf, sizeof(buf));
+                    mBackendFileSize = size;
+                }
+
+                int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                if (file_fd >= 0) {
+                    ssize_t written_len = 0;
+                    written_len = write(file_fd, data, size);
+                    fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+                    CDBG_HIGH("%s: written number of bytes %d\n", __func__, written_len);
+                    close(file_fd);
+                } else {
+                    ALOGE("%s: failed to open file for image dumping", __func__);
+                }
+                if (false == m_bIntEvtPending) {
+                    mDumpFrmCnt++;
+                }
+            }
+        }
+        mDumpSkipCnt++;
+    }
+}
+
+
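+/*===========================================================================
+ * FUNCTION   : dumpMetadataToFile
+ *
+ * DESCRIPTION: helper function to dump tuning metadata into file for debug purpose.
+ *
+ * PARAMETERS :
+ *    @stream : stream object the metadata frame belongs to
+ *    @frame  : metadata frame to be dumped
+ *    @type   : string tag appended to the dump file name
+ *
+ * RETURN     : None
+ *==========================================================================*/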
+void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
+                                                   mm_camera_buf_def_t *frame,char *type)
+{
+    char value[PROPERTY_VALUE_MAX];
+    uint32_t frm_num = 0;
+    cam_metadata_info_t *metadata = (cam_metadata_info_t *)frame->buffer;
+    property_get("persist.camera.dumpmetadata", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
+    if (stream == NULL) {
+        ALOGE("%s: null stream, nothing to dump", __func__);
+        return;
+    }
+
+    uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
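+    // persist.camera.dumpmetadata: non-zero enables dumping; bits[31:16] = number of frames to dump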
+    if(enabled){
+        frm_num = ((enabled & 0xffff0000) >> 16);
+        if (frm_num == 0) {
+            frm_num = 10; //default 10 frames
+        }
+        if (frm_num > 256) {
+            frm_num = 256; //256 buffers cycle around
+        }
+        if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
+            // reset frame count if cycling
+            dumpFrmCnt = 0;
+        }
+        CDBG_HIGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
+        if (dumpFrmCnt < frm_num) {
+            char timeBuf[128];
+            char buf[32];
+            memset(buf, 0, sizeof(buf));
+            memset(timeBuf, 0, sizeof(timeBuf));
+            time_t current_time;
+            struct tm * timeinfo;
+            time (&current_time);
+            timeinfo = localtime (&current_time);
+            if (timeinfo != NULL)
+                strftime (timeBuf, sizeof(timeBuf),"/data/misc/camera/%Y%m%d%H%M%S", timeinfo);
+            String8 filePath(timeBuf);
+            snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
+            filePath.append(buf);
+            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+            if (file_fd > 0) {
+                ssize_t written_len = 0;
+                metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
+                void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
+                CDBG_HIGH("tuning_sensor_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
+                CDBG_HIGH("tuning_vfe_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
+                CDBG_HIGH("tuning_cpp_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
+                CDBG_HIGH("tuning_cac_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_vfe_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_cpp_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_cac_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                close(file_fd);
+            } else {
+                ALOGE("%s: failed to open file for metadata dumping", __func__);
+            }
+            dumpFrmCnt++;
+        }
+    }
+    stream->mDumpMetaFrame = dumpFrmCnt;
+}
+/*===========================================================================
+ * FUNCTION   : dumpFrameToFile
+ *
+ * DESCRIPTION: helper function to dump frame into file for debug purpose.
+ *
+ * PARAMETERS :
+ *    @data : data ptr
+ *    @size : length of data buffer
+ *    @index : identifier for data
+ *    @dump_type : type of the frame to be dumped. The frame is
+ *                 dumped into a file only when this dump type
+ *                 is enabled.
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
+        mm_camera_buf_def_t *frame, uint32_t dump_type)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpimg", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
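+    // Same persist.camera.dumpimg layout as in dumpJpegToFile: bits[31:16] = frame count,
+    // bits[15:8] = skip interval, low bits = dump type mask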
+    uint32_t frm_num = 0;
+    uint32_t skip_mode = 0;
+
+    if (stream)
+        mDumpFrmCnt = stream->mDumpFrame;
+
+    if(enabled & QCAMERA_DUMP_FRM_MASK_ALL) {
+        if((enabled & dump_type) && stream && frame) {
+            frm_num = ((enabled & 0xffff0000) >> 16);
+            if(frm_num == 0) {
+                frm_num = 10; //default 10 frames
+            }
+            if(frm_num > 256) {
+                frm_num = 256; //256 buffers cycle around
+            }
+            skip_mode = ((enabled & 0x0000ff00) >> 8);
+            if(skip_mode == 0) {
+                skip_mode = 1; //no-skip
+            }
+            if(stream->mDumpSkipCnt == 0)
+                stream->mDumpSkipCnt = 1;
+
+            if( stream->mDumpSkipCnt % skip_mode == 0) {
+                if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
+                    // reset frame count if cycling
+                    mDumpFrmCnt = 0;
+                }
+                if (mDumpFrmCnt <= frm_num) {
+                    char buf[32];
+                    char timeBuf[128];
+                    time_t current_time;
+                    struct tm * timeinfo;
+
+
+                    time (&current_time);
+                    timeinfo = localtime (&current_time);
+                    memset(buf, 0, sizeof(buf));
+
+                    cam_dimension_t dim;
+                    memset(&dim, 0, sizeof(dim));
+                    stream->getFrameDimension(dim);
+
+                    cam_frame_len_offset_t offset;
+                    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+                    stream->getFrameOffset(offset);
+
+                    if (timeinfo != NULL)
+                        strftime (timeBuf, sizeof(timeBuf),"/data/misc/camera/%Y%m%d%H%M%S", timeinfo);
+                    String8 filePath(timeBuf);
+                    switch (dump_type) {
+                    case QCAMERA_DUMP_FRM_PREVIEW:
+                        {
+                            snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_THUMBNAIL:
+                        {
+                            snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_SNAPSHOT:
+                        {
+                            snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_VIDEO:
+                        {
+                            snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_RAW:
+                        {
+                            snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
+                                     mDumpFrmCnt, offset.mp[0].stride,
+                                     offset.mp[0].scanline, frame->frame_idx);
+                        }
+                        break;
+                    default:
+                        ALOGE("%s: Not supported for dumping stream type %d",
+                              __func__, dump_type);
+                        return;
+                    }
+
+                    filePath.append(buf);
+                    int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+                    if (file_fd > 0) {
+                        void *data = NULL;
+                        ssize_t written_len = 0;
+
+                        for (uint32_t i = 0; i < offset.num_planes; i++) {
+                            uint32_t index = offset.mp[i].offset;
+                            if (i > 0) {
+                                index += offset.mp[i-1].len;
+                            }
+                            for (int j = 0; j < offset.mp[i].height; j++) {
+                                data = (void *)((uint8_t *)frame->buffer + index);
+                                written_len += write(file_fd, data,
+                                        (size_t)offset.mp[i].width);
+                                index += (uint32_t)offset.mp[i].stride;
+                            }
+                        }
+
+                        CDBG_HIGH("%s: written number of bytes %d\n", __func__, written_len);
+                        close(file_fd);
+                    } else {
+                        ALOGE("%s: failed to open file for image dumping", __func__);
+                    }
+                    mDumpFrmCnt++;
+                }
+            }
+            stream->mDumpSkipCnt++;
+        }
+    } else {
+        mDumpFrmCnt = 0;
+    }
+    if (stream)
+        stream->mDumpFrame = mDumpFrmCnt;
+}
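+
+// Illustrative usage (which stream each low bit selects is an assumption; see the
+// QCAMERA_DUMP_FRM_* definitions): "adb shell setprop persist.camera.dumpimg 655617"
+// (0x000A0101) requests 10 frames, skip interval 1, with bit 0 of the type mask set.
+// Note the property is parsed with atoi(), so the value must be given in decimal.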
+
+/*===========================================================================
+ * FUNCTION   : debugShowVideoFPS
+ *
+ * DESCRIPTION: helper function to log video frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowVideoFPS()
+{
+    static int n_vFrameCount = 0;
+    static int n_vLastFrameCount = 0;
+    static nsecs_t n_vLastFpsTime = 0;
+    static double n_vFps = 0;
+    n_vFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - n_vLastFpsTime;
+    if (diff > ms2ns(250)) {
+        n_vFps = (((double)(n_vFrameCount - n_vLastFrameCount)) *
+                (double)(s2ns(1))) / (double)diff;
+        ALOGE("Video Frames Per Second: %.4f", n_vFps);
+        n_vLastFpsTime = now;
+        n_vLastFrameCount = n_vFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : debugShowPreviewFPS
+ *
+ * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowPreviewFPS()
+{
+    static int n_pFrameCount = 0;
+    static int n_pLastFrameCount = 0;
+    static nsecs_t n_pLastFpsTime = 0;
+    static double n_pFps = 0;
+    n_pFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - n_pLastFpsTime;
+    if (diff > ms2ns(250)) {
+        n_pFps = (((double)(n_pFrameCount - n_pLastFrameCount)) *
+                (double)(s2ns(1))) / (double)diff;
+        CDBG_HIGH("[KPI Perf] %s: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f", __func__, n_pFps);
+        n_pLastFpsTime = now;
+        n_pLastFrameCount = n_pFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCbNotifier
+ *
+ * DESCRIPTION: Destructor for exiting the callback context.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCbNotifier::~QCameraCbNotifier()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : exit
+ *
+ * DESCRIPTION: exit notify thread.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::exit()
+{
+    mActive = false;
+    mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifications
+ *
+ * DESCRIPTION: callback for releasing data stored in the callback queue.
+ *
+ * PARAMETERS :
+ *   @data      : data to be released
+ *   @user_data : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+
+    if ( ( NULL != arg ) && ( NULL != user_data ) ) {
+        if ( arg->release_cb ) {
+            arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : matchSnapshotNotifications
+ *
+ * DESCRIPTION: matches snapshot data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
+                                                   void */*user_data*/)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+    if ( NULL != arg ) {
+        if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : matchPreviewNotifications
+ *
+ * DESCRIPTION: matches preview data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchPreviewNotifications(void *data,
+        void */*user_data*/)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+    if (NULL != arg) {
+        if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
+                (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : matchTimestampNotifications
+ *
+ * DESCRIPTION: matches timestamp data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchTimestampNotifications(void *data, void * /*user_data*/)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+    if (NULL != arg) {
+        if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
+            (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : cbNotifyRoutine
+ *
+ * DESCRIPTION: callback thread that processes queued commands and issues
+ *              the corresponding callbacks to the upper layers.
+ *
+ * PARAMETERS :
+ *   @data    : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void * QCameraCbNotifier::cbNotifyRoutine(void * data)
+{
+    int running = 1;
+    int ret;
+    QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    cmdThread->setName("CAM_cbNotify");
+    uint8_t isSnapshotActive = FALSE;
+    bool longShotEnabled = false;
+    uint32_t numOfSnapshotExpected = 0;
+    uint32_t numOfSnapshotRcvd = 0;
+    int32_t cbStatus = NO_ERROR;
+
+    CDBG("%s: E", __func__);
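+    // Snapshot data callbacks are counted so that QCAMERA_SM_EVT_SNAPSHOT_DONE can be
+    // raised once the expected number of snapshots (non-longshot case) has been delivered.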
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                CDBG("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        CDBG("%s: get cmd %d", __func__, cmd);
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            {
+                isSnapshotActive = TRUE;
+                numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
+                longShotEnabled = pme->mParent->isLongshotEnabled();
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                pme->mDataQ.flushNodes(matchSnapshotNotifications);
+                isSnapshotActive = FALSE;
+
+                numOfSnapshotExpected = 0;
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                qcamera_callback_argm_t *cb =
+                    (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
+                cbStatus = NO_ERROR;
+                if (NULL != cb) {
+                    CDBG("%s: cb type %d received",
+                          __func__,
+                          cb->cb_type);
+
+                    if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
+                        switch (cb->cb_type) {
+                        case QCAMERA_NOTIFY_CALLBACK:
+                            {
+                                if (cb->msg_type == CAMERA_MSG_FOCUS) {
+                                    ATRACE_INT("Camera:AutoFocus", 0);
+                                    CDBG_HIGH("[KPI Perf] %s : PROFILE_SENDING_FOCUS_EVT_TO APP",
+                                            __func__);
+                                }
+                                if (pme->mNotifyCb) {
+                                    pme->mNotifyCb(cb->msg_type,
+                                                  cb->ext1,
+                                                  cb->ext2,
+                                                  pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s : notify callback not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_CALLBACK:
+                            {
+                                if (pme->mDataCb) {
+                                    pme->mDataCb(cb->msg_type,
+                                                 cb->data,
+                                                 cb->index,
+                                                 cb->metadata,
+                                                 pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s : data callback not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_TIMESTAMP_CALLBACK:
+                            {
+                                if(pme->mDataCbTimestamp) {
+                                    pme->mDataCbTimestamp(cb->timestamp,
+                                                          cb->msg_type,
+                                                          cb->data,
+                                                          cb->index,
+                                                          pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s:data cb with tmp not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_SNAPSHOT_CALLBACK:
+                            {
+                                if (TRUE == isSnapshotActive && pme->mDataCb ) {
+                                    if (!longShotEnabled) {
+                                        numOfSnapshotRcvd++;
+                                        if (numOfSnapshotExpected > 0 &&
+                                            numOfSnapshotExpected == numOfSnapshotRcvd) {
+                                            // notify HWI that snapshot is done
+                                            pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
+                                                                         NULL);
+                                        }
+                                    }
+                                    pme->mDataCb(cb->msg_type,
+                                                 cb->data,
+                                                 cb->index,
+                                                 cb->metadata,
+                                                 pme->mCallbackCookie);
+                                }
+                            }
+                            break;
+                        default:
+                            {
+                                ALOGE("%s : invalid cb type %d",
+                                      __func__,
+                                      cb->cb_type);
+                                cbStatus = BAD_VALUE;
+                            }
+                            break;
+                        };
+                    } else {
+                        ALOGE("%s : cb message type %d not enabled!",
+                              __func__,
+                              cb->msg_type);
+                        cbStatus = INVALID_OPERATION;
+                    }
+                    if ( cb->release_cb ) {
+                        cb->release_cb(cb->user_data, cb->cookie, cbStatus);
+                    }
+                    delete cb;
+                } else {
+                    ALOGE("%s: invalid cb type passed", __func__);
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            {
+                running = 0;
+                pme->mDataQ.flush();
+            }
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    CDBG("%s: X", __func__);
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : notifyCallback
+ *
+ * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
+ *
+ * PARAMETERS :
+ *   @cbArgs  : callback arguments
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
+{
+    if (!mActive) {
+        ALOGE("%s: notify thread is not active", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
+    if (NULL == cbArg) {
+        ALOGE("%s: no mem for qcamera_callback_argm_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
+    *cbArg = cbArgs;
+
+    if (mDataQ.enqueue((void *)cbArg)) {
+        return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        ALOGE("%s: Error adding cb data into queue", __func__);
+        delete cbArg;
+        return UNKNOWN_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: Initializes the callback functions, which would be used for
+ *              communication with the upper layers and launches the callback
+ *              context in which the callbacks will occur.
+ *
+ * PARAMETERS :
+ *   @notifyCb          : notification callback
+ *   @dataCb            : data callback
+ *   @dataCbTimestamp   : data with timestamp callback
+ *   @callbackCookie    : callback context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
+                                     camera_data_callback dataCb,
+                                     camera_data_timestamp_callback dataCbTimestamp,
+                                     void *callbackCookie)
+{
+    if ( ( NULL == mNotifyCb ) &&
+         ( NULL == mDataCb ) &&
+         ( NULL == mDataCbTimestamp ) &&
+         ( NULL == mCallbackCookie ) ) {
+        mNotifyCb = notifyCb;
+        mDataCb = dataCb;
+        mDataCbTimestamp = dataCbTimestamp;
+        mCallbackCookie = callbackCookie;
+        mActive = true;
+        mProcTh.launch(cbNotifyRoutine, this);
+    } else {
+        ALOGE("%s : Camera callback notifier already initialized!",
+              __func__);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : flushPreviewNotifications
+ *
+ * DESCRIPTION: flush all pending preview notifications
+ *              from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushPreviewNotifications()
+{
+    if (!mActive) {
+        ALOGE("%s: notify thread is not active", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    mDataQ.flushNodes(matchPreviewNotifications);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushVideoNotifications
+ *
+ * DESCRIPTION: flush all pending video notifications
+ *              from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushVideoNotifications()
+{
+    if (!mActive) {
+        ALOGE("notify thread is not active");
+        return UNKNOWN_ERROR;
+    }
+    mDataQ.flushNodes(matchTimestampNotifications);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startSnapshots
+ *
+ * DESCRIPTION: Enables snapshot mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::startSnapshots()
+{
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
+}
+
+/*===========================================================================
+ * FUNCTION   : stopSnapshots
+ *
+ * DESCRIPTION: Disables snapshot processing mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::stopSnapshots()
+{
+    mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCamera2Hal.cpp b/msm8974/QCamera2/HAL/QCamera2Hal.cpp
new file mode 100644
index 0000000..4898b10
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCamera2Hal.cpp
@@ -0,0 +1,54 @@
+/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "QCamera2Factory.h"
+
+static hw_module_t camera_common = {
+    .tag = HARDWARE_MODULE_TAG,
+    .module_api_version = CAMERA_MODULE_API_VERSION_1_0,
+    .hal_api_version = HARDWARE_HAL_API_VERSION,
+    .id = CAMERA_HARDWARE_MODULE_ID,
+    .name = "QCamera Module",
+    .author = "Qualcomm Innovation Center Inc",
+    .methods = &qcamera::QCamera2Factory::mModuleMethods,
+    .dso = NULL,
+    .reserved = {0},
+};
+
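+// HAL_MODULE_INFO_SYM is the symbol the Android HAL loader resolves when this camera
+// module is loaded; camera enumeration and open calls are routed through QCamera2Factory.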
+camera_module_t HAL_MODULE_INFO_SYM = {
+    .common = camera_common,
+    .get_number_of_cameras = qcamera::QCamera2Factory::get_number_of_cameras,
+    .get_camera_info = qcamera::QCamera2Factory::get_camera_info,
+    .set_callbacks = NULL,
+    .get_vendor_tag_ops = NULL,
+    .open_legacy = NULL,
+    .set_torch_mode = NULL,
+    .init = NULL,
+    .reserved = {0}
+};
diff --git a/msm8974/QCamera2/HAL/QCameraAllocator.h b/msm8974/QCamera2/HAL/QCameraAllocator.h
new file mode 100644
index 0000000..13672d2
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraAllocator.h
@@ -0,0 +1,53 @@
+/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_ALLOCATOR__
+#define __QCAMERA_ALLOCATOR__
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraMemory;
+class QCameraHeapMemory;
+
+class QCameraAllocator {
+public:
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+            size_t size, int stride, int scanline, uint8_t &bufferCnt) = 0;
+    virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+            size_t size, uint8_t &bufferCnt) = 0;
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type) = 0;
+    virtual ~QCameraAllocator() {}
+};
+
+}; /* namespace qcamera */
+#endif /* __QCAMERA_ALLOCATOR__ */
diff --git a/msm8974/QCamera2/HAL/QCameraChannel.cpp b/msm8974/QCamera2/HAL/QCameraChannel.cpp
new file mode 100644
index 0000000..1384af9
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraChannel.cpp
@@ -0,0 +1,1355 @@
+/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraChannel"
+
+#include <utils/Errors.h>
+#include "QCameraParameters.h"
+#include "QCamera2HWI.h"
+#include "QCameraChannel.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: constructor of QCameraChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel(uint32_t cam_handle,
+                               mm_camera_ops_t *cam_ops)
+{
+    m_camHandle = cam_handle;
+    m_camOps = cam_ops;
+    m_bIsActive = false;
+    m_bAllowDynBufAlloc = false;
+
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: default constructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel()
+{
+    m_camHandle = 0;
+    m_camOps = NULL;
+    m_bIsActive = false;
+
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraChannel
+ *
+ * DESCRIPTION: destructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::~QCameraChannel()
+{
+    if (m_bIsActive) {
+        stop();
+    }
+
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL) {
+                if (m_handle == mStreams[i]->getChannelHandle()) {
+                    delete mStreams[i];
+                }
+        }
+    }
+    mStreams.clear();
+    m_camOps->delete_channel(m_camHandle, m_handle);
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : deleteChannel
+ *
+ * DESCRIPTION: deletes a camera channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraChannel::deleteChannel()
+{
+    if (m_bIsActive) {
+        stop();
+    }
+
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL) {
+                if (m_handle == mStreams[i]->getChannelHandle()) {
+                    mStreams[i]->deleteStream();
+                }
+        }
+    }
+    m_camOps->delete_channel(m_camHandle, m_handle);
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of channel
+ *
+ * PARAMETERS :
+ *   @attr    : channel bundle attribute setting
+ *   @dataCB  : data notify callback
+ *   @userData: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::init(mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t dataCB,
+                             void *userData)
+{
+    m_handle = m_camOps->add_channel(m_camHandle,
+                                      attr,
+                                      dataCB,
+                                      userData);
+    if (m_handle == 0) {
+        ALOGE("%s: Add channel failed", __func__);
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @streamInfoBuf  : ptr to buf that contains stream info
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @paddingInfo    : padding information
+ *   @stream_cb      : stream data notify callback
+ *   @userdata       : user data ptr
+ *   @bDynAllocBuf   : flag indicating whether buffers may be allocated in two steps
+ *   @bDeffAlloc     : flag indicating whether stream buffer allocation may be deferred
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::addStream(QCameraAllocator &allocator,
+                                  QCameraHeapMemory *streamInfoBuf,
+                                  uint8_t minStreamBufNum,
+                                  cam_padding_info_t *paddingInfo,
+                                  stream_cb_routine stream_cb,
+                                  void *userdata,
+                                  bool bDynAllocBuf,
+                                  bool bDeffAlloc)
+{
+    int32_t rc = NO_ERROR;
+    if (mStreams.size() >= MAX_STREAM_NUM_IN_BUNDLE) {
+        ALOGE("%s: stream number (%d) exceeds max limit (%d)",
+              __func__, mStreams.size(), MAX_STREAM_NUM_IN_BUNDLE);
+        return BAD_VALUE;
+    }
+    QCameraStream *pStream = new QCameraStream(allocator,
+                                               m_camHandle,
+                                               m_handle,
+                                               m_camOps,
+                                               paddingInfo,
+                                               bDeffAlloc);
+    if (pStream == NULL) {
+        ALOGE("%s: No mem for Stream", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pStream->init(streamInfoBuf, minStreamBufNum,
+                       stream_cb, userdata, bDynAllocBuf);
+    if (rc == 0) {
+        mStreams.add(pStream);
+    } else {
+        delete pStream;
+    }
+    return rc;
+}
+/*===========================================================================
+ * FUNCTION   : config
+ *
+ * DESCRIPTION: Configure any deferred channel streams
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::config()
+{
+    int32_t rc = NO_ERROR;
+
+    for (size_t i = 0; i < mStreams.size(); ++i) {
+        if ((mStreams[i] != NULL) &&
+                mStreams[i]->isDeffered() &&
+                (m_handle == mStreams[i]->getChannelHandle())) {
+            rc = mStreams[i]->configStream();
+            if (rc != NO_ERROR) {
+                break;
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : linkStream
+ *
+ * DESCRIPTION: link a stream into channel
+ *
+ * PARAMETERS :
+ *   @ch      : Channel which the stream belongs to
+ *   @stream  : Stream which needs to be linked
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::linkStream(QCameraChannel *ch, QCameraStream *stream)
+{
+    int32_t rc = NO_ERROR;
+
+    if ((0 == m_handle) || (NULL == ch) || (NULL == stream)) {
+        return NO_INIT;
+    }
+
+    int32_t handle = m_camOps->link_stream(m_camHandle,
+            ch->getMyHandle(),
+            stream->getMyHandle(),
+            m_handle);
+    if (0 == handle) {
+        ALOGE("%s : Linking of stream failed", __func__);
+        rc = INVALID_OPERATION;
+    } else {
+        mStreams.add(stream);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belonging to this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::start()
+{
+    int32_t rc = NO_ERROR;
+
+    if (mStreams.size() > 1) {
+        // there is more than one stream in the channel
+        // we need to notify mctl that all streams in this channel need to be bundled
+        cam_bundle_config_t bundleInfo;
+        memset(&bundleInfo, 0, sizeof(bundleInfo));
+        rc = m_camOps->get_bundle_info(m_camHandle, m_handle, &bundleInfo);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: get_bundle_info failed", __func__);
+            return rc;
+        }
+        if (bundleInfo.num_of_streams > 1) {
+            for (int i = 0; i < bundleInfo.num_of_streams; i++) {
+                QCameraStream *pStream = getStreamByServerID(bundleInfo.stream_ids[i]);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                        // Skip metadata for reprocess now because the PP module cannot handle meta data
+                        // May need further discussion if Imaginglib needs meta data
+                        continue;
+                    }
+
+                    cam_stream_parm_buffer_t param;
+                    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+                    param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
+                    param.bundleInfo = bundleInfo;
+                    rc = pStream->setParameter(param);
+                    if (rc != NO_ERROR) {
+                        ALOGE("%s: stream setParameter for set bundle failed", __func__);
+                        return rc;
+                    }
+                }
+            }
+        }
+    }
+
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if ((mStreams[i] != NULL) &&
+                (m_handle == mStreams[i]->getChannelHandle())) {
+            mStreams[i]->start();
+        }
+    }
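+    // Start the channel at the mm-camera interface layer. The owned streams
+    // were started first above; if the channel itself fails to start, the
+    // streams are stopped again below so the channel is left inactive and
+    // consistent.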
+    rc = m_camOps->start_channel(m_camHandle, m_handle);
+
+    if (rc != NO_ERROR) {
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if ((mStreams[i] != NULL) &&
+                    (m_handle == mStreams[i]->getChannelHandle())) {
+                mStreams[i]->stop();
+            }
+        }
+    } else {
+        m_bIsActive = true;
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if (mStreams[i] != NULL) {
+                mStreams[i]->cond_signal();
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::stop()
+{
+    int32_t rc = NO_ERROR;
+    ssize_t linkedIdx = -1;
+
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL) {
+            if (m_handle == mStreams[i]->getChannelHandle()) {
+                mStreams[i]->stop();
+            } else {
+                // Linked stream is owned by another channel; remember its
+                // index so it can be removed from this channel's list below.
+                linkedIdx = (ssize_t)i;
+            }
+        }
+    }
+    if (linkedIdx >= 0) {
+        mStreams.removeAt((size_t)linkedIdx);
+    }
+
+    rc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+    m_bIsActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        if (recvd_frame->bufs[i] != NULL) {
+            for (size_t j = 0; j < mStreams.size(); j++) {
+                if (mStreams[j] != NULL &&
+                        mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
+                    rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+                    break; // break loop j
+                }
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : ptr to preview window ops table, needed to set preview
+ *                    crop information
+ *   @crop_info     : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::processZoomDone(preview_stream_ops_t *previewWindow,
+                                        cam_crop_data_t &crop_info)
+{
+    int32_t rc = NO_ERROR;
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if ((mStreams[i] != NULL) &&
+                (m_handle == mStreams[i]->getChannelHandle())) {
+            rc = mStreams[i]->processZoomDone(previewWindow, crop_info);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByHandle(uint32_t streamHandle)
+{
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
+            return mStreams[i];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByServerID
+ *
+ * DESCRIPTION: return stream object by stream server ID from daemon
+ *
+ * PARAMETERS :
+ *   @serverID : stream server ID
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByServerID(uint32_t serverID)
+{
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL && mStreams[i]->getMyServerID() == serverID) {
+            return mStreams[i];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index of streams in the channel
+ *
+ * PARAMETERS :
+ *   @index : index of stream in the channel
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByIndex(uint32_t index)
+{
+    if (index >= MAX_STREAM_NUM_IN_BUNDLE) {
+        return NULL;
+    }
+
+    if (index < mStreams.size()) {
+        return mStreams[index];
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : UpdateStreamBasedParameters
+ *
+ * DESCRIPTION: update any stream based settings from parameters
+ *
+ * PARAMETERS :
+ *   @param   : reference to parameters object
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::UpdateStreamBasedParameters(QCameraParameters &param)
+{
+    int32_t rc = NO_ERROR;
+    if (param.isPreviewFlipChanged()) {
+        // try to find preview stream
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if (mStreams[i] != NULL &&
+                (m_handle == mStreams[i]->getChannelHandle()) &&
+                (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                (mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW))) ) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_PREVIEW);
+                rc = mStreams[i]->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: set preview stream flip failed", __func__);
+                }
+            }
+        }
+    }
+    if (param.isVideoFlipChanged()) {
+        // try to find video stream
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if (mStreams[i] != NULL &&
+                (m_handle == mStreams[i]->getChannelHandle()) &&
+                (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+                (mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO))) ) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_VIDEO);
+                rc = mStreams[i]->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: set video stream flip failed", __func__);
+                }
+            }
+        }
+    }
+    if (param.isSnapshotFlipChanged()) {
+        // try to find snapshot/postview stream
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if (mStreams[i] != NULL &&
+                (m_handle == mStreams[i]->getChannelHandle()) &&
+                (mStreams[i]->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                 mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                 mStreams[i]->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                 mStreams[i]->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ) ) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+                rc = mStreams[i]->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: set snapshot stream flip failed", __func__);
+                }
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: constructor of QCameraPicChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel(uint32_t cam_handle,
+                                     mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+    m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: default constructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel()
+{
+    m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPicChannel
+ *
+ * DESCRIPTION: destructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::~QCameraPicChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ *   @num_of_snapshot : number of snapshot frames requested
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::takePicture(uint8_t num_of_snapshot)
+{
+    int32_t rc = m_camOps->request_super_buf(m_camHandle,
+                                             m_handle,
+                                             num_of_snapshot);
+    return rc;
+}
+
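+// Illustrative only: a rough sketch of how a snapshot request might be driven
+// from the HAL side. numSnapshots and frameIdx are caller-chosen placeholders.
+//
+//   picChannel->takePicture(numSnapshots);   // queue N super buffers
+//   ...                                      // frames delivered via the channel CB
+//   picChannel->cancelPicture();             // or picChannel->flushSuperbuffer(frameIdx)
+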
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::cancelPicture()
+{
+    int32_t rc = m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startAdvancedCapture
+ *
+ * DESCRIPTION: start advanced capture based on advanced capture type.
+ *
+ * PARAMETERS :
+ *   @type : advanced capture type.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::startAdvancedCapture(mm_camera_advanced_capture_t type)
+{
+    int32_t rc = m_camOps->process_advanced_capture(m_camHandle, type,
+                                              m_handle, 1);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushSuperbuffer
+ *
+ * DESCRIPTION: flush all queued super buffer frames.
+ *
+ * PARAMETERS :
+ *   @frame_idx : frame index up to which the queued super buffers are flushed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::flushSuperbuffer(uint32_t frame_idx)
+{
+    int32_t rc = m_camOps->flush_super_buf_queue(m_camHandle, m_handle, frame_idx);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: constructor of QCameraVideoChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel(uint32_t cam_handle,
+                                         mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: default constructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoChannel
+ *
+ * DESCRIPTION: destructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::~QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrame
+ *
+ * DESCRIPTION: return video frame from app
+ *
+ * PARAMETERS :
+ *   @opaque     : ptr to video frame to be returned
+ *   @isMetaData : if frame is a metadata or real frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::releaseFrame(const void * opaque, bool isMetaData)
+{
+    QCameraStream *pVideoStream = NULL;
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL && mStreams[i]->isTypeOf(CAM_STREAM_TYPE_VIDEO)) {
+            pVideoStream = mStreams[i];
+            break;
+        }
+    }
+
+    if (NULL == pVideoStream) {
+        ALOGE("%s: No video stream in the channel", __func__);
+        return BAD_VALUE;
+    }
+
+    int32_t rc = pVideoStream->bufDone(opaque, isMetaData);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel(uint32_t cam_handle,
+                                                 mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops),
+    m_pSrcChannel(NULL)
+{
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: default constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel() :
+    m_pSrcChannel(NULL)
+{
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::~QCameraReprocessChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @config         : pp feature configuration
+ *   @pSrcChannel    : ptr to input source channel that needs reprocess
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @burstNum       : number of burst captures needed
+ *   @paddingInfo    : padding information
+ *   @param          : reference to parameters
+ *   @contStream     : continuous streaming mode or burst
+ *   @offline        : configure for offline reprocessing
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::addReprocStreamsFromSource(
+        QCameraAllocator& allocator, cam_pp_feature_config_t &config,
+        QCameraChannel *pSrcChannel, uint8_t minStreamBufNum, uint8_t burstNum,
+        cam_padding_info_t *paddingInfo, QCameraParameters &param, bool contStream,
+        bool offline)
+{
+    int32_t rc = 0;
+    QCameraStream *pStream = NULL;
+    QCameraHeapMemory *pStreamInfoBuf = NULL;
+    cam_stream_info_t *streamInfo = NULL;
+
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+
+    for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); i++) {
+        pStream = pSrcChannel->getStreamByIndex(i);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA) ||
+                pStream->isTypeOf(CAM_STREAM_TYPE_RAW)) {
+                // Skip metadata & raw for reprocess now because the PP module cannot
+                // handle meta data & raw. May need further discussion if Imaginglib needs meta data
+                continue;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+                // Skip postview: in the non-ZSL case, we don't want to send the
+                // thumbnail through reprocess.
+                // Skip preview: for the same reason in the ZSL case
+                continue;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                uint32_t feature_mask = config.feature_mask;
+
+                if ((feature_mask & ~CAM_QCOM_FEATURE_HDR) == 0
+                        && param.isHDREnabled()
+                        && !param.isHDRThumbnailProcessNeeded()) {
+
+                    // Skip thumbnail stream reprocessing in HDR
+                    // if only hdr is enabled
+                    continue;
+                }
+
+                // skip thumbnail reprocessing if not needed
+                if (!param.needThumbnailReprocess(&feature_mask)) {
+                    continue;
+                }
+
+                //Don't do WNR for thumbnail
+                feature_mask &= ~CAM_QCOM_FEATURE_DENOISE2D;
+                if (!feature_mask) {
+                    // Skip thumbnail stream reprocessing since no other
+                    // reprocessing is enabled.
+                    continue;
+                }
+            }
+
+            pStreamInfoBuf = allocator.allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+            if (pStreamInfoBuf == NULL) {
+                ALOGE("%s: no mem for stream info buf", __func__);
+                rc = NO_MEMORY;
+                break;
+            }
+
+            streamInfo = (cam_stream_info_t *)pStreamInfoBuf->getPtr(0);
+            memset(streamInfo, 0, sizeof(cam_stream_info_t));
+            streamInfo->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+            rc = pStream->getFormat(streamInfo->fmt);
+            rc = pStream->getFrameDimension(streamInfo->dim);
+
+            //FSSR generates 4x output
+            uint32_t feature_mask = config.feature_mask;
+            if (feature_mask & CAM_QCOM_FEATURE_FSSR) {
+                (streamInfo->dim).width *= 2;
+                (streamInfo->dim).height *= 2;
+            }
+
+            if ( contStream ) {
+                streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+                streamInfo->num_of_burst = 0;
+            } else {
+                streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+                streamInfo->num_of_burst = burstNum;
+            }
+
+            cam_stream_reproc_config_t rp_cfg;
+            memset(&rp_cfg, 0, sizeof(cam_stream_reproc_config_t));
+            if (offline) {
+                cam_frame_len_offset_t offset;
+                memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+
+                rp_cfg.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+                pStream->getFormat(rp_cfg.offline.input_fmt);
+                pStream->getFrameDimension(rp_cfg.offline.input_dim);
+                pStream->getFrameOffset(offset);
+                rp_cfg.offline.input_buf_planes.plane_info = offset;
+                rp_cfg.offline.input_type = pStream->getMyType();
+                //For input metadata + input buffer
+                rp_cfg.offline.num_of_bufs = 2;
+            } else {
+                rp_cfg.pp_type = CAM_ONLINE_REPROCESS_TYPE;
+                rp_cfg.online.input_stream_id = pStream->getMyServerID();
+                if (CAM_STREAM_TYPE_OFFLINE_PROC ==
+                        (rp_cfg.online.input_stream_type = pStream->getMyType())) {
+                    rp_cfg.online.input_stream_type = pStream->getMyOriginalType();
+                }
+            }
+            streamInfo->reprocess_config = rp_cfg;
+            streamInfo->reprocess_config.pp_feature_config = config;
+
+            if (!(pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT))) {
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &= ~CAM_QCOM_FEATURE_CAC;
+                //Don't do WNR for thumbnail
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &= ~CAM_QCOM_FEATURE_DENOISE2D;
+
+                if (param.isHDREnabled()
+                  && !param.isHDRThumbnailProcessNeeded()){
+                    streamInfo->reprocess_config.pp_feature_config.feature_mask
+                      &= ~CAM_QCOM_FEATURE_HDR;
+                }
+            }
+
+            uint32_t mask;
+            mask = streamInfo->reprocess_config.pp_feature_config.feature_mask;
+            if (mask & CAM_QCOM_FEATURE_CPP) {
+                if (streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_90 ||
+                    streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_270) {
+                    // rotated by 90 or 270, need to switch width and height
+                    int32_t temp = streamInfo->dim.height;
+                    streamInfo->dim.height = streamInfo->dim.width;
+                    streamInfo->dim.width = temp;
+                }
+            }
+
+            cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+            if (offline) {
+                type = streamInfo->reprocess_config.offline.input_type;
+            } else {
+                type = streamInfo->reprocess_config.online.input_stream_type;
+            }
+            if (type == CAM_STREAM_TYPE_SNAPSHOT) {
+                int flipMode = param.getFlipMode(type);
+                if (flipMode > 0) {
+                    streamInfo->reprocess_config.pp_feature_config.feature_mask |= CAM_QCOM_FEATURE_FLIP;
+                    streamInfo->reprocess_config.pp_feature_config.flip = (uint32_t)flipMode;
+                }
+            }
+
+            if (mask & CAM_QCOM_FEATURE_SCALE) {
+                // we only scale the snapshot frame
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                    // also check whether rotation is needed
+                    if ((mask & CAM_QCOM_FEATURE_CPP) &&
+                        (streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_90 ||
+                         streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_270)) {
+                        // need to swap width and height
+                        streamInfo->dim.width = streamInfo->reprocess_config.pp_feature_config.scale_param.output_height;
+                        streamInfo->dim.height = streamInfo->reprocess_config.pp_feature_config.scale_param.output_width;
+                    } else {
+                        streamInfo->dim.width = streamInfo->reprocess_config.pp_feature_config.scale_param.output_width;
+                        streamInfo->dim.height = streamInfo->reprocess_config.pp_feature_config.scale_param.output_height;
+                    }
+                }
+                CDBG_HIGH("%s: stream width=%d, height=%d.", __func__, streamInfo->dim.width,
+                           streamInfo->dim.height);
+            }
+
+            // save source stream handler
+            mSrcStreamHandles[mStreams.size()] = pStream->getMyHandle();
+
+            // add reprocess stream
+            rc = addStream(allocator,
+                           pStreamInfoBuf, minStreamBufNum,
+                           paddingInfo,
+                           NULL, NULL, false);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: add reprocess stream failed, ret = %d", __func__, rc);
+                break;
+            }
+        }
+    }
+
+    if (rc == NO_ERROR) {
+        m_pSrcChannel = pSrcChannel;
+    }
+    return rc;
+}
+
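+// Illustrative only: a rough sketch of how the HAL might wire up a reprocess
+// channel around a source channel. All variable names below are placeholders,
+// not part of this file.
+//
+//   QCameraReprocessChannel *pp = new QCameraReprocessChannel(camHandle, camOps);
+//   pp->init(&attr, reprocDataNotifyCb, cookie);
+//   pp->addReprocStreamsFromSource(allocator, ppConfig, srcChannel, bufCnt,
+//                                  burstCnt, &padding, params,
+//                                  true /*contStream*/, false /*offline*/);
+//   pp->start();
+//   pp->doReprocess(superBuf);     // online reprocess of a bundled frame
+//   ...
+//   pp->stop();
+//   pp->deleteChannel();
+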
+/*===========================================================================
+ * FUNCTION   : getStreamBySrouceHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCameraStream * QCameraReprocessChannel::getStreamBySrouceHandle(uint32_t srcHandle)
+{
+    QCameraStream *pStream = NULL;
+
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mSrcStreamHandles[i] == srcHandle) {
+            pStream = mStreams[i];
+            break;
+        }
+    }
+
+    return pStream;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop channel and unmap offline buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::stop()
+{
+    int32_t rc = QCameraChannel::stop();
+
+    if (!mOfflineBuffers.empty()) {
+        QCameraStream *stream = NULL;
+        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+        int error = NO_ERROR;
+        for( ; it != mOfflineBuffers.end(); it++) {
+            stream = (*it).stream;
+            if (NULL != stream) {
+                error = stream->unmapBuf((*it).type,
+                                         (*it).index,
+                                         -1);
+                if (NO_ERROR != error) {
+                    ALOGE("%s: Error during offline buffer unmap %d",
+                          __func__, error);
+                }
+            }
+        }
+        mOfflineBuffers.clear();
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocessOffline
+ *
+ * DESCRIPTION: request to do offline reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocessOffline(
+                mm_camera_super_buf_t *frame)
+{
+    int32_t rc = 0;
+    OfflineBuffer mappedBuffer;
+
+    if (mStreams.size() < 1) {
+        ALOGE("%s: No reprocess streams", __func__);
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        ALOGE("%s: No source channel for reprocess", __func__);
+        return -1;
+    }
+
+    if (frame == NULL) {
+        ALOGE("%s: Invalid source frame", __func__);
+        return BAD_VALUE;
+    }
+
+    // find meta data stream and index of meta data frame in the superbuf
+    mm_camera_buf_def_t *meta_buf = NULL;
+    QCameraStream *pStream = NULL;
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        pStream = m_pSrcChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                meta_buf = frame->bufs[i];
+                break;
+            }
+        }
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+        if ((pStream != NULL) &&
+                (m_handle == pStream->getChannelHandle())) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                continue;
+            }
+
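+            // Offline reprocess maps the input image buffer (and, when
+            // present, the metadata buffer) onto the reprocess stream. The
+            // mappings are recorded in mOfflineBuffers and unmapped again
+            // in stop().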
+            uint32_t meta_buf_index = 0;
+            if (NULL != meta_buf) {
+                rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
+                                     meta_buf_index,
+                                     -1,
+                                     meta_buf->fd,
+                                     meta_buf->frame_len);
+                if (NO_ERROR != rc ) {
+                    ALOGE("%s : Error during metadata buffer mapping",
+                          __func__);
+                    break;
+                }
+
+                // Only record the metadata mapping when it was actually made,
+                // so that stop() does not try to unmap a buffer that was
+                // never mapped.
+                mappedBuffer.index = meta_buf_index;
+                mappedBuffer.stream = pStream;
+                mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
+                mOfflineBuffers.push_back(mappedBuffer);
+            }
+
+            uint32_t buf_index = 1;
+            rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_index,
+                                 -1,
+                                 frame->bufs[i]->fd,
+                                 frame->bufs[i]->frame_len);
+            if (NO_ERROR != rc ) {
+                ALOGE("%s : Error during reprocess input buffer mapping",
+                      __func__);
+                break;
+            }
+            mappedBuffer.index = buf_index;
+            mappedBuffer.stream = pStream;
+            mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+            mOfflineBuffers.push_back(mappedBuffer);
+
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_index;
+            param.reprocess.frame_pp_config.uv_upsample =
+                            frame->bufs[i]->is_uv_subsampled;
+            if (NULL != meta_buf) {
+                // we have meta data sent together with reprocess frame
+                param.reprocess.meta_present = 1;
+                param.reprocess.meta_buf_index = meta_buf_index;
+                uint32_t stream_id = frame->bufs[i]->stream_id;
+                QCameraStream *srcStream =
+                        m_pSrcChannel->getStreamByHandle(stream_id);
+                cam_metadata_info_t *meta =
+                        (cam_metadata_info_t *)meta_buf->buffer;
+                if ((NULL != meta) && (NULL != srcStream)) {
+
+                    for (int j = 0; j < MAX_NUM_STREAMS; j++) {
+                        if (meta->crop_data.crop_info[j].stream_id ==
+                                        srcStream->getMyServerID()) {
+                            param.reprocess.frame_pp_config.crop.crop_enabled = 1;
+                            param.reprocess.frame_pp_config.crop.input_crop =
+                                    meta->crop_data.crop_info[j].crop;
+                            break;
+                        }
+                    }
+                }
+            }
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: stream setParameter for reprocess failed",
+                      __func__);
+                break;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(mm_camera_super_buf_t *frame)
+{
+    int32_t rc = 0;
+    if (mStreams.size() < 1) {
+        ALOGE("%s: No reprocess streams", __func__);
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        ALOGE("%s: No source channel for reprocess", __func__);
+        return -1;
+    }
+
+    // find meta data stream and index of meta data frame in the superbuf
+    QCameraStream *pMetaStream = NULL;
+    uint32_t meta_buf_index = 0;
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream = m_pSrcChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                meta_buf_index = frame->bufs[i]->buf_idx;
+                pMetaStream = pStream;
+                break;
+            }
+        }
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+        if ((pStream != NULL) &&
+                (m_handle == pStream->getChannelHandle())) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                // Skip metadata for reprocess now because the PP module cannot handle meta data
+                // May need further discussion if Imaginglib needs meta data
+                continue;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+                // Skip postview: in the non-ZSL case, we don't want to send the
+                // thumbnail through reprocess.
+                // Skip preview: for the same reason in the ZSL case
+                continue;
+            }
+
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+            param.reprocess.frame_idx = frame->bufs[i]->frame_idx;
+            param.reprocess.frame_pp_config.uv_upsample = frame->bufs[i]->is_uv_subsampled;
+            if (pMetaStream != NULL) {
+                // we have meta data frame bundled, sent together with reprocess frame
+                param.reprocess.meta_present = 1;
+                param.reprocess.meta_stream_handle = pMetaStream->getMyServerID();
+                param.reprocess.meta_buf_index = meta_buf_index;
+            }
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: stream setParameter for reprocess failed", __func__);
+                break;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @buf_fd     : fd to the input buffer that needs reprocess
+ *   @buf_length : length of the input buffer
+ *   @ret_val    : result of reprocess.
+ *                 Example: could be a face ID when registering a face image.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(int buf_fd,
+        size_t buf_length, int32_t &ret_val)
+{
+    int32_t rc = 0;
+    if (mStreams.size() < 1) {
+        ALOGE("%s: No reprocess streams", __func__);
+        return -1;
+    }
+
+    uint32_t buf_idx = 0;
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if ((mStreams[i] != NULL) &&
+                (m_handle != mStreams[i]->getChannelHandle())) {
+            continue;
+        }
+        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_idx, -1,
+                                 buf_fd, buf_length);
+
+        if (rc == NO_ERROR) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_idx;
+            rc = mStreams[i]->setParameter(param);
+            if (rc == NO_ERROR) {
+                ret_val = param.reprocess.ret_val;
+            }
+            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                  buf_idx, -1);
+        }
+    }
+    return rc;
+}
+
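+// Illustrative only: offline reprocess of a single ION buffer, for example
+// when registering a face image. imgBufFd and imgBufLen are placeholders for
+// a buffer provided by the caller.
+//
+//   int32_t faceId = -1;
+//   int32_t rc = reprocChannel->doReprocess(imgBufFd, imgBufLen, faceId);
+//   if (rc == NO_ERROR) {
+//       // faceId now holds the value reported back by the reprocess stream
+//   }
+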
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraChannel.h b/msm8974/QCamera2/HAL/QCameraChannel.h
new file mode 100644
index 0000000..07fdc97
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraChannel.h
@@ -0,0 +1,158 @@
+/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CHANNEL_H__
+#define __QCAMERA_CHANNEL_H__
+
+#include <hardware/camera.h>
+#include "QCameraStream.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraChannel
+{
+public:
+    QCameraChannel(uint32_t cam_handle,
+                   mm_camera_ops_t *cam_ops);
+    QCameraChannel();
+    virtual ~QCameraChannel();
+    virtual int32_t init(mm_camera_channel_attr_t *attr,
+                         mm_camera_buf_notify_t dataCB, // data CB for channel data
+                         void *userData);
+    // Ownership of streamInfoBuf is transferred from the caller to the channel with this call.
+    virtual int32_t addStream(QCameraAllocator& allocator,
+                              QCameraHeapMemory *streamInfoBuf,
+                              uint8_t minStreamBufnum,
+                              cam_padding_info_t *paddingInfo,
+                              stream_cb_routine stream_cb,
+                              void *userdata,
+                              bool bDynAllocBuf,
+                              bool bDeffAlloc = false);
+    virtual int32_t linkStream(QCameraChannel *ch, QCameraStream *stream);
+
+    virtual int32_t start();
+    virtual int32_t stop();
+    virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    int32_t config();
+    QCameraStream *getStreamByHandle(uint32_t streamHandle);
+    uint32_t getMyHandle() const {return m_handle;};
+    uint32_t getNumOfStreams() const {
+        return (uint32_t) mStreams.size();
+    }
+    QCameraStream *getStreamByIndex(uint32_t index);
+    QCameraStream *getStreamByServerID(uint32_t serverID);
+    int32_t UpdateStreamBasedParameters(QCameraParameters &param);
+    void deleteChannel();
+
+protected:
+    uint32_t m_camHandle;
+    mm_camera_ops_t *m_camOps;
+    bool m_bIsActive;
+    bool m_bAllowDynBufAlloc; // if buf allocation can be in two steps
+
+    uint32_t m_handle;
+    Vector<QCameraStream *> mStreams;
+    mm_camera_buf_notify_t mDataCB;
+    void *mUserData;
+};
+
+// burst pic channel: i.e. zsl burst mode
+class QCameraPicChannel : public QCameraChannel
+{
+public:
+    QCameraPicChannel(uint32_t cam_handle,
+                      mm_camera_ops_t *cam_ops);
+    QCameraPicChannel();
+    virtual ~QCameraPicChannel();
+    int32_t takePicture(uint8_t num_of_snapshot);
+    int32_t cancelPicture();
+    int32_t startAdvancedCapture(mm_camera_advanced_capture_t type);
+    int32_t flushSuperbuffer(uint32_t frame_idx);
+};
+
+// video channel class
+class QCameraVideoChannel : public QCameraChannel
+{
+public:
+    QCameraVideoChannel(uint32_t cam_handle,
+                        mm_camera_ops_t *cam_ops);
+    QCameraVideoChannel();
+    virtual ~QCameraVideoChannel();
+    int32_t releaseFrame(const void *opaque, bool isMetaData);
+};
+
+// reprocess channel class
+class QCameraReprocessChannel : public QCameraChannel
+{
+public:
+    QCameraReprocessChannel(uint32_t cam_handle,
+                            mm_camera_ops_t *cam_ops);
+    QCameraReprocessChannel();
+    virtual ~QCameraReprocessChannel();
+    int32_t addReprocStreamsFromSource(QCameraAllocator& allocator,
+                                       cam_pp_feature_config_t &config,
+                                       QCameraChannel *pSrcChannel,
+                                       uint8_t minStreamBufNum,
+                                       uint8_t burstNum,
+                                       cam_padding_info_t *paddingInfo,
+                                       QCameraParameters &param,
+                                       bool contStream,
+                                       bool offline);
+    // online reprocess
+    int32_t doReprocess(mm_camera_super_buf_t *frame);
+    // offline reprocess
+    int32_t doReprocess(int buf_fd, size_t buf_length, int32_t &ret_val);
+    int32_t doReprocessOffline(mm_camera_super_buf_t *frame);
+    int32_t stop();
+    QCameraChannel *getSourceChannel() { return m_pSrcChannel; }
+
+private:
+    QCameraStream *getStreamBySrouceHandle(uint32_t srcHandle);
+
+    typedef struct {
+        QCameraStream *stream;
+        cam_mapping_buf_type type;
+        uint32_t index;
+    } OfflineBuffer;
+
+    uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+    QCameraChannel *m_pSrcChannel; // ptr to source channel for reprocess
+    android::List<OfflineBuffer> mOfflineBuffers;
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CHANNEL_H__ */
diff --git a/msm8974/QCamera2/HAL/QCameraMem.cpp b/msm8974/QCamera2/HAL/QCameraMem.cpp
new file mode 100644
index 0000000..f809991
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraMem.cpp
@@ -0,0 +1,2002 @@
+/* Copyright (c) 2012-2016, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define MEMLOG_THRESH 102400
+#define LOG_TAG "QCameraHWI_Mem"
+
+#include <string.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <utils/Errors.h>
+#include <utils/Trace.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+
+// Media dependencies
+#ifdef USE_MEDIA_EXTENSIONS
+#include <media/hardware/HardwareAPI.h>
+typedef struct VideoNativeHandleMetadata media_metadata_buffer;
+#else
+#include "QComOMXMetadata.h"
+typedef struct encoder_media_buffer_type media_metadata_buffer;
+#endif
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCameraMemory base class
+
+/*===========================================================================
+ * FUNCTION   : QCameraMemory
+ *
+ * DESCRIPTION: default constructor of QCameraMemory
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::QCameraMemory(bool cached,
+        QCameraMemoryPool *pool,
+        cam_stream_type_t streamType)
+    :m_bCached(cached),
+     mMemoryPool(pool),
+     mStreamType(streamType)
+{
+    mBufferCount = 0;
+    memset(mMemInfo, 0, sizeof(mMemInfo));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMemory
+ *
+ * DESCRIPTION: destructor of QCameraMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::~QCameraMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *   @vaddr   : ptr to the virtual address
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr)
+{
+    if (!m_bCached) {
+        // Memory is not cached, no need for cache ops
+        CDBG("%s: No cache ops here for uncached memory", __func__);
+        return OK;
+    }
+
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = OK;
+
+    if (index >= mBufferCount) {
+        ALOGE("%s: index %d out of bound [0, %d)", __func__, index, mBufferCount);
+        return BAD_INDEX;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = vaddr;
+    cache_inv_data.fd = mMemInfo[index].fd;
+    cache_inv_data.handle = mMemInfo[index].handle;
+    cache_inv_data.length =
+            ( /* FIXME: Should remove this after ION interface changes */ unsigned int)
+            mMemInfo[index].size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    CDBG("%s: addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+         __func__, cache_inv_data.vaddr, cache_inv_data.fd,
+         (unsigned long)cache_inv_data.handle, cache_inv_data.length,
+         mMemInfo[index].main_ion_fd);
+    ret = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &custom_data);
+    if (ret < 0)
+        ALOGE("%s: Cache Invalidate failed: %s\n", __func__, strerror(errno));
+
+    return ret;
+}
+
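+// Illustrative only: subclasses typically forward their cache maintenance
+// requests here with one of the msm ION custom cache commands (assuming the
+// msm ION UAPI from <linux/msm_ion.h> is available on this kernel), e.g.
+//
+//   cacheOpsInternal(bufIdx, ION_IOC_CLEAN_INV_CACHES, getPtr(bufIdx));
+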
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor
+ *==========================================================================*/
+int QCameraMemory::getFd(uint32_t index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size
+ *==========================================================================*/
+ssize_t QCameraMemory::getSize(uint32_t index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return (ssize_t)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated
+ *==========================================================================*/
+uint8_t QCameraMemory::getCnt() const
+{
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, uint32_t index) const
+{
+    if (!mBufferCount) {
+        ALOGE("Memory not allocated");
+        return;
+    }
+    bufDef.fd = mMemInfo[index].fd;
+    bufDef.frame_len = mMemInfo[index].size;
+    bufDef.mem_info = (void *)this;
+    bufDef.num_planes = (int8_t)offset.num_planes;
+    bufDef.buffer = getPtr(index);
+    bufDef.buf_idx = index;
+
+    /* Plane 0 needs to be set separately. Set other planes in a loop */
+    bufDef.planes[0].length = offset.mp[0].len;
+    bufDef.planes[0].m.userptr = (long unsigned int)mMemInfo[index].fd;
+    bufDef.planes[0].data_offset = offset.mp[0].offset;
+    bufDef.planes[0].reserved[0] = 0;
+    for (int i = 1; i < bufDef.num_planes; i++) {
+         bufDef.planes[i].length = offset.mp[i].len;
+         bufDef.planes[i].m.userptr = (long unsigned int)mMemInfo[index].fd;
+         bufDef.planes[i].data_offset = offset.mp[i].offset;
+         bufDef.planes[i].reserved[0] =
+                 bufDef.planes[i-1].reserved[0] +
+                 bufDef.planes[i-1].length;
+    }
+}
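+
+/* Worked example (illustrative only): for a two-plane NV21 frame with
+ * offset.mp[0].len = 460800 (Y) and offset.mp[1].len = 230400 (CbCr), the loop
+ * above produces planes[0].reserved[0] = 0 and
+ * planes[1].reserved[0] = 0 + 460800 = 460800, i.e. reserved[0] carries each
+ * plane's byte offset within the single ION buffer shared through fd.
+ */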
+
+/*===========================================================================
+ * FUNCTION   : traceLogAllocStart
+ *
+ * DESCRIPTION: start trace logging for a buffer allocation
+ *
+ * PARAMETERS :
+ *   @size      : [input] size of each buffer
+ *   @count     : [input] number of buffers
+ *   @allocName : [input] name used as the trace marker for this allocation
+ *
+ * RETURN     : none
+ *==========================================================================*/
+inline void QCameraMemory::traceLogAllocStart(size_t size, int count, const char *allocName)
+{
+    ALOGD("%s : alloc E count=%d size=%zu", __func__, count, size);
+#ifdef ATRACE_TAG_CAMERA
+    char atracer[30];
+    if ((size * (size_t)count) > MEMLOG_THRESH) {
+        snprintf(atracer,sizeof(atracer), "%s %zu",allocName, size);
+        ATRACE_BEGIN(atracer);
+        ALOGE("%s:%s", __func__, atracer);
+    } else {
+        ATRACE_CALL();
+    }
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : traceLogAllocEnd
+ *
+ * DESCRIPTION: end trace logging for a buffer allocation
+ *
+ * PARAMETERS :
+ *   @size  : [input] total size of the allocation
+ *
+ * RETURN     : none
+ *==========================================================================*/
+inline void QCameraMemory::traceLogAllocEnd(size_t size)
+{
+    ALOGD(" %s : X", __func__);
+#ifdef ATRACE_TAG_CAMERA
+    if (size > MEMLOG_THRESH) {
+        ATRACE_END();
+        ALOGE("%s %zu", __func__, size);
+    }
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : alloc
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @heap_id : heap id to indicate where the buffers will be allocated from
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::alloc(int count, size_t size, unsigned int heap_id)
+{
+    int rc = OK;
+
+    if (0 > count) {
+        ALOGE("%s: Negative count %d", __func__, count);
+        return BAD_INDEX;
+    }
+
+    int new_bufCnt = mBufferCount + count;
+    traceLogAllocStart(size, count, "Memsize");
+
+    if (new_bufCnt > MM_CAMERA_MAX_NUM_FRAMES) {
+        ALOGE("%s: Buffer count %d out of bound. Max is %d",
+              __func__, new_bufCnt, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_INDEX;
+    }
+
+    for (int i = mBufferCount; i < new_bufCnt; i ++) {
+        if ( NULL == mMemoryPool ) {
+            ALOGE("%s : No memory pool available", __func__);
+            rc = allocOneBuffer(mMemInfo[i], heap_id, size, m_bCached);
+            if (rc < 0) {
+                ALOGE("%s: AllocateIonMemory failed", __func__);
+                for (int j = i-1; j >= 0; j--)
+                    deallocOneBuffer(mMemInfo[j]);
+                break;
+            }
+        } else {
+            rc = mMemoryPool->allocateBuffer(mMemInfo[i],
+                                             heap_id,
+                                             size,
+                                             m_bCached,
+                                             mStreamType);
+            if (rc < 0) {
+                ALOGE("%s: Memory pool allocation failed", __func__);
+                for (int j = i-1; j >= 0; j--)
+                    mMemoryPool->releaseBuffer(mMemInfo[j],
+                                               mStreamType);
+                break;
+            }
+        }
+
+    }
+    traceLogAllocEnd (size * (size_t)count);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dealloc
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::dealloc()
+{
+    for (int i = 0; i < mBufferCount; i++) {
+        if ( NULL == mMemoryPool ) {
+            deallocOneBuffer(mMemInfo[i]);
+        } else {
+            mMemoryPool->releaseBuffer(mMemInfo[i], mStreamType);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : allocOneBuffer
+ *
+ * DESCRIPTION: implementation of allocating one buffer of a certain size
+ *
+ * PARAMETERS :
+ *   @memInfo : [output] reference to struct to store additional memory allocation info
+ *   @heap    : [input] heap id to indicate where the buffers will be allocated from
+ *   @size    : [input] length of the buffer to be allocated
+ *   @cached  : [input] flag whether buffer needs to be cached
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::allocOneBuffer(QCameraMemInfo &memInfo,
+        unsigned int heap_id, size_t size, bool cached)
+{
+    int rc = OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = 0;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd < 0) {
+        ALOGE("Ion dev open failed: %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095U) & (~4095U);
+    alloc.align = 4096;
+    if (cached) {
+        alloc.flags = ION_FLAG_CACHED;
+    }
+    alloc.heap_mask = heap_id;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        ALOGE("ION allocation failed: %s\n", strerror(errno));
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        ALOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    memInfo.main_ion_fd = main_ion_fd;
+    memInfo.fd = ion_info_fd.fd;
+    memInfo.handle = ion_info_fd.handle;
+    memInfo.size = alloc.len;
+    memInfo.cached = cached;
+    memInfo.heap_id = heap_id;
+
+    CDBG_HIGH("%s : ION buffer %lx with size %d allocated",
+            __func__, (unsigned long)memInfo.handle, memInfo.size);
+    return OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return NO_MEMORY;
+}
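+
+/* Arithmetic note (illustrative): the requested length is rounded up to whole
+ * 4 KB pages via (len + 4095) & ~4095. For a hypothetical request of
+ * 100000 bytes this yields 102400 bytes (25 pages), and memInfo.size records
+ * the rounded value, which is what the later mmap() and cache calls operate on.
+ */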
+
+/*===========================================================================
+ * FUNCTION   : deallocOneBuffer
+ *
+ * DESCRIPTION: implementation of deallocating one buffer
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::deallocOneBuffer(QCameraMemInfo &memInfo)
+{
+    struct ion_handle_data handle_data;
+
+    if (memInfo.fd > 0) {
+        close(memInfo.fd);
+        memInfo.fd = 0;
+    }
+
+    if (memInfo.main_ion_fd > 0) {
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = memInfo.handle;
+        ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(memInfo.main_ion_fd);
+        memInfo.main_ion_fd = 0;
+    }
+    memInfo.handle = 0;
+    memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraMemoryPool
+ *
+ * DESCRIPTION: default constructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemoryPool::QCameraMemoryPool()
+{
+    pthread_mutex_init(&mLock, NULL);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMemoryPool
+ *
+ * DESCRIPTION: destructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemoryPool::~QCameraMemoryPool()
+{
+    clear();
+    pthread_mutex_destroy(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseBuffer
+ *
+ * DESCRIPTION: release one cached buffer back to the pool
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @streamType: type of stream the buffer belongs to
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemoryPool::releaseBuffer(
+        struct QCameraMemory::QCameraMemInfo &memInfo,
+        cam_stream_type_t streamType)
+{
+    pthread_mutex_lock(&mLock);
+
+    mPools[streamType].push_back(memInfo);
+
+    pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : clear
+ *
+ * DESCRIPTION: clears all cached buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemoryPool::clear()
+{
+    pthread_mutex_lock(&mLock);
+
+    for (int i = CAM_STREAM_TYPE_DEFAULT; i < CAM_STREAM_TYPE_MAX; i++ ) {
+        List<struct QCameraMemory::QCameraMemInfo>::iterator it = mPools[i].begin();
+        for( ; it != mPools[i].end() ; it++) {
+            QCameraMemory::deallocOneBuffer(*it);
+        }
+
+        mPools[i].clear();
+    }
+
+    pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : findBufferLocked
+ *
+ * DESCRIPTION: search for an appropriate cached buffer
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @heap_id : type of heap
+ *   @size    : size of the buffer
+ *   @cached  : whether the buffer should be cached
+ *   @streamType: type of stream this buffer belongs to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::findBufferLocked(
+        struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+        size_t size, bool cached, cam_stream_type_t streamType)
+{
+    int rc = NAME_NOT_FOUND;
+
+    if (mPools[streamType].empty()) {
+        return NAME_NOT_FOUND;
+    }
+
+    List<struct QCameraMemory::QCameraMemInfo>::iterator it = mPools[streamType].begin();
+    for( ; it != mPools[streamType].end() ; it++) {
+        if( ((*it).size >= size) &&
+            ((*it).heap_id == heap_id) &&
+            ((*it).cached == cached) ) {
+            memInfo = *it;
+            ALOGE("%s : Found buffer %lx size %d",
+                    __func__, (unsigned long)memInfo.handle, memInfo.size);
+            mPools[streamType].erase(it);
+            rc = NO_ERROR;
+            break;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateBuffer
+ *
+ * DESCRIPTION: allocates a buffer from the memory pool,
+ *              it will re-use cached buffers if possible
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @heap_id : type of heap
+ *   @size    : size of the buffer
+ *   @cached  : whether the buffer should be cached
+ *   @streamType: type of stream this buffer belongs to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::allocateBuffer(
+        struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+        size_t size, bool cached, cam_stream_type_t streamType)
+{
+    int rc = NO_ERROR;
+
+    pthread_mutex_lock(&mLock);
+
+    rc = findBufferLocked(memInfo, heap_id, size, cached, streamType);
+    if (NAME_NOT_FOUND == rc ) {
+        ALOGE("%s : Buffer not found!", __func__);
+        rc = QCameraMemory::allocOneBuffer(memInfo, heap_id, size, cached);
+    }
+
+    pthread_mutex_unlock(&mLock);
+
+    return rc;
+}
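+
+/* Usage sketch (illustrative, names hypothetical): the pool first tries to
+ * recycle a cached entry at least as large as requested with matching heap and
+ * cache attributes, and only falls back to a fresh ION allocation on
+ * NAME_NOT_FOUND:
+ *
+ *     QCameraMemory::QCameraMemInfo info;
+ *     QCameraMemoryPool pool;
+ *     if (pool.allocateBuffer(info, heapId, len, true,
+ *                             CAM_STREAM_TYPE_PREVIEW) == NO_ERROR) {
+ *         // ... use info.fd / info.size ...
+ *         pool.releaseBuffer(info, CAM_STREAM_TYPE_PREVIEW); // cache for reuse
+ *     }
+ */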
+
+/*===========================================================================
+ * FUNCTION   : QCameraHeapMemory
+ *
+ * DESCRIPTION: constructor of QCameraHeapMemory for ion memory used internally in HAL
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::QCameraHeapMemory(bool cached)
+    : QCameraMemory(cached)
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mPtr[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraHeapMemory
+ *
+ * DESCRIPTION: destructor of QCameraHeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::~QCameraHeapMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraHeapMemory::getPtr(uint32_t index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocate(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "HeapMemsize");
+    unsigned int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            for (int j = i-1; j >= 0; j --) {
+                munmap(mPtr[j], mMemInfo[j].size);
+                mPtr[j] = NULL;
+                deallocOneBuffer(mMemInfo[j]);
+            }
+            return NO_MEMORY;
+        } else
+            mPtr[i] = vaddr;
+    }
+    if (rc == 0)
+        mBufferCount = count;
+    traceLogAllocEnd((size * count));
+    return OK;
+}
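+
+/* Usage sketch (illustrative, names hypothetical): heap buffers are allocated
+ * from ION and mmap()ed in one step, then accessed through getPtr():
+ *
+ *     QCameraHeapMemory heapMem(true);            // cached
+ *     if (heapMem.allocate(numBufs, bufLen) == OK) {
+ *         void *va = heapMem.getPtr(0);           // CPU mapping of buffer 0
+ *         // ... fill or parse the buffer ...
+ *         heapMem.deallocate();                   // munmap + free the ION memory
+ *     }
+ */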
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocateMore(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "HeapMemsize");
+    unsigned int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            for (int j = i-1; j >= mBufferCount; j --) {
+                munmap(mPtr[j], mMemInfo[j].size);
+                mPtr[j] = NULL;
+                deallocOneBuffer(mMemInfo[j]);
+            }
+            return NO_MEMORY;
+        } else {
+            mPtr[i] = vaddr;
+        }
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    traceLogAllocEnd((size * count));
+    return OK;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraHeapMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i++) {
+        munmap(mPtr[i], mMemInfo[i].size);
+        mPtr[i] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mPtr[index]);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::getRegFlags(uint8_t * /*regFlags*/) const
+{
+    return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraHeapMemory::getMemory(uint32_t /*index*/, bool /*metadata*/) const
+{
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraHeapMemory::getMatchBufIndex(const void *opaque,
+                                        bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mPtr[i] == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraStreamMemory
+ *              ION memory allocated directly from /dev/ion and shared with framework
+ *
+ * PARAMETERS :
+ *   @getMemory  : camera memory request ops table
+ *   @cbCookie   : callback cookie passed to the memory request ops
+ *   @cached     : flag indicates if using cached memory
+ *   @pool       : memory pool to allocate from, may be NULL
+ *   @streamType : type of stream the buffers belong to
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::QCameraStreamMemory(camera_request_memory getMemory,
+        void* cbCookie,
+        bool cached,
+        QCameraMemoryPool *pool,
+        cam_stream_type_t streamType)
+    :QCameraMemory(cached, pool, streamType),
+     mGetMemory(getMemory),
+     mCallbackCookie(cbCookie)
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mCameraMemory[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::~QCameraStreamMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocate(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "StreamMemsize");
+    unsigned int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, mCallbackCookie);
+    }
+    mBufferCount = count;
+    traceLogAllocEnd((size * count));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocateMore(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "StreamMemsize");
+    unsigned int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = mBufferCount; i < mBufferCount + count; i++) {
+        mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, mCallbackCookie);
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    traceLogAllocEnd((size * count));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStreamMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i ++) {
+        mCameraMemory[i]->release(mCameraMemory[i]);
+        mCameraMemory[i] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+    for (int i = 0; i < mBufferCount; i ++)
+        regFlags[i] = 1;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraStreamMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    if (index >= mBufferCount || metadata)
+        return NULL;
+    return mCameraMemory[index];
+}
+
+#ifdef USE_MEDIA_EXTENSIONS
+/*===========================================================================
+ * FUNCTION   : getNativeHandle
+ *
+ * DESCRIPTION: get native handle pointer
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera native handle ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+native_handle_t *QCameraVideoMemory::getNativeHandle(uint32_t index, bool metadata)
+{
+    if (index >= mMetaBufCount || (!metadata && index >= mBufferCount)) {
+        return NULL;
+    }
+    return mNativeHandle[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : closeNativeHandle
+ *
+ * DESCRIPTION: close video native handle
+ *
+ * PARAMETERS :
+ *   @data    : ptr to the video frame (metadata buffer) to be returned
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(const void *data, bool metadata)
+{
+    int32_t rc = NO_ERROR;
+
+    if (metadata) {
+        const media_metadata_buffer * packet = (const media_metadata_buffer*)data;
+        if (packet != NULL && packet->eType ==
+            kMetadataBufferTypeNativeHandleSource) {
+            for (int i = 0; i < mMetaBufCount; i++) {
+                if(mMetadata[i]->data == data) {
+                    media_metadata_buffer *mem =
+                      (media_metadata_buffer *)mMetadata[i]->data;
+                    native_handle_close(mem->pHandle);
+                    native_handle_delete(mem->pHandle);
+                    mem->pHandle = NULL;
+                    break;
+                }
+            }
+        } else {
+            ALOGE("Invalid Data. Could not release");
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGW("Warning: Not of type video meta buffer");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeNativeHandle
+ *
+ * DESCRIPTION: static function to close video native handle.
+ *
+ * PARAMETERS :
+ *   @data  : ptr to video frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(const void *data)
+{
+    int32_t rc = NO_ERROR;
+
+    const media_metadata_buffer *packet =
+            (const media_metadata_buffer *)data;
+    if ((packet != NULL) && (packet->eType ==
+            kMetadataBufferTypeNativeHandleSource)
+            && (packet->pHandle)) {
+        native_handle_close(packet->pHandle);
+        native_handle_delete(packet->pHandle);
+    } else {
+        ALOGE("Invalid Data. Could not release");
+        return BAD_VALUE;
+    }
+    return rc;
+}
+#endif
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraStreamMemory::getMatchBufIndex(const void *opaque,
+                                          bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mCameraMemory[i]->data == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraStreamMemory::getPtr(uint32_t index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mCameraMemory[index]->data;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoMemory
+ *
+ * DESCRIPTION: constructor of QCameraVideoMemory
+ *              VideoStream buffers also include metadata buffers
+ *
+ * PARAMETERS :
+ *   @getMemory : camera memory request ops table
+ *   @cbCookie  : callback cookie passed to the memory request ops
+ *   @cached    : flag indicates if using cached ION memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::QCameraVideoMemory(camera_request_memory getMemory,
+                                       void* cbCookie,
+                                       bool cached)
+    : QCameraStreamMemory(getMemory, cbCookie, cached)
+{
+    memset(mMetadata, 0, sizeof(mMetadata));
+    memset(mNativeHandle, 0, sizeof(mNativeHandle));
+    mMetaBufCount = 0;
+    //Set Default color conversion format
+    mUsage |= private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoMemory
+ *
+ * DESCRIPTION: destructor of QCameraVideoMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::~QCameraVideoMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocate(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "VideoMemsize");
+    int rc = QCameraStreamMemory::allocate(count, size);
+    native_handle_t *nh =  NULL;
+    
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        mMetadata[i] = mGetMemory(-1,
+                sizeof(media_metadata_buffer), 1, mCallbackCookie);
+        if (!mMetadata[i]) {
+            ALOGE("allocation of video metadata failed.");
+            for (int j = 0; j <= i-1; j ++)
+                mMetadata[j]->release(mMetadata[j]);
+            QCameraStreamMemory::deallocate();
+            return NO_MEMORY;
+        }
+        media_metadata_buffer * packet =
+                (media_metadata_buffer *)mMetadata[i]->data;
+#ifdef USE_MEDIA_EXTENSIONS
+        if (!mNativeHandle[i]) {
+            mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS+
+                                   VIDEO_METADATA_NUM_COMMON_INTS);
+            if (mNativeHandle[i] == NULL) {
+                ALOGE("Error in creating video native handle");
+                mMetadata[i]->release(mMetadata[i]);
+                for (int j = (i - 1); j >= 0; j--) {
+                    if (NULL != mNativeHandle[j]) {
+                        native_handle_delete(mNativeHandle[j]);
+                    }
+                    mMetadata[j]->release(mMetadata[j]);
+                }
+                return NO_MEMORY;
+            }
+        }
+        //assign buffer index to native handle.
+        nh =  mNativeHandle[i];
+        nh->data[1 + VIDEO_METADATA_NUM_INTS] = i;
+
+        packet->eType = kMetadataBufferTypeNativeHandleSource;
+        packet->pHandle = NULL;
+#else
+        nh = mNativeHandle[i];
+        packet->meta_handle = mNativeHandle[i];
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+#endif
+
+
+        nh->data[0] = mMemInfo[i].fd;
+        nh->data[1] = 0;
+        nh->data[2] = (int)mMemInfo[i].size;
+        //value for usage
+        nh->data[3] = mUsage | private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
+        nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+        nh->data[5] = mFormat;
+        nh->data[6] = i;//buffer index
+    }
+    mBufferCount = count;
+    mMetaBufCount = count;
+    traceLogAllocEnd((size * count));
+    return NO_ERROR;
+}
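+
+/* Layout note (descriptive only): each video native handle is packed as
+ * data[0] = ION fd, data[1] = offset (0), data[2] = buffer size,
+ * data[3] = usage flags, data[4] = timestamp placeholder, data[5] = OMX color
+ * format and data[6] = buffer index, mirroring the assignments above; the
+ * consumer of the metadata buffer is expected to unpack the same fields.
+ */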
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocateMore(uint8_t count, size_t size)
+{
+    traceLogAllocStart(size, count, "VideoMemsize");
+    int rc = QCameraStreamMemory::allocateMore(count, size);
+    if (rc < 0)
+        return rc;
+
+    for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+        mMetadata[i] = mGetMemory(-1,
+                sizeof(media_metadata_buffer), 1, mCallbackCookie);
+        if (!mMetadata[i]) {
+            ALOGE("allocation of video metadata failed.");
+            for (int j = mBufferCount; j <= i-1; j ++) {
+                mMetadata[j]->release(mMetadata[j]);
+                mCameraMemory[j]->release(mCameraMemory[j]);
+                mCameraMemory[j] = NULL;
+                deallocOneBuffer(mMemInfo[j]);
+            }
+            return NO_MEMORY;
+        }
+        media_metadata_buffer * packet =
+            (media_metadata_buffer *)mMetadata[i]->data;
+
+        native_handle_t * nh = NULL;
+#ifdef USE_MEDIA_EXTENSIONS
+        mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS+
+                                              VIDEO_METADATA_NUM_COMMON_INTS);
+        packet->eType = kMetadataBufferTypeNativeHandleSource;
+        packet->pHandle = mNativeHandle[i];
+        nh = mNativeHandle[i];
+#else
+        mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS);
+        packet->meta_handle = mNativeHandle[i];
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        nh = const_cast<native_handle_t *>(packet->meta_handle);
+#endif
+
+        nh->data[0] = mMemInfo[i].fd;
+        nh->data[1] = 0;
+        nh->data[2] = (int)mMemInfo[i].size;
+        //value for usage
+        nh->data[3] = mUsage | private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
+        nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+        nh->data[5] = mFormat;
+        nh->data[6] = i;//buffer index
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    mMetaBufCount = mBufferCount;
+    traceLogAllocEnd((size * count));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocate()
+{
+    for (int i = 0; i < mMetaBufCount; i ++) {
+        native_handle_t *nh = mNativeHandle[i];
+        if (NULL != nh) {
+            if (native_handle_delete(nh)) {
+                ALOGE("Unable to delete native handle");
+            }
+        } else {
+            ALOGE("native handle not available");
+        }
+        mNativeHandle[i] = NULL;
+        mMetadata[i]->release(mMetadata[i]);
+        mMetadata[i] = NULL;
+    }
+    QCameraStreamMemory::deallocate();
+    mBufferCount = 0;
+    mMetaBufCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraVideoMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    if (index >= mMetaBufCount || (!metadata && index >= mBufferCount))
+        return NULL;
+    if (metadata) {
+#ifdef USE_MEDIA_EXTENSIONS
+      int i;
+        media_metadata_buffer *packet = NULL;
+        for (i = 0; i < mMetaBufCount; i++) {
+            packet = (media_metadata_buffer *)mMetadata[i]->data;
+            if (packet != NULL && packet->pHandle == NULL) {
+                packet->pHandle = mNativeHandle[index];
+                break;
+            }
+        }
+        if (i < mMetaBufCount) {
+            return mMetadata[i];
+        } else {
+            CDBG_HIGH("No free video meta memory");
+            return NULL;
+        }
+#else
+        return mMetadata[index];
+#endif
+    } else {
+        return mCameraMemory[index];
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraVideoMemory::getMatchBufIndex(const void *opaque,
+                                         bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+#ifdef USE_MEDIA_EXTENSIONS
+        const media_metadata_buffer *packet =
+            (const media_metadata_buffer *)opaque;
+        native_handle_t *nh = NULL;
+        if ((packet != NULL) && (packet->eType ==
+                kMetadataBufferTypeNativeHandleSource)
+                && (packet->pHandle)) {
+            nh = (native_handle_t *)packet->pHandle;
+            int mCommonIdx = (nh->numInts + nh->numFds -
+                                VIDEO_METADATA_NUM_COMMON_INTS);
+            for (int i = 0; i < mMetaBufCount; i++) {
+                if(nh->data[mCommonIdx] == mNativeHandle[i]->data[mCommonIdx]) {
+                    index = i;
+                    break;
+                }
+            }
+        }
+#else
+        for (int i = 0; i < mMetaBufCount; i++) {
+            if (mMetadata[i]->data == opaque) {
+                index = i;
+                break;
+            }
+        }
+#endif
+    } else {
+        for (int i = 0; i < mBufferCount; i++) {
+            if (mCameraMemory[i]->data == opaque) {
+                index = i;
+                break;
+            }
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoInfo
+ *
+ * DESCRIPTION: set video usage flags and color conversion format
+ *
+ * PARAMETERS :
+ *   @usage  : usage bit for video
+ *   @format : video frame format in cam_format_t type
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::setVideoInfo(int usage, cam_format_t format)
+{
+    mUsage = usage;
+    mFormat = convCamtoOMXFormat(format);
+}
+
+/*===========================================================================
+ * FUNCTION   : convCamtoOMXFormat
+ *
+ * DESCRIPTION: map cam_format_t to corresponding OMX format
+ *
+ * PARAMETERS :
+ *   @format : format in cam_format_t type
+ *
+ * RETURN     : corresponding OMX color format
+ *==========================================================================*/
+int QCameraVideoMemory::convCamtoOMXFormat(cam_format_t format)
+{
+    //OMX format is purely based on YUV pattern. For UBWC
+    //or any other format change, hint to be provided in the
+    //usage flags of native_handle_t.
+    int omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+    switch (format) {
+        case CAM_FORMAT_YUV_420_NV21:
+        case CAM_FORMAT_YUV_420_NV21_ADRENO:
+            omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+            break;
+        case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_YUV_420_NV12_VENUS:
+            omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+            break;
+        default:
+            omxFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+    }
+    return omxFormat;
+}
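+
+/* Example (illustrative): convCamtoOMXFormat(CAM_FORMAT_YUV_420_NV21) and
+ * convCamtoOMXFormat(CAM_FORMAT_YUV_420_NV12_VENUS) both return
+ * OMX_COLOR_FormatYUV420SemiPlanar; layout variants such as UBWC are signalled
+ * through the native handle usage flags rather than a different OMX format.
+ */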
+
+
+/*===========================================================================
+ * FUNCTION   : QCameraGrallocMemory
+ *
+ * DESCRIPTION: constructor of QCameraGrallocMemory
+ *              preview stream buffers are allocated from the gralloc native window
+ *
+ * PARAMETERS :
+ *   @getMemory : camera memory request ops table
+ *   @cbCookie  : callback cookie passed to the memory request ops
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::QCameraGrallocMemory(camera_request_memory getMemory, void* cbCookie)
+        : QCameraMemory(true)
+{
+    mMinUndequeuedBuffers = 0;
+    mWindow = NULL;
+    mWidth = mHeight = mStride = mScanline = 0;
+    mFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+    mGetMemory = getMemory;
+    mCallbackCookie = cbCookie;
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++) {
+        mBufferHandle[i] = NULL;
+        mLocalFlag[i] = BUFFER_NOT_OWNED;
+        mPrivateHandle[i] = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraGrallocMemory
+ *
+ * DESCRIPTION: destructor of QCameraGrallocMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::~QCameraGrallocMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : setWindowInfo
+ *
+ * DESCRIPTION: set native window gralloc ops table
+ *
+ * PARAMETERS :
+ *   @window  : gralloc ops table ptr
+ *   @width   : width of preview frame
+ *   @height  : height of preview frame
+ *   @stride  : stride of preview frame
+ *   @scanline: scanline of preview frame
+ *   @format  : format of preview image
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setWindowInfo(preview_stream_ops_t *window,
+        int width, int height, int stride, int scanline, int format)
+{
+    mWindow = window;
+    mWidth = width;
+    mHeight = height;
+    mStride = stride;
+    mScanline = scanline;
+    mFormat = format;
+}
+
+/*===========================================================================
+ * FUNCTION   : displayBuffer
+ *
+ * DESCRIPTION: send received frame to display
+ *
+ * PARAMETERS :
+ *   @index   : index of preview frame
+ *
+ * RETURN     : index of the buffer dequeued back from the display window
+ *              negative value (BAD_INDEX / INVALID_OPERATION) on failure
+ *==========================================================================*/
+int QCameraGrallocMemory::displayBuffer(uint32_t index)
+{
+    int err = NO_ERROR;
+    int dequeuedIdx = BAD_INDEX;
+
+    if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+        ALOGE("%s: buffer to be enqueued is not owned", __func__);
+        return INVALID_OPERATION;
+    }
+
+    err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+    if(err != 0) {
+        ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+    } else {
+        CDBG("%s: enqueue_buffer hdl=%p", __func__, *mBufferHandle[index]);
+        mLocalFlag[index] = BUFFER_NOT_OWNED;
+    }
+
+    buffer_handle_t *buffer_handle = NULL;
+    int stride = 0;
+    err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+    if (err == NO_ERROR && buffer_handle != NULL) {
+        int i;
+        CDBG("%s: dequed buf hdl =%p", __func__, *buffer_handle);
+        for(i = 0; i < mBufferCount; i++) {
+            if(mBufferHandle[i] == buffer_handle) {
+                CDBG("%s: Found buffer in idx:%d", __func__, i);
+                mLocalFlag[i] = BUFFER_OWNED;
+                dequeuedIdx = i;
+                break;
+            }
+        }
+    } else {
+        CDBG_HIGH("%s: dequeue_buffer, no free buffer from display now", __func__);
+    }
+    return dequeuedIdx;
+}
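+
+/* Call pattern (illustrative, names hypothetical): the preview path enqueues a
+ * filled buffer and, in the same call, tries to reclaim whichever buffer the
+ * display has released:
+ *
+ *     int idx = grallocMem.displayBuffer(readyIdx);
+ *     if (idx >= 0) {
+ *         // buffer 'idx' is owned by the HAL again and can be refilled
+ *     }
+ */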
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocate(uint8_t count, size_t /*size*/)
+{
+    traceLogAllocStart(0,count, "Grallocbufcnt");
+    int err = 0;
+    status_t ret = NO_ERROR;
+    int gralloc_usage = 0;
+    struct ion_fd_data ion_info_fd;
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+    CDBG_HIGH(" %s : E ", __func__);
+
+    if (!mWindow) {
+        ALOGE("Invalid native window");
+        return INVALID_OPERATION;
+    }
+
+    // Increment buffer count by min undequeued buffer.
+    err = mWindow->get_min_undequeued_buffer_count(mWindow,&mMinUndequeuedBuffers);
+    if (err != 0) {
+        ALOGE("get_min_undequeued_buffer_count  failed: %s (%d)",
+                strerror(-err), -err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+
+    err = mWindow->set_buffer_count(mWindow, count);
+    if (err != 0) {
+         ALOGE("set_buffer_count failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    err = mWindow->set_buffers_geometry(mWindow, mStride, mScanline, mFormat);
+    if (err != 0) {
+         ALOGE("%s: set_buffers_geometry failed: %s (%d)",
+               __func__, strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    err = mWindow->set_crop(mWindow, 0, 0, mWidth, mHeight);
+    if (err != 0) {
+         ALOGE("%s: set_crop failed: %s (%d)",
+               __func__, strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    gralloc_usage = GRALLOC_USAGE_HW_CAMERA_WRITE | GRALLOC_USAGE_PRIVATE_IOMMU_HEAP;
+    err = mWindow->set_usage(mWindow, gralloc_usage);
+    if(err != 0) {
+        /* set_usage error out */
+        ALOGE("%s: set_usage rc = %d", __func__, err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    CDBG_HIGH("%s: usage = %d, geometry: %p, %d, %d, %d, %d, %d",
+          __func__, gralloc_usage, mWindow, mWidth, mHeight, mStride,
+          mScanline, mFormat);
+
+    //Allocate cnt number of buffers from native window
+    for (int cnt = 0; cnt < count; cnt++) {
+        int stride;
+        err = mWindow->dequeue_buffer(mWindow, &mBufferHandle[cnt], &stride);
+        if(!err) {
+            CDBG("dequeue buf hdl =%p", mBufferHandle[cnt]);
+            mLocalFlag[cnt] = BUFFER_OWNED;
+        } else {
+            mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+            ALOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+        }
+
+        CDBG("%s: dequeue buf: %p\n", __func__, mBufferHandle[cnt]);
+
+        if(err != 0) {
+            ALOGE("%s: dequeue_buffer failed: %s (%d)",
+                  __func__, strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+            for(int i = 0; i < cnt; i++) {
+                struct ion_handle_data ion_handle;
+                memset(&ion_handle, 0, sizeof(ion_handle));
+                ion_handle.handle = mMemInfo[i].handle;
+                if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                    ALOGE("ion free failed");
+                }
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    CDBG_HIGH("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            memset(&mMemInfo, 0, sizeof(mMemInfo));
+            goto end;
+        }
+
+        mPrivateHandle[cnt] =
+            (struct private_handle_t *)(*mBufferHandle[cnt]);
+        mMemInfo[cnt].main_ion_fd = open("/dev/ion", O_RDONLY);
+        if (mMemInfo[cnt].main_ion_fd < 0) {
+            ALOGE("%s: failed: could not open ion device", __func__);
+            for(int i = 0; i < cnt; i++) {
+                struct ion_handle_data ion_handle;
+                memset(&ion_handle, 0, sizeof(ion_handle));
+                ion_handle.handle = mMemInfo[i].handle;
+                if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                    ALOGE("%s: ion free failed", __func__);
+                }
+                close(mMemInfo[i].main_ion_fd);
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    CDBG_HIGH("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            memset(&mMemInfo, 0, sizeof(mMemInfo));
+            ret = UNKNOWN_ERROR;
+            goto end;
+        } else {
+            ion_info_fd.fd = mPrivateHandle[cnt]->fd;
+            if (ioctl(mMemInfo[cnt].main_ion_fd,
+                      ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                ALOGE("%s: ION import failed\n", __func__);
+                for(int i = 0; i < cnt; i++) {
+                    struct ion_handle_data ion_handle;
+                    memset(&ion_handle, 0, sizeof(ion_handle));
+                    ion_handle.handle = mMemInfo[i].handle;
+                    if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                        ALOGE("ion free failed");
+                    }
+                    close(mMemInfo[i].main_ion_fd);
+
+                    if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                        CDBG_HIGH("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                    }
+                    mLocalFlag[i] = BUFFER_NOT_OWNED;
+                    mBufferHandle[i] = NULL;
+                }
+                close(mMemInfo[cnt].main_ion_fd);
+                memset(&mMemInfo, 0, sizeof(mMemInfo));
+                ret = UNKNOWN_ERROR;
+                goto end;
+            }
+        }
+        mCameraMemory[cnt] =
+            mGetMemory(mPrivateHandle[cnt]->fd,
+                    (size_t)mPrivateHandle[cnt]->size,
+                    1,
+                    mCallbackCookie);
+        CDBG("%s: idx = %d, fd = %d, size = %d, offset = %d",
+              __func__, cnt, mPrivateHandle[cnt]->fd,
+              mPrivateHandle[cnt]->size,
+              mPrivateHandle[cnt]->offset);
+        mMemInfo[cnt].fd = mPrivateHandle[cnt]->fd;
+        mMemInfo[cnt].size = (size_t)mPrivateHandle[cnt]->size;
+        mMemInfo[cnt].handle = ion_info_fd.handle;
+    }
+    mBufferCount = count;
+
+    //Cancel min_undequeued_buffer buffers back to the window
+    for (int i = 0; i < mMinUndequeuedBuffers; i ++) {
+        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+        mLocalFlag[i] = BUFFER_NOT_OWNED;
+    }
+
+end:
+    CDBG_HIGH(" %s : X ",__func__);
+    traceLogAllocEnd(count);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocateMore(uint8_t /*count*/, size_t /*size*/)
+{
+    ALOGE("%s: Not implemented yet", __func__);
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::deallocate()
+{
+    CDBG("%s: E ", __FUNCTION__);
+
+    for (int cnt = 0; cnt < mBufferCount; cnt++) {
+        mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = mMemInfo[cnt].handle;
+        if (ioctl(mMemInfo[cnt].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+            ALOGE("ion free failed");
+        }
+        close(mMemInfo[cnt].main_ion_fd);
+        if(mLocalFlag[cnt] != BUFFER_NOT_OWNED) {
+            if (mWindow) {
+                mWindow->cancel_buffer(mWindow, mBufferHandle[cnt]);
+                CDBG_HIGH("cancel_buffer: hdl =%p", (*mBufferHandle[cnt]));
+            } else {
+                ALOGE("Preview window is NULL, cannot cancel_buffer: hdl =%p",
+                      (*mBufferHandle[cnt]));
+            }
+        }
+        mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+        CDBG_HIGH("put buffer %d successfully", cnt);
+    }
+    mBufferCount = 0;
+    CDBG(" %s : X ",__FUNCTION__);
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::getRegFlags(uint8_t *regFlags) const
+{
+    int i = 0;
+    for (i = 0; i < mMinUndequeuedBuffers; i ++)
+        regFlags[i] = 0;
+    for (; i < mBufferCount; i ++)
+        regFlags[i] = 1;
+    return NO_ERROR;
+}
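+// Editorial note (not part of the original source): getRegFlags() above leaves
+// the first mMinUndequeuedBuffers entries at 0 (buffers held back for the
+// preview window) and sets the rest to 1. For example, assuming
+// mBufferCount = 7 and mMinUndequeuedBuffers = 2, the flags come out as
+// {0, 0, 1, 1, 1, 1, 1}.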
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraGrallocMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    if (index >= mBufferCount || metadata)
+        return NULL;
+    return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraGrallocMemory::getMatchBufIndex(const void *opaque,
+                                           bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mCameraMemory[i]->data == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraGrallocMemory::getPtr(uint32_t index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mCameraMemory[index]->data;
+}
+
+}; //namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraMem.h b/msm8974/QCamera2/HAL/QCameraMem.h
new file mode 100644
index 0000000..98d85b1
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraMem.h
@@ -0,0 +1,265 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HWI_MEM_H__
+#define __QCAMERA2HWI_MEM_H__
+
+#include <hardware/camera.h>
+#include <utils/Mutex.h>
+#include <utils/List.h>
+
+extern "C" {
+#include <sys/types.h>
+#include <linux/msm_ion.h>
+#include <mm_camera_interface.h>
+}
+
+//OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT, BUFFER INDEX
+#define VIDEO_METADATA_NUM_INTS 6
+
+#ifdef USE_MEDIA_EXTENSIONS
+#ifndef VIDEO_METADATA_NUM_COMMON_INTS
+#define VIDEO_METADATA_NUM_COMMON_INTS 1
+#endif
+#endif
+
+namespace qcamera {
+
+class QCameraMemoryPool;
+
+// Base class for all memory types. Abstract.
+class QCameraMemory {
+
+public:
+    int cleanCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_CACHES);
+    }
+    int invalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_INV_CACHES);
+    }
+    int cleanInvalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);
+    }
+    int getFd(uint32_t index) const;
+    ssize_t getSize(uint32_t index) const;
+    uint8_t getCnt() const;
+
+    virtual int allocate(uint8_t count, size_t size) = 0;
+    virtual void deallocate() = 0;
+    virtual int allocateMore(uint8_t count, size_t size) = 0;
+    virtual int cacheOps(uint32_t index, unsigned int cmd) = 0;
+    virtual int getRegFlags(uint8_t *regFlags) const = 0;
+    virtual camera_memory_t *getMemory(uint32_t index,
+            bool metadata) const = 0;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const = 0;
+    virtual void *getPtr(uint32_t index) const = 0;
+
+    QCameraMemory(bool cached,
+                  QCameraMemoryPool *pool = NULL,
+                  cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT);
+    virtual ~QCameraMemory();
+
+    void getBufDef(const cam_frame_len_offset_t &offset,
+            mm_camera_buf_def_t &bufDef, uint32_t index) const;
+
+    void traceLogAllocStart(size_t size, int count, const char *allocName);
+    void traceLogAllocEnd(size_t size);
+
+protected:
+
+    friend class QCameraMemoryPool;
+
+    struct QCameraMemInfo {
+        int fd;
+        int main_ion_fd;
+        ion_user_handle_t handle;
+        size_t size;
+        bool cached;
+        unsigned int heap_id;
+    };
+
+    int alloc(int count, size_t size, unsigned int heap_id);
+    void dealloc();
+    static int allocOneBuffer(struct QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached);
+    static void deallocOneBuffer(struct QCameraMemInfo &memInfo);
+    int cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr);
+
+    bool m_bCached;
+    uint8_t mBufferCount;
+    struct QCameraMemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+    QCameraMemoryPool *mMemoryPool;
+    cam_stream_type_t mStreamType;
+};
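+// Editorial sketch (not part of the original source): the inline cache helpers
+// above forward to cacheOps() with the corresponding ION command. An assumed
+// usage pattern for keeping the CPU and hardware views of a buffer coherent:
+//   mem->cleanCache(idx);       // flush after the CPU writes buffer idx
+//   mem->invalidateCache(idx);  // invalidate before the CPU reads HW output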
+
+class QCameraMemoryPool {
+
+public:
+
+    QCameraMemoryPool();
+    virtual ~QCameraMemoryPool();
+
+    int allocateBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached,
+            cam_stream_type_t streamType);
+    void releaseBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+            cam_stream_type_t streamType);
+    void clear();
+
+protected:
+
+    int findBufferLocked(struct QCameraMemory::QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached,
+            cam_stream_type_t streamType);
+
+    android::List<QCameraMemory::QCameraMemInfo> mPools[CAM_STREAM_TYPE_MAX];
+    pthread_mutex_t mLock;
+};
+
+// Internal heap memory is used for buffers consumed internally by the HAL.
+// They are allocated from /dev/ion.
+class QCameraHeapMemory : public QCameraMemory {
+public:
+    QCameraHeapMemory(bool cached);
+    virtual ~QCameraHeapMemory();
+
+    virtual int allocate(uint8_t count, size_t size);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+
+private:
+    void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory is used for buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraStreamMemory : public QCameraMemory {
+public:
+    QCameraStreamMemory(camera_request_memory getMemory,
+                        void* cbCookie,
+                        bool cached,
+                        QCameraMemoryPool *pool = NULL,
+                        cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT);
+    virtual ~QCameraStreamMemory();
+
+    virtual int allocate(uint8_t count, size_t size);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+
+protected:
+    camera_request_memory mGetMemory;
+    void* mCallbackCookie;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory used for video buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraVideoMemory : public QCameraStreamMemory {
+public:
+    QCameraVideoMemory(camera_request_memory getMemory, void* cbCookie, bool cached);
+    virtual ~QCameraVideoMemory();
+
+    virtual int allocate(uint8_t count, size_t size);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+#ifdef USE_MEDIA_EXTENSIONS
+    native_handle_t *getNativeHandle(uint32_t index, bool metadata = true);
+    int closeNativeHandle(const void *data, bool metadata);
+    static int closeNativeHandle(const void *data);
+#endif
+    int getUsage() { return mUsage; }
+    int getFormat() { return mFormat; }
+    void setVideoInfo(int usage, cam_format_t format);
+    int convCamtoOMXFormat(cam_format_t format);
+private:
+    camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
+    uint8_t mMetaBufCount;
+#ifdef USE_MEDIA_EXTENSIONS
+    native_handle_t *mNativeHandle[MM_CAMERA_MAX_NUM_FRAMES];
+#endif
+    int mUsage, mFormat;
+};
+
+// Gralloc memory is acquired from the preview window
+class QCameraGrallocMemory : public QCameraMemory {
+    enum {
+        BUFFER_NOT_OWNED,
+        BUFFER_OWNED,
+    };
+public:
+    QCameraGrallocMemory(camera_request_memory getMemory, void* cbCookie);
+    void setNativeWindow(preview_stream_ops_t *anw);
+    virtual ~QCameraGrallocMemory();
+
+    virtual int allocate(uint8_t count, size_t size);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+
+    void setWindowInfo(preview_stream_ops_t *window, int width, int height,
+        int stride, int scanline, int format);
+    // Enqueue/display buffer[index] onto the native window,
+    // and dequeue one buffer from it.
+    // Returns the buffer index of the dequeued buffer.
+    int displayBuffer(uint32_t index);
+
+private:
+    buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    int mLocalFlag[MM_CAMERA_MAX_NUM_FRAMES];
+    struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    preview_stream_ops_t *mWindow;
+    int mWidth, mHeight, mFormat, mStride, mScanline;
+    camera_request_memory mGetMemory;
+    void* mCallbackCookie;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+    int mMinUndequeuedBuffers;
+};
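+// Editorial sketch (not part of the original source): an assumed call
+// sequence for the QCameraGrallocMemory class declared above --
+//   QCameraGrallocMemory mem(getMemoryCb, cbCookie);  // hypothetical names
+//   mem.setWindowInfo(window, width, height, stride, scanline, format);
+//   mem.allocate(bufCnt, bufSize);      // dequeues buffers from the window
+//   camera_memory_t *m = mem.getMemory(i, false);
+//   mem.displayBuffer(i);               // enqueue i, dequeue the next buffer
+//   mem.deallocate();                   // cancels buffers back to the window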
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HWI_MEM_H__ */
diff --git a/msm8974/QCamera2/HAL/QCameraParameters.cpp b/msm8974/QCamera2/HAL/QCameraParameters.cpp
new file mode 100644
index 0000000..b3d9b52
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraParameters.cpp
@@ -0,0 +1,9864 @@
+/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParameters"
+
+#include <cutils/properties.h>
+#include <math.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <string.h>
+#include <stdlib.h>
+#include <gralloc_priv.h>
+#include <sys/sysinfo.h>
+#include "QCamera2HWI.h"
+#include "QCameraParameters.h"
+
+#define ASPECT_TOLERANCE 0.001
+#define ISP_SEC_SCALAR_MAX_LIMIT (2048*1536)
+
+namespace qcamera {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_QC_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_QC_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_QC_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_QC_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_QC_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_QC_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_QC_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_QC_EXPOSURE_TIME[] = "exposure-time";
+const char QCameraParameters::KEY_QC_MIN_EXPOSURE_TIME[] = "min-exposure-time";
+const char QCameraParameters::KEY_QC_MAX_EXPOSURE_TIME[] = "max-exposure-time";
+const char QCameraParameters::KEY_QC_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_QC_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_QC_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_QC_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_QC_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_QC_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_QC_FOCUS_ALGO[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_SUPPORTED_FOCUS_ALGOS[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POSITION[] = "manual-focus-position";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POS_TYPE[] = "manual-focus-pos-type";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_INDEX[] = "min-focus-pos-index";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_INDEX[] = "max-focus-pos-index";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_DAC[] = "min-focus-pos-dac";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_DAC[] = "max-focus-pos-dac";
+const char QCameraParameters::KEY_QC_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_QC_FACE_RECOGNITION[] = "face-recognition";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_RECOGNITION[] = "face-recognition-values";
+const char QCameraParameters::KEY_QC_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_QC_DIS[] = "dis";
+const char QCameraParameters::KEY_QC_SUPPORTED_DIS_MODES[] = "dis-values";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_SPEED_RECORDING[] = "video-hsr";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_QC_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_QC_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_QC_ZSL[] = "zsl";
+const char QCameraParameters::KEY_QC_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_QC_ZSL_BURST_INTERVAL[] = "capture-burst-interval";
+const char QCameraParameters::KEY_QC_ZSL_BURST_LOOKBACK[] = "capture-burst-retroactive";
+const char QCameraParameters::KEY_QC_ZSL_QUEUE_DEPTH[] = "capture-burst-queue-depth";
+const char QCameraParameters::KEY_QC_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_QC_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_AE_BRACKET_MODES[] = "ae-bracket-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_RAW_FORMATS[] = "raw-format-values";
+const char QCameraParameters::KEY_QC_RAW_FORMAT[] = "raw-format";
+const char QCameraParameters::KEY_QC_ORIENTATION[] = "orientation";
+const char QCameraParameters::KEY_QC_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_CAPTURE_BURST_EXPOSURE[] = "capture-burst-exposures";
+const char QCameraParameters::KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[] = "num-snaps-per-shutter";
+const char QCameraParameters::KEY_QC_NO_DISPLAY_MODE[] = "no-display-mode";
+const char QCameraParameters::KEY_QC_RAW_PICUTRE_SIZE[] = "raw-size";
+const char QCameraParameters::KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[] = "supported-live-snapshot-sizes";
+const char QCameraParameters::KEY_QC_SCALED_PICTURE_SIZES[] = "scaled-picture-sizes";
+const char QCameraParameters::KEY_QC_HDR_NEED_1X[] = "hdr-need-1x";
+const char QCameraParameters::KEY_QC_PREVIEW_FLIP[] = "preview-flip";
+const char QCameraParameters::KEY_QC_VIDEO_FLIP[] = "video-flip";
+const char QCameraParameters::KEY_QC_SNAPSHOT_PICTURE_FLIP[] = "snapshot-picture-flip";
+const char QCameraParameters::KEY_QC_SUPPORTED_FLIP_MODES[] = "flip-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_HDR[] = "video-hdr";
+const char QCameraParameters::KEY_QC_SENSOR_HDR[] = "sensor-hdr";
+const char QCameraParameters::KEY_QC_VT_ENABLE[] = "avtimer";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HDR_MODES[] = "video-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SENSOR_HDR_MODES[] = "sensor-hdr-values";
+const char QCameraParameters::KEY_QC_AUTO_HDR_ENABLE[] = "auto-hdr-enable";
+const char QCameraParameters::KEY_QC_SNAPSHOT_BURST_NUM[] = "snapshot-burst-num";
+const char QCameraParameters::KEY_QC_SNAPSHOT_FD_DATA[] = "snapshot-fd-data-enable";
+const char QCameraParameters::KEY_QC_TINTLESS_ENABLE[] = "tintless";
+const char QCameraParameters::KEY_QC_CDS_MODE[] = "cds-mode";
+const char QCameraParameters::KEY_QC_VIDEO_ROTATION[] = "video-rotation";
+const char QCameraParameters::KEY_QC_AF_BRACKET[] = "af-bracket";
+const char QCameraParameters::KEY_QC_SUPPORTED_AF_BRACKET_MODES[] = "af-bracket-values";
+const char QCameraParameters::KEY_QC_CHROMA_FLASH[] = "chroma-flash";
+const char QCameraParameters::KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[] = "chroma-flash-values";
+const char QCameraParameters::KEY_QC_OPTI_ZOOM[] = "opti-zoom";
+const char QCameraParameters::KEY_QC_SEE_MORE[] = "see-more";
+const char QCameraParameters::KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[] = "opti-zoom-values";
+const char QCameraParameters::KEY_QC_FSSR[] = "FSSR";
+const char QCameraParameters::KEY_QC_SUPPORTED_FSSR_MODES[] = "FSSR-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SEE_MORE_MODES[] = "see-more-values";
+const char QCameraParameters::KEY_QC_TRUE_PORTRAIT[] = "true-portrait";
+const char QCameraParameters::KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[] = "true-portrait-values";
+const char QCameraParameters::KEY_QC_MULTI_TOUCH_FOCUS[] = "multi-touch-focus";
+const char QCameraParameters::KEY_QC_SUPPORTED_MULTI_TOUCH_FOCUS_MODES[] =
+        "multi-touch-focus-values";
+const char QCameraParameters::KEY_QC_WB_MANUAL_CCT[] = "wb-manual-cct";
+const char QCameraParameters::KEY_QC_MIN_WB_CCT[] = "min-wb-cct";
+const char QCameraParameters::KEY_QC_MAX_WB_CCT[] = "max-wb-cct";
+const char QCameraParameters::KEY_INTERNAL_PERVIEW_RESTART[] = "internal-restart";
+const char QCameraParameters::KEY_QC_LONG_SHOT[] = "long-shot";
+const char QCameraParameters::KEY_QC_LONGSHOT_SUPPORTED[] = "longshot-supported";
+const char QCameraParameters::KEY_QC_ZSL_HDR_SUPPORTED[] = "zsl-hdr-supported";
+const char QCameraParameters::KEY_QC_AUTO_HDR_SUPPORTED[] = "auto-hdr-supported";
+const char QCameraParameters::WHITE_BALANCE_MANUAL_CCT[] = "manual-cct";
+const char QCameraParameters::FOCUS_MODE_MANUAL_POSITION[] = "manual";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd";   // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+const char QCameraParameters::SCENE_MODE_HDR[] = "hdr";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+const char QCameraParameters::QC_PIXEL_FORMAT_NV12_VENUS[] = "nv12-venus";
+
+// Values for raw image formats
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[] = "yuv-raw8-yuyv";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[] = "yuv-raw8-yvyu";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[] = "yuv-raw8-uyvy";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[] = "yuv-raw8-vyuy";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[] = "bayer-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[] = "bayer-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[] = "bayer-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[] = "bayer-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[] = "bayer-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[] = "bayer-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[] = "bayer-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[] = "bayer-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[] = "bayer-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[] = "bayer-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[] = "bayer-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[] = "bayer-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[] = "bayer-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[] = "bayer-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[] = "bayer-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[] = "bayer-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[] = "bayer-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[] = "bayer-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[] = "bayer-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[] = "bayer-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[] = "bayer-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[] = "bayer-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[] = "bayer-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[] = "bayer-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[] = "bayer-ideal-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[] = "bayer-ideal-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[] = "bayer-ideal-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[] = "bayer-ideal-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[] = "bayer-ideal-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[] = "bayer-ideal-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[] = "bayer-ideal-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[] = "bayer-ideal-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[] = "bayer-ideal-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[] = "bayer-ideal-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[] = "bayer-ideal-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[] = "bayer-ideal-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[] = "bayer-ideal-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[] = "bayer-ideal-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[] = "bayer-ideal-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[] = "bayer-ideal-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[] = "bayer-ideal-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[] = "bayer-ideal-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[] = "bayer-ideal-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[] = "bayer-ideal-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[] = "bayer-ideal-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[] = "bayer-ideal-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[] = "bayer-ideal-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[] = "bayer-ideal-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[] = "bayer-ideal-plain8-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[] = "bayer-ideal-plain8-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[] = "bayer-ideal-plain8-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[] = "bayer-ideal-plain8-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[] = "bayer-ideal-plain16-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[] = "bayer-ideal-plain16-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[] = "bayer-ideal-plain16-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[] = "bayer-ideal-plain16-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[] = "bayer-ideal-plain16-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[] = "bayer-ideal-plain16-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[] = "bayer-ideal-plain16-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[] = "bayer-ideal-plain16-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[] = "bayer-ideal-plain16-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[] = "bayer-ideal-plain16-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[] = "bayer-ideal-plain16-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[] = "bayer-ideal-plain16-12bggr";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+const char QCameraParameters::ISO_3200[] = "ISO3200";
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SMART_METERING[] = "smart-metering";
+const char QCameraParameters::AUTO_EXPOSURE_USER_METERING[] = "user-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING_ADV[] = "spot-metering-adv";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[] = "center-weighted-adv";
+
+const char QCameraParameters::KEY_QC_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_QC_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_QC_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_QC_GPS_STATUS[] = "gps-status";
+
+const char QCameraParameters::KEY_QC_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_QC_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+const char QCameraParameters::VALUE_ENABLE[] = "enable";
+const char QCameraParameters::VALUE_DISABLE[] = "disable";
+const char QCameraParameters::VALUE_OFF[] = "off";
+const char QCameraParameters::VALUE_ON[] = "on";
+const char QCameraParameters::VALUE_TRUE[] = "true";
+const char QCameraParameters::VALUE_FALSE[] = "false";
+
+const char QCameraParameters::KEY_QC_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_QC_MIN_SHARPNESS[] = "min-sharpness";
+const char QCameraParameters::KEY_QC_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_QC_SHARPNESS_STEP[] = "sharpness-step";
+const char QCameraParameters::KEY_QC_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_QC_MIN_CONTRAST[] = "min-contrast";
+const char QCameraParameters::KEY_QC_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_QC_CONTRAST_STEP[] = "contrast-step";
+const char QCameraParameters::KEY_QC_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_QC_MIN_SATURATION[] = "min-saturation";
+const char QCameraParameters::KEY_QC_MAX_SATURATION[] = "max-saturation";
+const char QCameraParameters::KEY_QC_SATURATION_STEP[] = "saturation-step";
+const char QCameraParameters::KEY_QC_BRIGHTNESS[] = "luma-adaptation";
+const char QCameraParameters::KEY_QC_MIN_BRIGHTNESS[] = "min-brightness";
+const char QCameraParameters::KEY_QC_MAX_BRIGHTNESS[] = "max-brightness";
+const char QCameraParameters::KEY_QC_BRIGHTNESS_STEP[] = "brightness-step";
+const char QCameraParameters::KEY_QC_SCE_FACTOR[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_QC_MIN_SCE_FACTOR[] = "min-sce-factor";
+const char QCameraParameters::KEY_QC_MAX_SCE_FACTOR[] = "max-sce-factor";
+const char QCameraParameters::KEY_QC_SCE_FACTOR_STEP[] = "sce-factor-step";
+
+const char QCameraParameters::KEY_QC_SUPPORTED_CAMERA_FEATURES[] = "qc-camera-features";
+const char QCameraParameters::KEY_QC_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
+
+//Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::FOCUS_ALGO_AUTO[] = "auto";
+const char QCameraParameters::FOCUS_ALGO_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::FOCUS_ALGO_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::FOCUS_ALGO_FRAME_AVERAGE[] = "frame-average";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+const char QCameraParameters::VIDEO_HFR_5X[] = "150";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+// Values for AF Bracketing setting.
+const char QCameraParameters::AF_BRACKET_OFF[] = "af-bracket-off";
+const char QCameraParameters::AF_BRACKET_ON[] = "af-bracket-on";
+
+// Values for Chroma Flash setting.
+const char QCameraParameters::CHROMA_FLASH_OFF[] = "chroma-flash-off";
+const char QCameraParameters::CHROMA_FLASH_ON[] = "chroma-flash-on";
+
+// Values for Opti Zoom setting.
+const char QCameraParameters::OPTI_ZOOM_OFF[] = "opti-zoom-off";
+const char QCameraParameters::OPTI_ZOOM_ON[] = "opti-zoom-on";
+
+// Values for True Portrait setting.
+const char QCameraParameters::TRUE_PORTRAIT_OFF[] = "true-portrait-off";
+const char QCameraParameters::TRUE_PORTRAIT_ON[] = "true-portrait-on";
+
+// Values for FSSR setting.
+const char QCameraParameters::FSSR_OFF[] = "FSSR-off";
+const char QCameraParameters::FSSR_ON[] = "FSSR-on";
+
+// Value for Multi-touch Focus setting.
+const char QCameraParameters::MULTI_TOUCH_FOCUS_OFF[] = "multi-touch-focus-off";
+const char QCameraParameters::MULTI_TOUCH_FOCUS_ON[] = "multi-touch-focus-on";
+
+// Values for FLIP settings.
+const char QCameraParameters::FLIP_MODE_OFF[] = "off";
+const char QCameraParameters::FLIP_MODE_V[] = "flip-v";
+const char QCameraParameters::FLIP_MODE_H[] = "flip-h";
+const char QCameraParameters::FLIP_MODE_VH[] = "flip-vh";
+
+const char QCameraParameters::CDS_MODE_OFF[] = "off";
+const char QCameraParameters::CDS_MODE_ON[] = "on";
+const char QCameraParameters::CDS_MODE_AUTO[] = "auto";
+
+const char QCameraParameters::KEY_SELECTED_AUTO_SCENE[] = "selected-auto-scene";
+
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+const cam_dimension_t QCameraParameters::THUMBNAIL_SIZES_MAP[] = {
+    { 512, 288 }, //1.777778
+    { 480, 288 }, //1.666667
+    { 256, 154 }, //1.66233
+    { 432, 288 }, //1.5
+    { 320, 240 }, //1.33333
+    { 176, 144 }, //1.222222
+    { 0, 0 }      // required by Android SDK
+};
+
+const QCameraParameters::QCameraMap<cam_auto_exposure_mode_type>
+        QCameraParameters::AUTO_EXPOSURE_MAP[] = {
+    { AUTO_EXPOSURE_FRAME_AVG,           CAM_AEC_MODE_FRAME_AVERAGE },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
+    { AUTO_EXPOSURE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
+    { AUTO_EXPOSURE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
+    { AUTO_EXPOSURE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
+    { AUTO_EXPOSURE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
+};
+
+const QCameraParameters::QCameraMap<cam_format_t>
+        QCameraParameters::PREVIEW_FORMATS_MAP[] = {
+    {PIXEL_FORMAT_YUV420SP,        CAM_FORMAT_YUV_420_NV21},
+    {PIXEL_FORMAT_YUV420P,         CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_YUV420SP_ADRENO, CAM_FORMAT_YUV_420_NV21_ADRENO},
+    {PIXEL_FORMAT_YV12,            CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_NV12,            CAM_FORMAT_YUV_420_NV12},
+    {QC_PIXEL_FORMAT_NV12_VENUS,   CAM_FORMAT_YUV_420_NV12_VENUS}
+};
+
+const QCameraParameters::QCameraMap<cam_format_t>
+        QCameraParameters::PICTURE_TYPES_MAP[] = {
+    {PIXEL_FORMAT_JPEG,                          CAM_FORMAT_JPEG},
+    {PIXEL_FORMAT_YUV420SP,                      CAM_FORMAT_YUV_420_NV21},
+    {PIXEL_FORMAT_YUV422SP,                      CAM_FORMAT_YUV_422_NV16},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV,          CAM_FORMAT_YUV_RAW_8BIT_YUYV},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU,          CAM_FORMAT_YUV_RAW_8BIT_YVYU},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY,          CAM_FORMAT_YUV_RAW_8BIT_UYVY},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY,          CAM_FORMAT_YUV_RAW_8BIT_VYUY},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR}
+};
+
+const QCameraParameters::QCameraMap<cam_focus_mode_type>
+        QCameraParameters::FOCUS_MODES_MAP[] = {
+    { FOCUS_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
+    { FOCUS_MODE_INFINITY,           CAM_FOCUS_MODE_INFINITY },
+    { FOCUS_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
+    { FOCUS_MODE_FIXED,              CAM_FOCUS_MODE_FIXED },
+    { FOCUS_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
+    { FOCUS_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+    { FOCUS_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO },
+#ifndef VANILLA_HAL
+    { FOCUS_MODE_MANUAL_POSITION,    CAM_FOCUS_MODE_MANUAL},
+#endif
+};
+
+const QCameraParameters::QCameraMap<cam_effect_mode_type>
+        QCameraParameters::EFFECT_MODES_MAP[] = {
+    { EFFECT_NONE,       CAM_EFFECT_MODE_OFF },
+    { EFFECT_MONO,       CAM_EFFECT_MODE_MONO },
+    { EFFECT_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
+    { EFFECT_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
+    { EFFECT_SEPIA,      CAM_EFFECT_MODE_SEPIA },
+    { EFFECT_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
+    { EFFECT_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+    { EFFECT_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+    { EFFECT_AQUA,       CAM_EFFECT_MODE_AQUA },
+    { EFFECT_EMBOSS,     CAM_EFFECT_MODE_EMBOSS },
+    { EFFECT_SKETCH,     CAM_EFFECT_MODE_SKETCH },
+    { EFFECT_NEON,       CAM_EFFECT_MODE_NEON }
+};
+
+const QCameraParameters::QCameraMap<cam_scene_mode_type>
+        QCameraParameters::SCENE_MODES_MAP[] = {
+    { SCENE_MODE_AUTO,           CAM_SCENE_MODE_OFF },
+    { SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
+    { SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
+    { SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
+    { SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
+    { SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+    { SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
+    { SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
+    { SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
+    { SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
+    { SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
+    { SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
+    { SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
+    { SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
+    { SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
+    { SCENE_MODE_ASD,            CAM_SCENE_MODE_AUTO },
+    { SCENE_MODE_BACKLIGHT,      CAM_SCENE_MODE_BACKLIGHT },
+    { SCENE_MODE_FLOWERS,        CAM_SCENE_MODE_FLOWERS },
+    { SCENE_MODE_AR,             CAM_SCENE_MODE_AR },
+    { SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR },
+};
+
+const QCameraParameters::QCameraMap<cam_flash_mode_t>
+        QCameraParameters::FLASH_MODES_MAP[] = {
+    { FLASH_MODE_OFF,   CAM_FLASH_MODE_OFF },
+    { FLASH_MODE_AUTO,  CAM_FLASH_MODE_AUTO },
+    { FLASH_MODE_ON,    CAM_FLASH_MODE_ON },
+    { FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
+};
+
+const QCameraParameters::QCameraMap<cam_focus_algorithm_type>
+         QCameraParameters::FOCUS_ALGO_MAP[] = {
+    { FOCUS_ALGO_AUTO,            CAM_FOCUS_ALGO_AUTO },
+    { FOCUS_ALGO_SPOT_METERING,   CAM_FOCUS_ALGO_SPOT },
+    { FOCUS_ALGO_CENTER_WEIGHTED, CAM_FOCUS_ALGO_CENTER_WEIGHTED },
+    { FOCUS_ALGO_FRAME_AVERAGE,   CAM_FOCUS_ALGO_AVERAGE }
+};
+
+const QCameraParameters::QCameraMap<cam_wb_mode_type>
+        QCameraParameters::WHITE_BALANCE_MODES_MAP[] = {
+    { WHITE_BALANCE_AUTO,            CAM_WB_MODE_AUTO },
+    { WHITE_BALANCE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
+    { WHITE_BALANCE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
+    { WHITE_BALANCE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
+    { WHITE_BALANCE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
+    { WHITE_BALANCE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+    { WHITE_BALANCE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
+    { WHITE_BALANCE_SHADE,           CAM_WB_MODE_SHADE },
+#ifndef VANILLA_HAL
+    { WHITE_BALANCE_MANUAL_CCT,      CAM_WB_MODE_CCT},
+#endif
+};
+
+const QCameraParameters::QCameraMap<cam_antibanding_mode_type>
+        QCameraParameters::ANTIBANDING_MODES_MAP[] = {
+    { ANTIBANDING_OFF,  CAM_ANTIBANDING_MODE_OFF },
+    { ANTIBANDING_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+    { ANTIBANDING_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+    { ANTIBANDING_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+const QCameraParameters::QCameraMap<cam_iso_mode_type>
+        QCameraParameters::ISO_MODES_MAP[] = {
+    { ISO_AUTO,  CAM_ISO_MODE_AUTO },
+    { ISO_HJR,   CAM_ISO_MODE_DEBLUR },
+    { ISO_100,   CAM_ISO_MODE_100 },
+    { ISO_200,   CAM_ISO_MODE_200 },
+    { ISO_400,   CAM_ISO_MODE_400 },
+    { ISO_800,   CAM_ISO_MODE_800 },
+    { ISO_1600,  CAM_ISO_MODE_1600 },
+    { ISO_3200,  CAM_ISO_MODE_3200 }
+};
+
+const QCameraParameters::QCameraMap<cam_hfr_mode_t>
+        QCameraParameters::HFR_MODES_MAP[] = {
+    { VIDEO_HFR_OFF, CAM_HFR_MODE_OFF },
+    { VIDEO_HFR_2X,  CAM_HFR_MODE_60FPS },
+    { VIDEO_HFR_3X,  CAM_HFR_MODE_90FPS },
+    { VIDEO_HFR_4X,  CAM_HFR_MODE_120FPS },
+    { VIDEO_HFR_5X,  CAM_HFR_MODE_150FPS }
+};
+
+const QCameraParameters::QCameraMap<cam_bracket_mode>
+        QCameraParameters::BRACKETING_MODES_MAP[] = {
+    { AE_BRACKET_OFF, CAM_EXP_BRACKETING_OFF },
+    { AE_BRACKET,     CAM_EXP_BRACKETING_ON }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::ON_OFF_MODES_MAP[] = {
+    { VALUE_OFF, 0 },
+    { VALUE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TOUCH_AF_AEC_MODES_MAP[] = {
+    { QCameraParameters::TOUCH_AF_AEC_OFF, 0 },
+    { QCameraParameters::TOUCH_AF_AEC_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::ENABLE_DISABLE_MODES_MAP[] = {
+    { VALUE_ENABLE,  1 },
+    { VALUE_DISABLE, 0 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::DENOISE_ON_OFF_MODES_MAP[] = {
+    { DENOISE_OFF, 0 },
+    { DENOISE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TRUE_FALSE_MODES_MAP[] = {
+    { VALUE_FALSE, 0},
+    { VALUE_TRUE,  1}
+};
+
+const QCameraParameters::QCameraMap<cam_flip_t>
+        QCameraParameters::FLIP_MODES_MAP[] = {
+    {FLIP_MODE_OFF, FLIP_NONE},
+    {FLIP_MODE_V, FLIP_V},
+    {FLIP_MODE_H, FLIP_H},
+    {FLIP_MODE_VH, FLIP_V_H}
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::AF_BRACKETING_MODES_MAP[] = {
+    { AF_BRACKET_OFF, 0 },
+    { AF_BRACKET_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::CHROMA_FLASH_MODES_MAP[] = {
+    { CHROMA_FLASH_OFF, 0 },
+    { CHROMA_FLASH_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::OPTI_ZOOM_MODES_MAP[] = {
+    { OPTI_ZOOM_OFF, 0 },
+    { OPTI_ZOOM_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TRUE_PORTRAIT_MODES_MAP[] = {
+    { TRUE_PORTRAIT_OFF, 0 },
+    { TRUE_PORTRAIT_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::FSSR_MODES_MAP[] = {
+    { FSSR_OFF, 0 },
+    { FSSR_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::MULTI_TOUCH_FOCUS_MODES_MAP[] = {
+    { MULTI_TOUCH_FOCUS_OFF, 0 },
+    { MULTI_TOUCH_FOCUS_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<cam_cds_mode_type_t>
+        QCameraParameters::CDS_MODES_MAP[] = {
+    { CDS_MODE_OFF, CAM_CDS_MODE_OFF },
+    { CDS_MODE_ON, CAM_CDS_MODE_ON },
+    { CDS_MODE_AUTO, CAM_CDS_MODE_AUTO}
+};
+
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+#define MIN_PP_BUF_CNT 1
+#define TOTAL_RAM_SIZE_512MB 536870912
+#define PARAM_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
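+// Editorial sketch (not part of the original source): the QCameraMap tables
+// above pair a parameter string with its backend enum value, and
+// PARAM_MAP_SIZE() gives the entry count. Assuming each entry exposes a
+// string member and a value member (as the brace initializers suggest), a
+// lookup is essentially:
+//
+//   // hypothetical helper; member names are assumptions
+//   template <class T, size_t N>
+//   static int mapStrToVal(const T (&map)[N], const char *str) {
+//       for (size_t i = 0; i < N; i++)
+//           if (!strcmp(map[i].desc, str)) return map[i].val;
+//       return NAME_NOT_FOUND;
+//   }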
+
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: default constructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters()
+    : CameraParameters(),
+      m_reprocScaleParam(this),
+      m_pCapability(NULL),
+      m_pCamOpsTbl(NULL),
+      m_pParamHeap(NULL),
+      m_pParamBuf(NULL),
+      m_bZslMode(false),
+      m_bZslMode_new(false),
+      m_bRecordingHint(false),
+      m_bRecordingHint_new(false),
+      m_bHistogramEnabled(false),
+      m_bLongshotEnabled(false),
+      m_nFaceProcMask(0),
+      m_bDebugFps(false),
+      mFocusMode(CAM_FOCUS_MODE_MAX),
+      mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+      mPictureFormat(CAM_FORMAT_JPEG),
+      m_bNeedRestart(false),
+      m_bNoDisplayMode(false),
+      m_bWNROn(false),
+      m_bInited(false),
+      m_nBurstNum(1),
+      m_bUpdateEffects(false),
+      m_bSceneTransitionAuto(false),
+      m_bPreviewFlipChanged(false),
+      m_bVideoFlipChanged(false),
+      m_bSnapshotFlipChanged(false),
+      m_bFixedFrameRateSet(false),
+      m_bHDREnabled(false),
+      m_bAVTimerEnabled(false),
+      m_bMobiMask(0),
+      m_bDISEnabled(false),
+      m_AdjustFPS(NULL),
+      m_bHDR1xFrameEnabled(false),
+      m_HDRSceneEnabled(false),
+      m_bHDRThumbnailProcessNeeded(false),
+      m_bHDR1xExtraBufferNeeded(true),
+      m_bHDROutputCropEnabled(false),
+      m_curCCT(-1),
+      m_curFocusPos(-1),
+      m_tempMap(),
+      m_bAFBracketingOn(false),
+      m_bMultiTouchFocusOn(false),
+      m_bChromaFlashOn(false),
+      m_bOptiZoomOn(false),
+      m_bFssrOn(false),
+      m_bSeeMoreOn(false),
+      m_bHfrMode(false),
+      mHfrMode(CAM_HFR_MODE_OFF),
+      m_bDisplayFrame(true),
+      m_bAeBracketingEnabled(false),
+      mFlashValue(CAM_FLASH_MODE_OFF),
+      mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+      m_bSensorHDREnabled(false),
+      m_bTruePortraitOn(false)
+{
+    char value[PROPERTY_VALUE_MAX];
+    // TODO: may move to parameter instead of sysprop
+    property_get("persist.debug.sf.showfps", value, "0");
+    m_bDebugFps = atoi(value) > 0 ? true : false;
+    m_bReleaseTorchCamera = false;
+    m_pTorch = NULL;
+
+    // The thermal mode is set as a system property because a system
+    // property applies to all applications, while parameters apply
+    // only to a specific app.
+    property_get("persist.camera.thermal.mode", value, "frameskip");
+    if (!strcmp(value, "frameskip")) {
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FRAMESKIP;
+    } else {
+        if (strcmp(value, "fps"))
+            ALOGE("%s: Invalid camera thermal mode %s", __func__, value);
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FPS;
+    }
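+    // Editorial note (not part of the original source): given the parsing
+    // above, the thermal policy can be selected system-wide, e.g. (assumed
+    // usage):
+    //   adb shell setprop persist.camera.thermal.mode fps        # adjust FPS
+    //   adb shell setprop persist.camera.thermal.mode frameskip  # default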
+
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+    memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+    memset(&m_MTFBracketInfo, 0, sizeof(m_MTFBracketInfo));
+    memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+
+    // init focus steps to -1, invalid steps
+    for (int i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+       m_MTFBracketInfo.focus_steps[i] = -1;
+    }
+
+    m_currNumBufMTF = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: constructor of QCameraParameters
+ *
+ * PARAMETERS :
+ *   @params  : parameters in string
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters(const String8 &params)
+    : CameraParameters(params),
+    m_reprocScaleParam(this),
+    m_pCapability(NULL),
+    m_pCamOpsTbl(NULL),
+    m_pParamHeap(NULL),
+    m_pParamBuf(NULL),
+    m_bZslMode(false),
+    m_bZslMode_new(false),
+    m_bRecordingHint(false),
+    m_bRecordingHint_new(false),
+    m_bHistogramEnabled(false),
+    m_bLongshotEnabled(false),
+    m_nFaceProcMask(0),
+    m_bDebugFps(false),
+    mFocusMode(CAM_FOCUS_MODE_MAX),
+    mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+    mPictureFormat(CAM_FORMAT_JPEG),
+    m_bNeedRestart(false),
+    m_bNoDisplayMode(false),
+    m_bWNROn(false),
+    m_bInited(false),
+    m_nBurstNum(1),
+    m_bPreviewFlipChanged(false),
+    m_bVideoFlipChanged(false),
+    m_bSnapshotFlipChanged(false),
+    m_bFixedFrameRateSet(false),
+    m_bHDREnabled(false),
+    m_bAVTimerEnabled(false),
+    m_AdjustFPS(NULL),
+    m_bHDR1xFrameEnabled(false),
+    m_HDRSceneEnabled(false),
+    m_bHDRThumbnailProcessNeeded(false),
+    m_bHDR1xExtraBufferNeeded(true),
+    m_bHDROutputCropEnabled(false),
+    m_tempMap(),
+    m_bAFBracketingOn(false),
+    m_bMultiTouchFocusOn(false),
+    m_bChromaFlashOn(false),
+    m_bOptiZoomOn(false),
+    m_bFssrOn(false),
+    m_bSeeMoreOn(false),
+    m_bHfrMode(false),
+    mHfrMode(CAM_HFR_MODE_OFF),
+    m_bAeBracketingEnabled(false),
+    mFlashValue(CAM_FLASH_MODE_OFF),
+    mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+    m_bSensorHDREnabled(false),
+    m_bTruePortraitOn(false)
+{
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+    m_pTorch = NULL;
+    m_bReleaseTorchCamera = false;
+    m_currNumBufMTF = 0;
+    memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+    memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+    memset(&m_MTFBracketInfo, 0, sizeof(m_MTFBracketInfo));
+
+    // init focus steps to -1, invalid steps
+    for (int i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+       m_MTFBracketInfo.focus_steps[i] = -1;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraParameters
+ *
+ * DESCRIPTION: destructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::~QCameraParameters()
+{
+    deinit();
+}
+
+/*===========================================================================
+ * FUNCTION   : createSizesString
+ *
+ * DESCRIPTION: create string obj containing array of dimensions
+ *
+ * PARAMETERS :
+ *   @sizes   : array of dimensions
+ *   @len     : size of dimension array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createSizesString(const cam_dimension_t *sizes, size_t len)
+{
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height);
+        str.append(buffer);
+    }
+    for (size_t i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d",
+                sizes[i].width, sizes[i].height);
+        str.append(buffer);
+    }
+    return str;
+}
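+// Example (illustrative): an input of {1920x1080, 1280x720, 640x480} yields
+// "1920x1080,1280x720,640x480", the comma-separated WxH form used for the
+// supported-size parameter strings.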
+
+/*===========================================================================
+ * FUNCTION   : createValuesString
+ *
+ * DESCRIPTION: create string obj containing values from the map that match
+ *              entries in the input values array
+ *
+ * PARAMETERS :
+ *   @values  : array of values
+ *   @len     : size of values array
+ *   @map     : map contains the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+template <typename valuesType, class mapType> String8 createValuesString(
+        const valuesType *values, size_t len, const mapType *map, size_t map_len)
+{
+    String8 str;
+    int count = 0;
+
+    for (size_t i = 0; i < len; i++ ) {
+        for (size_t j = 0; j < map_len; j ++)
+            if (map[j].val == values[i]) {
+                if (NULL != map[j].desc) {
+                    if (count > 0) {
+                        str.append(",");
+                    }
+                    str.append(map[j].desc);
+                    count++;
+                    break; //loop j
+                }
+            }
+    }
+    return str;
+}
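+// Example (illustrative, hypothetical map): with a map of { {"off", 0}, {"on", 1} }
+// and input values {1, 0, 2}, the result is "on,off"; the value 2 has no map entry
+// and is silently skipped.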
+
+/*===========================================================================
+ * FUNCTION   : createValuesStringFromMap
+ *
+ * DESCRIPTION: create string obj containing array of values directly from map
+ *
+ * PARAMETERS :
+ *   @map     : map contains the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+template <class mapType> String8 createValuesStringFromMap(
+        const mapType *map, size_t map_len)
+{
+    String8 str;
+
+    for (size_t i = 0; i < map_len; i++) {
+        if (NULL != map[i].desc) {
+            if (i > 0) {
+                str.append(",");
+            }
+            str.append(map[i].desc);
+        }
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createZoomRatioValuesString
+ *
+ * DESCRIPTION: create string obj containing array of zoom ratio values
+ *
+ * PARAMETERS :
+ *   @zoomRatios : array of zoom ratios
+ *   @length     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createZoomRatioValuesString(uint32_t *zoomRatios,
+        size_t length)
+{
+    String8 str;
+    char buffer[32] = {0};
+
+    if(length > 0){
+        snprintf(buffer, sizeof(buffer), "%d", zoomRatios[0]);
+        str.append(buffer);
+    }
+
+    for (size_t i = 1; i < length; i++) {
+        memset(buffer, 0, sizeof(buffer));
+        snprintf(buffer, sizeof(buffer), ",%d", zoomRatios[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createHfrValuesString
+ *
+ * DESCRIPTION: create string obj containing hfr values from the map that match
+ *              entries in the input hfr info array
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *   @map     : map of hfr string value and enum
+ *   @map_len : size of map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrValuesString(const cam_hfr_info_t *values,
+        size_t len, const QCameraMap<cam_hfr_mode_t> *map, size_t map_len)
+{
+    String8 str;
+    int count = 0;
+
+    for (size_t i = 0; i < len; i++ ) {
+        for (size_t j = 0; j < map_len; j ++)
+            if (map[j].val == (int)values[i].mode) {
+                if (NULL != map[j].desc) {
+                    if (count > 0) {
+                        str.append(",");
+                    }
+                    str.append(map[j].desc);
+                    count++;
+                    break; //loop j
+                }
+            }
+    }
+    if (count > 0) {
+        str.append(",");
+    }
+    str.append(VIDEO_HFR_OFF);
+    return str;
+}
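+// Example (illustrative): if the capability table reports only the 90 and 120 fps
+// HFR modes, the result might look like "90,120,off"; VIDEO_HFR_OFF is always
+// appended so the application can turn HFR off.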
+
+/*===========================================================================
+ * FUNCTION   : createHfrSizesString
+ *
+ * DESCRIPTION: create string obj containing array of hfr sizes
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrSizesString(const cam_hfr_info_t *values, size_t len)
+{
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d",
+                 values[0].dim.width, values[0].dim.height);
+        str.append(buffer);
+    }
+    for (size_t i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d",
+                 values[i].dim.width, values[i].dim.height);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createFpsString
+ *
+ * DESCRIPTION: create string obj containing array of FPS rates
+ *
+ * PARAMETERS :
+ *   @fps     : default fps range
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createFpsString(cam_fps_range_t &fps)
+{
+    char buffer[32];
+    String8 fpsValues;
+
+    int min_fps = int(fps.min_fps);
+    int max_fps = int(fps.max_fps);
+
+    if (min_fps < fps.min_fps){
+        min_fps++;
+    }
+    if (max_fps > fps.max_fps) {
+        max_fps--;
+    }
+    if (min_fps <= max_fps) {
+        snprintf(buffer, sizeof(buffer), "%d", min_fps);
+        fpsValues.append(buffer);
+    }
+
+    for (int i = min_fps+1; i <= max_fps; i++) {
+        snprintf(buffer, sizeof(buffer), ",%d", i);
+        fpsValues.append(buffer);
+    }
+
+    return fpsValues;
+}
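+// Example (illustrative): for a default range of (7.5, 30.0) fps this returns
+// "8,9,...,30"; the fractional minimum is rounded up and the maximum truncated,
+// so only whole frame rates inside the range are advertised.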
+
+/*===========================================================================
+ * FUNCTION   : createFpsRangeString
+ *
+ * DESCRIPTION: create string obj containing array of FPS ranges
+ *
+ * PARAMETERS :
+ *   @fps     : array of fps ranges
+ *   @len     : size of the array
+ *   @default_fps_index : reference to index of default fps range
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createFpsRangeString(const cam_fps_range_t* fps,
+        size_t len, int &default_fps_index)
+{
+    String8 str;
+    char buffer[32];
+    int max_range = 0;
+    int min_fps, max_fps;
+
+    if (len > 0) {
+        min_fps = int(fps[0].min_fps * 1000);
+        max_fps = int(fps[0].max_fps * 1000);
+        max_range = max_fps - min_fps;
+        default_fps_index = 0;
+        snprintf(buffer, sizeof(buffer), "(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    for (size_t i = 1; i < len; i++) {
+        min_fps = int(fps[i].min_fps * 1000);
+        max_fps = int(fps[i].max_fps * 1000);
+        if (max_range < (max_fps - min_fps)) {
+            max_range = max_fps - min_fps;
+            default_fps_index = (int)i;
+        }
+        snprintf(buffer, sizeof(buffer), ",(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    return str;
+}
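+// Example (illustrative): ranges {(7.5, 30.0), (30.0, 30.0)} produce
+// "(7500,30000),(30000,30000)" and set default_fps_index to 0, since the first
+// entry has the widest min-to-max span.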
+
+/*===========================================================================
+ * FUNCTION   : lookupAttr
+ *
+ * DESCRIPTION: lookup a value by its name
+ *
+ * PARAMETERS :
+ *   @attr    : map contains <name, value>
+ *   @len     : size of the map
+ *   @name    : name to be looked up
+ *
+ * RETURN     : valid value if found
+ *              NAME_NOT_FOUND if not found
+ *==========================================================================*/
+template <class mapType> int lookupAttr(const mapType *arr,
+        size_t len, const char *name)
+{
+    if (name) {
+        for (size_t i = 0; i < len; i++) {
+            if (!strcmp(arr[i].desc, name))
+                return arr[i].val;
+        }
+    }
+    return NAME_NOT_FOUND;
+}
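+// Example (illustrative): lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+// CDS_MODE_ON) returns CAM_CDS_MODE_ON; a name with no entry yields NAME_NOT_FOUND,
+// which callers must check before casting the result to an enum.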
+
+/*===========================================================================
+ * FUNCTION   : lookupNameByValue
+ *
+ * DESCRIPTION: lookup a name by its value
+ *
+ * PARAMETERS :
+ *   @attr    : map contains <name, value>
+ *   @len     : size of the map
+ *   @value   : value to be looked up
+ *
+ * RETURN     : name str or NULL if not found
+ *==========================================================================*/
+template <class mapType> const char *lookupNameByValue(const mapType *arr,
+        size_t len, int value)
+{
+    for (size_t i = 0; i < len; i++) {
+        if (arr[i].val == value) {
+            return arr[i].desc;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewSize
+ *
+ * DESCRIPTION: set preview size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPreviewSize(&width, &height);
+    CDBG("Requested preview size %d x %d", width, height);
+
+    // Validate the preview size
+    for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->preview_sizes_tbl[i].width
+           && height ==  m_pCapability->preview_sizes_tbl[i].height) {
+            // check if need to restart preview in case of preview size change
+            int old_width, old_height;
+            CameraParameters::getPreviewSize(&old_width, &old_height);
+            if (width != old_width || height != old_height) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            CDBG_HIGH("%s: Requested preview size %d x %d", __func__, width, height);
+            CameraParameters::setPreviewSize(width, height);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid preview size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureSize
+ *
+ * DESCRIPTION: set picture size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPictureSize(&width, &height);
+    CDBG("Requested picture size %d x %d", width, height);
+
+    // Validate the picture size
+    if(!m_reprocScaleParam.isScaleEnabled()){
+        for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+            if (width ==  m_pCapability->picture_sizes_tbl[i].width
+               && height ==  m_pCapability->picture_sizes_tbl[i].height) {
+                // check if need to restart preview in case of picture size change
+                int old_width, old_height;
+                CameraParameters::getPictureSize(&old_width, &old_height);
+                if ((m_bZslMode || m_bRecordingHint) &&
+                    (width != old_width || height != old_height)) {
+                    m_bNeedRestart = true;
+                }
+
+                // set the new value
+                CDBG_HIGH("%s: Requested picture size %d x %d", __func__, width, height);
+                CameraParameters::setPictureSize(width, height);
+                return NO_ERROR;
+            }
+        }
+    }else{
+        //should use scaled picture size table to validate
+        if(m_reprocScaleParam.setValidatePicSize(width, height) == NO_ERROR){
+            // check if need to restart preview in case of picture size change
+            int old_width, old_height;
+            CameraParameters::getPictureSize(&old_width, &old_height);
+            if ((m_bZslMode || m_bRecordingHint) &&
+                (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            char val[32];
+            snprintf(val, sizeof(val), "%dx%d", width, height);
+            CDBG_HIGH("%s: picture size requested %s", __func__, val);
+            updateParamEntry(KEY_PICTURE_SIZE, val);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid picture size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoSize
+ *
+ * DESCRIPTION: set video size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoSize(const QCameraParameters& params)
+{
+    const char *str= NULL;
+    int width, height;
+    str = params.get(KEY_VIDEO_SIZE);
+    if(!str) {
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        params.getPreviewSize(&width, &height);
+        ALOGE("No Record Size requested, use the preview dimensions");
+    } else {
+        params.getVideoSize(&width, &height);
+    }
+    // Validate the video size
+    for (size_t i = 0; i < m_pCapability->video_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->video_sizes_tbl[i].width
+                && height ==  m_pCapability->video_sizes_tbl[i].height) {
+            // check if need to restart preview in case of video size change
+            int old_width, old_height;
+            CameraParameters::getVideoSize(&old_width, &old_height);
+            if (m_bRecordingHint &&
+               (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            CDBG_HIGH("%s: Requested video size %d x %d", __func__, width, height);
+            CameraParameters::setVideoSize(width, height);
+            return NO_ERROR;
+        }
+    }
+
+    ALOGE("Invalid video size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLiveSnapshotSize
+ *
+ * DESCRIPTION: set live snapshot size
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLiveSnapshotSize(const QCameraParameters& params)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.opt.livepic", value, "1");
+    bool useOptimal = atoi(value) > 0 ? true : false;
+
+    // use picture size from user setting
+    params.getPictureSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+
+    size_t livesnapshot_sizes_tbl_cnt =
+            m_pCapability->livesnapshot_sizes_tbl_cnt;
+    cam_dimension_t *livesnapshot_sizes_tbl =
+            &m_pCapability->livesnapshot_sizes_tbl[0];
+
+    // check if HFR is enabled
+    const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
+    const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+        int32_t value = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+        if ((value != NAME_NOT_FOUND) && (value > CAM_HFR_MODE_OFF)) {
+            // if HSR is enabled, change live snapshot size
+            for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+                if (m_pCapability->hfr_tbl[i].mode == value) {
+                    livesnapshot_sizes_tbl_cnt =
+                            m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+                    livesnapshot_sizes_tbl =
+                            &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+                    hfrMode = m_pCapability->hfr_tbl[i].mode;
+                    break;
+                }
+            }
+        }
+    } else if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+        int32_t value = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+        if ((value != NAME_NOT_FOUND) && (value > CAM_HFR_MODE_OFF)) {
+            // if HFR is enabled, change live snapshot size
+            for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+                if (m_pCapability->hfr_tbl[i].mode == value) {
+                    livesnapshot_sizes_tbl_cnt =
+                            m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+                    livesnapshot_sizes_tbl =
+                            &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+                    hfrMode = m_pCapability->hfr_tbl[i].mode;
+                    break;
+                }
+            }
+        }
+    }
+
+    if (useOptimal || hfrMode != CAM_HFR_MODE_OFF) {
+        bool found = false;
+
+        // first check if picture size is within the list of supported sizes
+        for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+            if (m_LiveSnapshotSize.width == livesnapshot_sizes_tbl[i].width &&
+                m_LiveSnapshotSize.height == livesnapshot_sizes_tbl[i].height) {
+                found = true;
+                break;
+            }
+        }
+
+        if (!found) {
+            // use optimal live snapshot size from supported list,
+            // that has same preview aspect ratio
+            int width = 0, height = 0;
+            params.getPreviewSize(&width, &height);
+
+            double previewAspectRatio = (double)width / height;
+            for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+                double ratio = (double)livesnapshot_sizes_tbl[i].width /
+                                livesnapshot_sizes_tbl[i].height;
+                if (fabs(previewAspectRatio - ratio) <= ASPECT_TOLERANCE) {
+                    m_LiveSnapshotSize = livesnapshot_sizes_tbl[i];
+                    found = true;
+                    break;
+                }
+            }
+
+            if (!found && hfrMode != CAM_HFR_MODE_OFF) {
+                // Cannot find matching aspect ratio from supported live snapshot list
+                // choose the max dim from preview and video size
+                CDBG("%s: Cannot find matching aspect ratio, choose max of preview or video size", __func__);
+                params.getVideoSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+                if (m_LiveSnapshotSize.width < width && m_LiveSnapshotSize.height < height) {
+                    m_LiveSnapshotSize.width = width;
+                    m_LiveSnapshotSize.height = height;
+                }
+            }
+        }
+    }
+    CDBG("%s: live snapshot size %d x %d", __func__,
+          m_LiveSnapshotSize.width, m_LiveSnapshotSize.height);
+
+    return NO_ERROR;
+}
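+// Example (illustrative): with a 1280x720 preview and a requested 4:3 picture size
+// that is absent from the (possibly HFR-specific) livesnapshot table, the first
+// 16:9 table entry is selected; in HFR mode with no aspect match at all, the larger
+// of the video and preview dimensions is used instead.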
+
+/*===========================================================================
+ * FUNCTION   : setRawSize
+ *
+ * DESCRIPTION: set raw picture size
+ *
+ * PARAMETERS :
+ *   @dim     : raw picture dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRawSize(cam_dimension_t &dim)
+{
+    m_rawSize = dim;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFormat
+ *
+ * DESCRIPTION: set preview format from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat = lookupAttr(PREVIEW_FORMATS_MAP,
+            PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP), str);
+    if (previewFormat != NAME_NOT_FOUND) {
+        mPreviewFormat = (cam_format_t)previewFormat;
+
+        CameraParameters::setPreviewFormat(str);
+        CDBG("%s: format %d\n", __func__, mPreviewFormat);
+        return NO_ERROR;
+    }
+    ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureFormat
+ *
+ * DESCRIPTION: set picture format from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPictureFormat();
+    int32_t pictureFormat = lookupAttr(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), str);
+    if (pictureFormat != NAME_NOT_FOUND) {
+        mPictureFormat = pictureFormat;
+
+        CameraParameters::setPictureFormat(str);
+        CDBG("%s: format %d\n", __func__, mPictureFormat);
+        return NO_ERROR;
+    }
+    ALOGE("Invalid picture format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegThumbnailSize
+ *
+ * DESCRIPTION: set jpeg thumbnail size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegThumbnailSize(const QCameraParameters& params)
+{
+    int width = params.getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = params.getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    CDBG("requested jpeg thumbnail size %d x %d", width, height);
+
+    set(KEY_JPEG_THUMBNAIL_WIDTH, width);
+    set(KEY_JPEG_THUMBNAIL_HEIGHT, height);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegQuality
+ *
+ * DESCRIPTION: set jpeg encoding quality from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegQuality(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int quality = params.getInt(KEY_JPEG_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_QUALITY, quality);
+    } else {
+        ALOGE("%s: Invalid jpeg quality=%d", __func__, quality);
+        rc = BAD_VALUE;
+    }
+
+    quality = params.getInt(KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    } else {
+        ALOGE("%s: Invalid jpeg thumbnail quality=%d", __func__, quality);
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOrientation
+ *
+ * DESCRIPTION: set orientation from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_ORIENTATION);
+
+    if (str != NULL) {
+        if (strcmp(str, portrait) == 0 || strcmp(str, landscape) == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            set(KEY_QC_ORIENTATION, str);
+        } else {
+            ALOGE("%s: Invalid orientation value: %s", __func__, str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_AUTO_EXPOSURE);
+    const char *prev_str = get(KEY_QC_AUTO_EXPOSURE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAutoExposure(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS range from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(const QCameraParameters& params)
+{
+    int minFps,maxFps;
+    int prevMinFps, prevMaxFps, vidMinFps, vidMaxFps;
+    int rc = NO_ERROR;
+    bool found = false, updateNeeded = false;
+
+    CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    params.getPreviewFpsRange(&minFps, &maxFps);
+
+    CDBG_HIGH("%s: FpsRange Values:(%d, %d)", __func__, prevMinFps, prevMaxFps);
+    CDBG_HIGH("%s: Requested FpsRange Values:(%d, %d)", __func__, minFps, maxFps);
+
+    //first check if we need to change fps because of HFR mode change
+    updateNeeded = UpdateHFRFrameRate(params);
+    if(updateNeeded) {
+        m_bNeedRestart = true;
+        rc = setHighFrameRate(mHfrMode);
+        if (rc != NO_ERROR) goto end;
+    }
+    CDBG("%s: UpdateHFRFrameRate %d", __func__, updateNeeded);
+
+    vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+    vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+
+    if(minFps == prevMinFps && maxFps == prevMaxFps) {
+        if ( m_bFixedFrameRateSet ) {
+            minFps = params.getPreviewFrameRate() * 1000;
+            maxFps = params.getPreviewFrameRate() * 1000;
+            m_bFixedFrameRateSet = false;
+        } else if (!updateNeeded) {
+            CDBG("%s: No change in FpsRange", __func__);
+            rc = NO_ERROR;
+            goto end;
+        }
+    }
+
+    for(size_t i = 0; i < m_pCapability->fps_ranges_tbl_cnt; i++) {
+        // if the value is in the supported list
+        if (minFps >= m_pCapability->fps_ranges_tbl[i].min_fps * 1000 &&
+                maxFps <= m_pCapability->fps_ranges_tbl[i].max_fps * 1000) {
+            found = true;
+            CDBG_HIGH("%s: FPS i=%d : minFps = %d, maxFps = %d"
+                    " vidMinFps = %d, vidMaxFps = %d",
+                    __func__, (int)i, minFps, maxFps,
+                    (int)m_hfrFpsRange.video_min_fps,
+                    (int)m_hfrFpsRange.video_max_fps);
+            if ((0.0f >= m_hfrFpsRange.video_min_fps) ||
+                    (0.0f >= m_hfrFpsRange.video_max_fps)) {
+                vidMinFps = minFps;
+                vidMaxFps = maxFps;
+            }
+            else {
+                vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+                vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+            }
+
+            setPreviewFpsRange(minFps, maxFps, vidMinFps, vidMaxFps);
+            break;
+        }
+    }
+    if(found == false){
+        ALOGE("%s: error: FPS range value not supported", __func__);
+        rc = BAD_VALUE;
+    }
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : UpdateHFRFrameRate
+ *
+ * DESCRIPTION: set preview FPS range based on HFR setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : bool true/false
+ *                  true - if HAL needs to overwrite FPS range set by app, false otherwise.
+ *==========================================================================*/
+bool QCameraParameters::UpdateHFRFrameRate(const QCameraParameters& params)
+{
+    bool updateNeeded = false;
+    int min_fps, max_fps;
+    int32_t hfrMode = CAM_HFR_MODE_OFF;
+    int32_t newHfrMode = CAM_HFR_MODE_OFF;
+
+    int parm_minfps,parm_maxfps;
+    int prevMinFps, prevMaxFps;
+    CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    params.getPreviewFpsRange(&parm_minfps, &parm_maxfps);
+    CDBG("%s: CameraParameters - : minFps = %d, maxFps = %d ",
+                __func__, prevMinFps, prevMaxFps);
+    CDBG("%s: Requested params - : minFps = %d, maxFps = %d ",
+                __func__, parm_minfps, parm_maxfps);
+
+    const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    const char *prev_hfrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    const char *prev_hsrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    if ((hfrStr != NULL) && (prev_hfrStr != NULL) && strcmp(hfrStr, prev_hfrStr)) {
+        updateParamEntry(KEY_QC_VIDEO_HIGH_FRAME_RATE, hfrStr);
+    }
+
+    if ((hsrStr != NULL) && (prev_hsrStr != NULL) && strcmp(hsrStr, prev_hsrStr)) {
+        updateParamEntry(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, hsrStr);
+    }
+
+    // check if HFR is enabled
+    if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+        hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+        if(NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+    }
+    // check if HSR is enabled
+    else if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+        hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+        if(NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+    }
+    CDBG("%s: prevHfrMode - %d, currentHfrMode = %d ",
+                __func__, mHfrMode, newHfrMode);
+
+    if (mHfrMode != newHfrMode) {
+        updateNeeded = true;
+        mHfrMode = newHfrMode;
+        switch(mHfrMode){
+            case CAM_HFR_MODE_60FPS:
+                min_fps = 60000;
+                max_fps = 60000;
+                break;
+            case CAM_HFR_MODE_90FPS:
+                min_fps = 90000;
+                max_fps = 90000;
+                break;
+            case CAM_HFR_MODE_120FPS:
+                min_fps = 120000;
+                max_fps = 120000;
+                break;
+            case CAM_HFR_MODE_150FPS:
+                min_fps = 150000;
+                max_fps = 150000;
+                break;
+            case CAM_HFR_MODE_OFF:
+            default:
+                // Set Video Fps to zero
+                min_fps = 0;
+                max_fps = 0;
+                break;
+        }
+        m_hfrFpsRange.video_min_fps = (float)min_fps;
+        m_hfrFpsRange.video_max_fps = (float)max_fps;
+
+        CDBG_HIGH("%s: HFR mode (%d) Set video FPS : minFps = %d, maxFps = %d ",
+                __func__, mHfrMode, min_fps, max_fps);
+    }
+
+    // Remember if HFR mode is ON
+    if ((mHfrMode > CAM_HFR_MODE_OFF) && (mHfrMode < CAM_HFR_MODE_MAX)){
+        CDBG_HIGH("HFR mode is ON");
+        m_bHfrMode = true;
+    }
+    else {
+        m_hfrFpsRange.video_min_fps = 0;
+        m_hfrFpsRange.video_max_fps = 0;
+        m_bHfrMode = false;
+        CDBG_HIGH("HFR mode is OFF");
+    }
+    return updateNeeded;
+}
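+// Example (illustrative): switching the HFR parameter from "off" to the 120 fps
+// mode sets m_hfrFpsRange.video_min_fps/video_max_fps to 120000 and returns true,
+// which makes setPreviewFpsRange() set m_bNeedRestart and apply the new fixed
+// video frame rate.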
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFrameRate
+ *
+ * DESCRIPTION: set preview frame rate from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFrameRate(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_PREVIEW_FRAME_RATE);
+    const char *prev_str = get(KEY_PREVIEW_FRAME_RATE);
+
+    if ( str ) {
+        if ( prev_str &&
+             strcmp(str, prev_str)) {
+            CDBG("%s: Requested Fixed Frame Rate %s", __func__, str);
+            updateParamEntry(KEY_PREVIEW_FRAME_RATE, str);
+            m_bFixedFrameRateSet = true;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: set effect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_EFFECT);
+    const char *prev_str = get(KEY_EFFECT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0 ||
+            m_bUpdateEffects == true ) {
+            m_bUpdateEffects = false;
+            return setEffect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: set focus mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FOCUS_MODE);
+    const char *prev_str = get(KEY_FOCUS_MODE);
+
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFocusMode(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusPosition
+ *
+ * DESCRIPTION: set focus position from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setFocusPosition(const QCameraParameters& params)
+{
+#ifndef VANILLA_HAL
+    const char *focus_str = params.get(KEY_FOCUS_MODE);
+    CDBG_HIGH("%s, current focus mode: %s", __func__, focus_str);
+    if (focus_str != NULL) {
+        if (strcmp(focus_str, FOCUS_MODE_MANUAL_POSITION)) {
+            CDBG("%s, don't set focus pos to back-end!", __func__);
+            return NO_ERROR;
+        }
+    }
+    const char *pos = params.get(KEY_QC_MANUAL_FOCUS_POSITION);
+    const char *prev_pos = get(KEY_QC_MANUAL_FOCUS_POSITION);
+    const char *type = params.get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+    const char *prev_type = get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+
+    if ((pos != NULL) && (type != NULL)) {
+        if (prev_pos  == NULL || (strcmp(pos, prev_pos) != 0) ||
+            prev_type == NULL || (strcmp(type, prev_type) != 0)) {
+            return setFocusPosition(type, pos);
+        }
+    }
+#endif
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: set brightness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(const QCameraParameters& params)
+{
+    int currentBrightness = getInt(KEY_QC_BRIGHTNESS);
+    int brightness = params.getInt(KEY_QC_BRIGHTNESS);
+
+    if(params.get(KEY_QC_BRIGHTNESS) == NULL) {
+       CDBG_HIGH("%s: Brightness not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (currentBrightness !=  brightness) {
+        if (brightness >= m_pCapability->brightness_ctrl.min_value &&
+            brightness <= m_pCapability->brightness_ctrl.max_value) {
+            CDBG(" new brightness value : %d ", brightness);
+            return setBrightness(brightness);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, brightness,
+                  m_pCapability->brightness_ctrl.min_value,
+                  m_pCapability->brightness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No brightness value changed.", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(const QCameraParameters& params)
+{
+    int sharpness = params.getInt(KEY_QC_SHARPNESS);
+    int prev_sharp = getInt(KEY_QC_SHARPNESS);
+
+    if(params.get(KEY_QC_SHARPNESS) == NULL) {
+       CDBG_HIGH("%s: Sharpness not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (prev_sharp !=  sharpness) {
+        if((sharpness >= m_pCapability->sharpness_ctrl.min_value) &&
+           (sharpness <= m_pCapability->sharpness_ctrl.max_value)) {
+            CDBG(" new sharpness value : %d ", sharpness);
+            return setSharpness(sharpness);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, sharpness,
+                  m_pCapability->sharpness_ctrl.min_value,
+                  m_pCapability->sharpness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No value change in sharpness", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement factor from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(const QCameraParameters& params)
+{
+    int sceFactor = params.getInt(KEY_QC_SCE_FACTOR);
+    int prev_sceFactor = getInt(KEY_QC_SCE_FACTOR);
+
+    if(params.get(KEY_QC_SCE_FACTOR) == NULL) {
+       CDBG_HIGH("%s: Skintone enhancement not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (prev_sceFactor != sceFactor) {
+        if((sceFactor >= m_pCapability->sce_ctrl.min_value) &&
+           (sceFactor <= m_pCapability->sce_ctrl.max_value)) {
+            CDBG(" new Skintone Enhancement value : %d ", sceFactor);
+            return setSkinToneEnhancement(sceFactor);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, sceFactor,
+                  m_pCapability->sce_ctrl.min_value,
+                  m_pCapability->sce_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No value change in skintone enhancement factor", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: set saturation control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(const QCameraParameters& params)
+{
+    int saturation = params.getInt(KEY_QC_SATURATION);
+    int prev_sat = getInt(KEY_QC_SATURATION);
+
+    if(params.get(KEY_QC_SATURATION) == NULL) {
+       CDBG_HIGH("%s: Saturation not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (prev_sat !=  saturation) {
+        if((saturation >= m_pCapability->saturation_ctrl.min_value) &&
+           (saturation <= m_pCapability->saturation_ctrl.max_value)) {
+            CDBG(" new saturation value : %d ", saturation);
+            return setSaturation(saturation);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, saturation,
+                  m_pCapability->saturation_ctrl.min_value,
+                  m_pCapability->saturation_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No value change in saturation factor", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: set contrast control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(const QCameraParameters& params)
+{
+    int contrast = params.getInt(KEY_QC_CONTRAST);
+    int prev_contrast = getInt(KEY_QC_CONTRAST);
+
+    if(params.get(KEY_QC_CONTRAST) == NULL) {
+       CDBG_HIGH("%s: Contrast not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (prev_contrast !=  contrast) {
+        if((contrast >= m_pCapability->contrast_ctrl.min_value) &&
+           (contrast <= m_pCapability->contrast_ctrl.max_value)) {
+            CDBG(" new contrast value : %d ", contrast);
+            int32_t rc = setContrast(contrast);
+            return rc;
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, contrast,
+                  m_pCapability->contrast_ctrl.min_value,
+                  m_pCapability->contrast_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No value change in contrast", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(const QCameraParameters & params)
+{
+    int expComp = params.getInt(KEY_EXPOSURE_COMPENSATION);
+    int prev_expComp = getInt(KEY_EXPOSURE_COMPENSATION);
+
+    if(params.get(KEY_EXPOSURE_COMPENSATION) == NULL) {
+       CDBG_HIGH("%s: Exposure compensation not set by App ",__func__);
+       return NO_ERROR;
+    }
+    if (prev_expComp != expComp) {
+        if((expComp >= m_pCapability->exposure_compensation_min) &&
+           (expComp <= m_pCapability->exposure_compensation_max)) {
+            CDBG(" new Exposure Compensation value : %d ", expComp);
+            return setExposureCompensation(expComp);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, expComp,
+                  m_pCapability->exposure_compensation_min,
+                  m_pCapability->exposure_compensation_max);
+            return BAD_VALUE;
+        }
+    } else {
+        CDBG("%s: No value change in Exposure Compensation", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_WHITE_BALANCE);
+    const char *prev_str = get(KEY_WHITE_BALANCE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWhiteBalance(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWBManualCCT
+ *
+ * DESCRIPTION: set wb cct value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setWBManualCCT(const QCameraParameters& params)
+{
+#ifndef VANILLA_HAL
+    const char *wb_str = params.get(KEY_WHITE_BALANCE);
+    CDBG_HIGH("%s, current wb mode: %s", __func__, wb_str);
+    if (wb_str != NULL) {
+        if (strcmp(wb_str, WHITE_BALANCE_MANUAL_CCT)) {
+            CDBG("%s, don't set cct to back-end.", __func__);
+            return NO_ERROR;
+        }
+    }
+
+    const char *str = params.get(KEY_QC_WB_MANUAL_CCT);
+    const char *prev_str = get(KEY_QC_WB_MANUAL_CCT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWBManualCCT(str);
+        }
+    }
+#endif
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_ANTIBANDING);
+    const char *prev_str = get(KEY_ANTIBANDING);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAntibanding(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAlgoOptimizationsMask
+ *
+ * DESCRIPTION: get the value from persist file in Stats module that will
+ *              control functionality in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAlgoOptimizationsMask()
+{
+    uint32_t mask = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.stats.opt.mask", value, "0");
+    mask = (uint32_t)atoi(value);
+
+    ALOGV("%s: algo opt ctrl mask :%d", __func__, mask);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_ALGO_OPTIMIZATIONS_MASK,
+                                  sizeof(mask),
+                                  &mask);
+}
+
+/*===========================================================================
+ * FUNCTION   : setStatsDebugMask
+ *
+ * DESCRIPTION: get the value from persist file in Stats module that will
+ *              control functionality in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStatsDebugMask()
+{
+    uint32_t mask = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.stats.debug.mask", value, "0");
+    mask = (uint32_t)atoi(value);
+
+    CDBG("%s: ctrl mask :%d", __func__, mask);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_STATS_DEBUG_MASK,
+                                  sizeof(mask),
+                                  &mask);
+}
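+// Illustrative override (assumes adb access on a debuggable build):
+//   adb shell setprop persist.camera.stats.debug.mask 1
+// The raw integer is forwarded unmodified via CAM_INTF_PARM_STATS_DEBUG_MASK; the
+// meaning of individual bits is defined by the vendor stats module.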
+
+/*===========================================================================
+ * FUNCTION   : setISPDebugMask
+ *
+ * DESCRIPTION: get the value from persist file in ISP module that will
+ *              control functionality in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setISPDebugMask()
+{
+    uint32_t mask = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.ISP.debug.mask", value, "0");
+    mask = (uint32_t)atoi(value);
+    ALOGV("%s: ctrl mask :%d", __func__, mask);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_ISP_DEBUG_MASK,
+                                  sizeof(mask),
+                                  &mask);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSensorDebugMask
+ *
+ * DESCRIPTION: get the value from persist file in Sensor module that will
+ *              control logging in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSensorDebugMask()
+{
+    uint32_t mask = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.sensor.debug.mask", value, "0");
+    mask = (uint32_t)atoi(value);
+    ALOGV("%s: ctrl mask :%d", __func__, mask);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SENSOR_DEBUG_MASK,
+                                  sizeof(mask),
+                                  &mask);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SCENE_DETECT);
+    const char *prev_str = get(KEY_QC_SCENE_DETECT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSceneDetect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VIDEO_HDR);
+    const char *prev_str = get(KEY_QC_VIDEO_HDR);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setVideoHDR(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVtEnable
+ *
+ * DESCRIPTION: set vt Time Stamp enable from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VT_ENABLE);
+    const char *prev_str = get(KEY_QC_VT_ENABLE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setVtEnable(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_FACE_RECOGNITION);
+    const char *prev_str = get(KEY_QC_FACE_RECOGNITION);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            uint32_t maxFaces = (uint32_t)params.getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+            return setFaceRecognition(str, maxFaces);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZoom
+ *
+ * DESCRIPTION: set zoom value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(const QCameraParameters& params)
+{
+    if ((m_pCapability->zoom_supported == 0 ||
+         m_pCapability->zoom_ratio_tbl_cnt == 0)) {
+        CDBG_HIGH("%s: no zoom support", __func__);
+        return NO_ERROR;
+    }
+
+    int zoomLevel = params.getInt(KEY_ZOOM);
+    if ((zoomLevel < 0) || (zoomLevel >= (int)m_pCapability->zoom_ratio_tbl_cnt)) {
+        ALOGE("%s: invalid value %d out of (%d, %d)",
+              __func__, zoomLevel,
+              0, m_pCapability->zoom_ratio_tbl_cnt-1);
+        return BAD_VALUE;
+    }
+
+    int prevZoomLevel = getInt(KEY_ZOOM);
+    if (prevZoomLevel == zoomLevel) {
+        CDBG("%s: No value change in contrast", __func__);
+        return NO_ERROR;
+    }
+
+    return setZoom(zoomLevel);
+}
+
+/*===========================================================================
+ * FUNCTION   : setISOValue
+ *
+ * DESCRIPTION: set ISO value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setISOValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_ISO_MODE);
+    const char *prev_str = get(KEY_QC_ISO_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setISOValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureTime
+ *
+ * DESCRIPTION: set exposure time from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setExposureTime(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_EXPOSURE_TIME);
+    const char *prev_str = get(KEY_QC_EXPOSURE_TIME);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setExposureTime(str);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoRotation(const QCameraParameters& params)
+{
+    int rotation = params.getInt(KEY_QC_VIDEO_ROTATION);
+    if (rotation != -1) {
+        if (rotation == 0 || rotation == 90 ||
+            rotation == 180 || rotation == 270) {
+            set(KEY_QC_VIDEO_ROTATION, rotation);
+        } else {
+            ALOGE("Invalid rotation value: %d", rotation);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRotation(const QCameraParameters& params)
+{
+    int rotation = params.getInt(KEY_ROTATION);
+    if (rotation != -1) {
+        if (rotation == 0 || rotation == 90 ||
+            rotation == 180 || rotation == 270) {
+            set(KEY_ROTATION, rotation);
+        } else {
+            ALOGE("Invalid rotation value: %d", rotation);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: set flash mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FLASH_MODE);
+    const char *prev_str = get(KEY_FLASH_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFlash(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_EXPOSURE_LOCK);
+    const char *prev_str = get(KEY_AUTO_EXPOSURE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAecLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_WHITEBALANCE_LOCK);
+    const char *prev_str = get(KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAwbLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAutoHDR
+ *
+ * DESCRIPTION: Enable/disable auto HDR
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoHDR(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_AUTO_HDR_ENABLE);
+    const char *prev_str = get(KEY_QC_AUTO_HDR_ENABLE);
+    char prop[PROPERTY_VALUE_MAX];
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.auto.hdr.enable", prop, VALUE_DISABLE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            CDBG("%s : Auto HDR set to: %s", __func__, str);
+            return updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, str);
+        }
+    } else {
+        if (prev_str == NULL ||
+            strcmp(prev_str, prop) != 0) {
+            CDBG("%s : Auto HDR set to: %s", __func__, prop);
+            updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, prop);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION   : isAutoHDREnabled
+*
+* DESCRIPTION: Query auto HDR status
+*
+* PARAMETERS : None
+*
+* RETURN     : bool true/false
+*==========================================================================*/
+bool QCameraParameters::isAutoHDREnabled()
+{
+    const char *str = get(KEY_QC_AUTO_HDR_ENABLE);
+    if (str != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+        if (value == NAME_NOT_FOUND) {
+            ALOGE("%s: Invalid Auto HDR value %s", __func__, str);
+            return false;
+        }
+
+        CDBG_HIGH("%s : Auto HDR status is: %d", __func__, value);
+        return value ? true : false;
+    }
+
+    CDBG_HIGH("%s : Auto HDR status not set!", __func__);
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    const char *prev_str = get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setMCEValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: enable/disable DIS from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_DIS);
+    const char *prev_str = get(KEY_QC_DIS);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setDISValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_LENSSHADE);
+    const char *prev_str = get(KEY_QC_LENSSHADE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setLensShadeValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FOCUS_AREAS);
+    if (str != NULL) {
+        int max_num_af_areas = getInt(KEY_MAX_NUM_FOCUS_AREAS);
+        if(max_num_af_areas == 0) {
+            ALOGE("%s: max num of AF area is 0, cannot set focus areas", __func__);
+            return BAD_VALUE;
+        }
+
+        const char *prev_str = get(KEY_FOCUS_AREAS);
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFocusAreas(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_METERING_AREAS);
+    if (str != NULL) {
+        int max_num_mtr_areas = getInt(KEY_MAX_NUM_METERING_AREAS);
+        if(max_num_mtr_areas == 0) {
+            ALOGE("%s: max num of metering areas is 0, cannot set focus areas", __func__);
+            return BAD_VALUE;
+        }
+
+        const char *prev_str = get(KEY_METERING_AREAS);
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setMeteringAreas(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneMode
+ *
+ * DESCRIPTION: set scene mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const QCameraParameters& params)
+{
+    int rc = NO_ERROR;
+    const char *str = params.get(KEY_SCENE_MODE);
+    const char *prev_str = get(KEY_SCENE_MODE);
+    CDBG("%s: str - %s, prev_str - %s",__func__, str, prev_str);
+
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+
+            if(strcmp(str, SCENE_MODE_AUTO) == 0) {
+                m_bSceneTransitionAuto = true;
+            }
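+            // For the HDR scene mode, prefer in-sensor HDR when the backend advertises
+            // it, otherwise fall back to software HDR; if neither is supported the
+            // request is silently ignored.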
+            if (strcmp(str, SCENE_MODE_HDR) == 0) {
+                // If HDR is set from client and the feature is
+                // not enabled in the backend, ignore it.
+                if (m_pCapability->qcom_supported_feature_mask &
+                     CAM_QCOM_FEATURE_SENSOR_HDR) {
+                    CDBG_HIGH("%s: Sensor HDR mode Enabled",__func__);
+                    m_bSensorHDREnabled = true;
+                    m_bHDREnabled = false;
+                } else if (m_pCapability->qcom_supported_feature_mask &
+                            CAM_QCOM_FEATURE_HDR) {
+                    CDBG_HIGH("%s: S/W HDR Enabled",__func__);
+                    m_bSensorHDREnabled = false;
+                    m_bHDREnabled = true;
+                } else {
+                    m_bSensorHDREnabled = false;
+                    m_bHDREnabled = false;
+                    return NO_ERROR;
+                }
+            } else {
+                m_bHDREnabled = false;
+                if (m_bSensorHDREnabled) {
+                    m_bSensorHDREnabled = false;
+                    m_bNeedRestart = true;
+                    setSensorSnapshotHDR(VALUE_OFF);
+                }
+            }
+            if (m_bSensorHDREnabled) {
+                setSensorSnapshotHDR(VALUE_ON);
+                m_bNeedRestart = true;
+            } else if ((m_bHDREnabled) ||
+                ((prev_str != NULL) && (strcmp(prev_str, SCENE_MODE_HDR) == 0))) {
+                CDBG_HIGH("%s: scene mode changed between HDR and non-HDR, need restart", __func__);
+                m_bNeedRestart = true;
+            }
+
+            rc = setSceneMode(str);
+        }
+    }
+    if (m_bHDREnabled) {
+        str = params.get(KEY_QC_HDR_NEED_1X);
+        prev_str = get(KEY_QC_HDR_NEED_1X);
+        if (str != NULL) {
+            if (prev_str == NULL ||
+                strcmp(str, prev_str) != 0) {
+                if (strcmp(str,VALUE_ON) == 0) {
+                    m_bHDR1xFrameEnabled = true;
+                }
+                else {
+                    m_bHDR1xFrameEnabled = false;
+                }
+            updateParamEntry(KEY_QC_HDR_NEED_1X, str);
+            AddSetParmEntryToBatch(m_pParamBuf,
+                                   CAM_INTF_PARM_HDR_NEED_1X,
+                                   sizeof(m_bHDR1xFrameEnabled),
+                                   &m_bHDR1xFrameEnabled);
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone auto focus value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SELECTABLE_ZONE_AF);
+    const char *prev_str = get(KEY_QC_SELECTABLE_ZONE_AF);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSelectableZoneAf(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const QCameraParameters& params)
+{
+    if (isHDREnabled()) {
+        ALOGE("%s: scene mode is HDR, overwrite AE bracket setting to off", __func__);
+        return setAEBracket(AE_BRACKET_OFF);
+    }
+
+    const char *expStr = params.get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+    if (NULL != expStr && strlen(expStr) > 0) {
+        set(KEY_QC_CAPTURE_BURST_EXPOSURE, expStr);
+    } else {
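+        // No burst exposure list from the client: fall back to the
+        // persist.capture.burst.exposures property, or clear the key if that is empty too.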
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.capture.burst.exposures", prop, "");
+        if (strlen(prop) > 0) {
+            set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+        } else {
+            remove(KEY_QC_CAPTURE_BURST_EXPOSURE);
+        }
+    }
+
+    const char *str = params.get(KEY_QC_AE_BRACKET_HDR);
+    const char *prev_str = get(KEY_QC_AE_BRACKET_HDR);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAEBracket(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAFBracket
+ *
+ * DESCRIPTION: set AF bracket from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_UBIFOCUS) == 0){
+        CDBG("%s: AF Bracketing is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_AF_BRACKET);
+    const char *prev_str = get(KEY_QC_AF_BRACKET);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setAFBracket(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMultiTouchFocus
+ *
+ * DESCRIPTION: set multi-touch focus value
+ *
+ * PARAMETERS :
+ *   @multiTouchFocusStr : multi-touch focus value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMultiTouchFocus(const char *multiTouchFocusStr)
+{
+    if(multiTouchFocusStr != NULL) {
+        int value = lookupAttr(MULTI_TOUCH_FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(MULTI_TOUCH_FOCUS_MODES_MAP), multiTouchFocusStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bMultiTouchFocusOn = (value != 0);
+            if (!m_bMultiTouchFocusOn) {
+                resetMultiTouchFocusParam();
+            }
+            updateParamEntry(KEY_QC_MULTI_TOUCH_FOCUS, multiTouchFocusStr);
+            return NO_ERROR;
+        }
+    }
+    CDBG_HIGH("Invalid multi-touch focus value: %s",
+            (multiTouchFocusStr == NULL) ? "NULL" : multiTouchFocusStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMultiTouchFocus
+ *
+ * DESCRIPTION: set multi-touch focus af bracket from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMultiTouchFocus(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS) == 0) {
+        CDBG_HIGH("%s: multi-touch focus is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_MULTI_TOUCH_FOCUS);
+    const char *prev_str = get(KEY_QC_MULTI_TOUCH_FOCUS);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setMultiTouchFocus(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchAFAEC
+ *
+ * DESCRIPTION: set touch af aec value
+ *
+ * PARAMETERS :
+ *   @touchAfAecStr : touch focus value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTouchAFAEC(const char *touchAfAecStr)
+{
+    if (touchAfAecStr != NULL) {
+        int value = lookupAttr(TOUCH_AF_AEC_MODES_MAP,
+                PARAM_MAP_SIZE(TOUCH_AF_AEC_MODES_MAP), touchAfAecStr);
+        if (value != NAME_NOT_FOUND) {
+            m_bTouchFocusOn = (value != 0);
+            updateParamEntry(KEY_QC_TOUCH_AF_AEC, touchAfAecStr);
+            return NO_ERROR;
+        }
+    }
+    CDBG_HIGH("Invalid touch af aec value: %s",
+            (touchAfAecStr == NULL) ? "NULL" : touchAfAecStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchAFAEC
+ *
+ * DESCRIPTION: set touch AF from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTouchAFAEC(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_TOUCH_AF_AEC);
+    const char *prev_str = get(KEY_QC_TOUCH_AF_AEC);
+    CDBG("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            return setTouchAFAEC(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateMTFInfo
+ *
+ * DESCRIPTION: update lens position selected by user
+ *
+ * PARAMETERS :
+ *   @lensPos : current lens position to add into array
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateMTFInfo(const int32_t lensPos)
+{
+   CDBG_HIGH("%s: current lens position is: %d, burst count = %d",
+           __func__, lensPos, m_MTFBracketInfo.burst_count);
+   if (m_MTFBracketInfo.burst_count >= MAX_AF_BRACKETING_VALUES) {
+       return BAD_VALUE;
+   }
+   if (m_MTFBracketInfo.burst_count == 0) {
+       m_MTFBracketInfo.focus_steps[0] = lensPos;
+   } else {
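+      // Insert lensPos into focus_steps keeping the array in descending order.
+      // Note that a position smaller than every stored entry is not inserted here,
+      // although burst_count is still incremented below.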
+      for (int i = 0; i < m_MTFBracketInfo.burst_count; i++) {
+         if (lensPos > m_MTFBracketInfo.focus_steps[i]) {
+            for (int j = m_MTFBracketInfo.burst_count; j > i; j--) {
+               m_MTFBracketInfo.focus_steps[j] = m_MTFBracketInfo.focus_steps[j-1];
+            }
+            m_MTFBracketInfo.focus_steps[i] = lensPos;
+            break;
+         }
+      }
+   }
+   for (int i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+      CDBG_HIGH("%s: current focus_step[%d] = %d", __func__, i,
+          m_MTFBracketInfo.focus_steps[i]);
+   }
+
+   m_MTFBracketInfo.burst_count++;
+   return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setChromaFlash
+ *
+ * DESCRIPTION: set chroma flash from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_CHROMA_FLASH) == 0) {
+        CDBG("%s: Chroma Flash is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_CHROMA_FLASH);
+    const char *prev_str = get(KEY_QC_CHROMA_FLASH);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setChromaFlash(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOptiZoom
+ *
+ * DESCRIPTION: set opti zoom from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_OPTIZOOM) == 0){
+        CDBG("%s: Opti Zoom is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_OPTI_ZOOM);
+    const char *prev_str = get(KEY_QC_OPTI_ZOOM);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setOptiZoom(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_TRUEPORTRAIT) == 0){
+        CDBG("%s: True Portrait is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_TRUE_PORTRAIT);
+    const char *prev_str = get(KEY_QC_TRUE_PORTRAIT);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setTruePortrait(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSeeMore
+ *
+ * DESCRIPTION: set see more (llvd) from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+          CAM_QCOM_FEATURE_LLVD) == 0){
+      CDBG("%s: See more is not supported",__func__);
+      return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_SEE_MORE);
+    const char *prev_str = get(KEY_QC_SEE_MORE);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setSeeMore(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction setting from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_REDEYE_REDUCTION);
+    const char *prev_str = get(KEY_QC_REDEYE_REDUCTION);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setRedeyeReduction(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFssr
+ *
+ * DESCRIPTION: set fssr from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFssr(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_FSSR) == 0) {
+        CDBG_HIGH("%s: FSSR is not supported",__func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_FSSR);
+    const char *prev_str = get(KEY_QC_FSSR);
+    CDBG_HIGH("%s: str =%s & prev_str =%s",__func__, str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setFssr(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setGpsLocation
+ *
+ * DESCRIPTION: set GPS location information from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setGpsLocation(const QCameraParameters& params)
+{
+    const char *method = params.get(KEY_GPS_PROCESSING_METHOD);
+    if (method) {
+        set(KEY_GPS_PROCESSING_METHOD, method);
+    }else {
+        remove(KEY_GPS_PROCESSING_METHOD);
+    }
+
+    const char *latitude = params.get(KEY_GPS_LATITUDE);
+    if (latitude) {
+        set(KEY_GPS_LATITUDE, latitude);
+    }else {
+        remove(KEY_GPS_LATITUDE);
+    }
+
+    const char *latitudeRef = params.get(KEY_QC_GPS_LATITUDE_REF);
+    if (latitudeRef) {
+        set(KEY_QC_GPS_LATITUDE_REF, latitudeRef);
+    }else {
+        remove(KEY_QC_GPS_LATITUDE_REF);
+    }
+
+    const char *longitude = params.get(KEY_GPS_LONGITUDE);
+    if (longitude) {
+        set(KEY_GPS_LONGITUDE, longitude);
+    }else {
+        remove(KEY_GPS_LONGITUDE);
+    }
+
+    const char *longitudeRef = params.get(KEY_QC_GPS_LONGITUDE_REF);
+    if (longitudeRef) {
+        set(KEY_QC_GPS_LONGITUDE_REF, longitudeRef);
+    }else {
+        remove(KEY_QC_GPS_LONGITUDE_REF);
+    }
+
+    const char *altitudeRef = params.get(KEY_QC_GPS_ALTITUDE_REF);
+    if (altitudeRef) {
+        set(KEY_QC_GPS_ALTITUDE_REF, altitudeRef);
+    }else {
+        remove(KEY_QC_GPS_ALTITUDE_REF);
+    }
+
+    const char *altitude = params.get(KEY_GPS_ALTITUDE);
+    if (altitude) {
+        set(KEY_GPS_ALTITUDE, altitude);
+    }else {
+        remove(KEY_GPS_ALTITUDE);
+    }
+
+    const char *status = params.get(KEY_QC_GPS_STATUS);
+    if (status) {
+        set(KEY_QC_GPS_STATUS, status);
+    } else {
+        remove(KEY_QC_GPS_STATUS);
+    }
+
+    const char *timestamp = params.get(KEY_GPS_TIMESTAMP);
+    if (timestamp) {
+        set(KEY_GPS_TIMESTAMP, timestamp);
+    }else {
+        remove(KEY_GPS_TIMESTAMP);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNumOfSnapshot
+ *
+ * DESCRIPTION: set number of snapshot per shutter from user setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNumOfSnapshot()
+{
+    int nBurstNum = getBurstNum();
+    int nExpnum = 0;
+
+    const char *bracket_str = get(KEY_QC_AE_BRACKET_HDR);
+    if (bracket_str != NULL && strlen(bracket_str) > 0) {
+        int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+                bracket_str);
+        switch (value) {
+        case CAM_EXP_BRACKETING_ON:
+            {
+                nExpnum = 0;
+                const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+                if ((str_val != NULL) && (strlen(str_val) > 0)) {
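+                    // Count the comma-separated exposure values; e.g. a burst
+                    // exposure string with three entries yields three bracketed snapshots.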
+                    char prop[PROPERTY_VALUE_MAX];
+                    memset(prop, 0, sizeof(prop));
+                    strcpy(prop, str_val);
+                    char *saveptr = NULL;
+                    char *token = strtok_r(prop, ",", &saveptr);
+                    while (token != NULL) {
+                        token = strtok_r(NULL, ",", &saveptr);
+                        nExpnum++;
+                    }
+                }
+                if (nExpnum == 0) {
+                    nExpnum = 1;
+                }
+            }
+            break;
+        default:
+            nExpnum = 1 + getNumOfExtraHDROutBufsIfNeeded();
+            break;
+        }
+    }
+
+    CDBG_HIGH("%s: nBurstNum = %d, nExpnum = %d", __func__, nBurstNum, nExpnum);
+    set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, nBurstNum * nExpnum);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHint
+ *
+ * DESCRIPTION: set recording hint value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRecordingHint(const QCameraParameters& params)
+{
+    const char * str = params.get(KEY_RECORDING_HINT);
+    const char *prev_str = get(KEY_RECORDING_HINT);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                    str);
+            if(value != NAME_NOT_FOUND){
+                updateParamEntry(KEY_RECORDING_HINT, str);
+                setRecordingHintValue(value);
+                return NO_ERROR;
+            } else {
+                ALOGE("Invalid recording hint value: %s", str);
+                return BAD_VALUE;
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNoDisplayMode
+ *
+ * DESCRIPTION: set no display mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoDisplayMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_NO_DISPLAY_MODE);
+    const char *prev_str = get(KEY_QC_NO_DISPLAY_MODE);
+    if(str_val && strlen(str_val) > 0) {
+        if (prev_str == NULL || strcmp(str_val, prev_str) != 0) {
+            m_bNoDisplayMode = atoi(str_val);
+            set(KEY_QC_NO_DISPLAY_MODE, str_val);
+            m_bNeedRestart = true;
+        }
+    } else {
+        m_bNoDisplayMode = false;
+    }
+    CDBG_HIGH("Param m_bNoDisplayMode = %d", m_bNoDisplayMode);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_ZSL);
+    const char *prev_val  = get(KEY_QC_ZSL);
+
+    if (str_val != NULL) {
+        if (prev_val == NULL || strcmp(str_val, prev_val) != 0) {
+            int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+                    str_val);
+            if (value != NAME_NOT_FOUND) {
+                set(KEY_QC_ZSL, str_val);
+                m_bZslMode_new = (value > 0)? true : false;
+
+                // ZSL mode changed, need restart preview
+                m_bNeedRestart = true;
+                CDBG_HIGH("%s: ZSL Mode  -> %s", __func__, m_bZslMode_new ? "Enabled" : "Disabled");
+
+                return AddSetParmEntryToBatch(m_pParamBuf,
+                                              CAM_INTF_PARM_ZSL_MODE,
+                                              sizeof(value),
+                                              &value);
+            } else {
+                ALOGE("Invalid ZSL mode value: %s", str_val);
+                return BAD_VALUE;
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_DENOISE);
+    const char *prev_str = get(KEY_QC_DENOISE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWaveletDenoise(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCameraMode
+ *
+ * DESCRIPTION: set camera mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCameraMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_CAMERA_MODE);
+    if (str != NULL) {
+        set(KEY_QC_CAMERA_MODE, str);
+    } else {
+        remove(KEY_QC_CAMERA_MODE);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslAttributes
+ *
+ * DESCRIPTION: set ZSL related attributes from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslAttributes(const QCameraParameters& params)
+{
+    // TODO: may switch to pure param instead of sysprop
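+    // Until then the defaults come from system properties and can be overridden at
+    // runtime for experimentation, e.g. "adb shell setprop persist.camera.zsl.backlookcnt 4".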
+    char prop[PROPERTY_VALUE_MAX];
+
+    const char *str = params.get(KEY_QC_ZSL_BURST_INTERVAL);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_INTERVAL, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.interval", prop, "1");
+        set(KEY_QC_ZSL_BURST_INTERVAL, prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_LOOKBACK, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.backlookcnt", prop, "2");
+        set(KEY_QC_ZSL_BURST_LOOKBACK, prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_QUEUE_DEPTH, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.queuedepth", prop, "2");
+        set(KEY_QC_ZSL_QUEUE_DEPTH, prop);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlip
+ *
+ * DESCRIPTION: set preview/ video/ picture flip mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlip(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) == 0) {
+        CDBG_HIGH("%s: flip is not supported.", __func__);
+        return NO_ERROR;
+    }
+
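+    // Preview, video and snapshot flip are validated independently below; a changed
+    // value only raises the corresponding m_b*FlipChanged flag for the rest of the HAL
+    // to act on.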
+    // check preview flip setting
+    const char *str = params.get(KEY_QC_PREVIEW_FLIP);
+    const char *prev_val = get(KEY_QC_PREVIEW_FLIP);
+    if(str != NULL){
+        if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+            int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+            if(value != NAME_NOT_FOUND){
+                set(KEY_QC_PREVIEW_FLIP, str);
+                m_bPreviewFlipChanged = true;
+            }
+        }
+    }
+
+    // check video flip setting
+    str = params.get(KEY_QC_VIDEO_FLIP);
+    prev_val = get(KEY_QC_VIDEO_FLIP);
+    if(str != NULL){
+        if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+            int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+            if(value != NAME_NOT_FOUND){
+                set(KEY_QC_VIDEO_FLIP, str);
+                m_bVideoFlipChanged = true;
+            }
+        }
+    }
+
+    // check picture flip setting
+    str = params.get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+    prev_val = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+    if(str != NULL){
+        if (prev_val == NULL || strcmp(str, prev_val) != 0) {
+            int32_t value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+            if(value != NAME_NOT_FOUND){
+                set(KEY_QC_SNAPSHOT_PICTURE_FLIP, str);
+                m_bSnapshotFlipChanged = true;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBurstNum
+ *
+ * DESCRIPTION: set burst number of snapshot
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBurstNum(const QCameraParameters& params)
+{
+    int nBurstNum = params.getInt(KEY_QC_SNAPSHOT_BURST_NUM);
+    if (nBurstNum <= 0) {
+        if (!isAdvCamFeaturesEnabled()) {
+            // if burst number is not set in parameters,
+            // read from sys prop
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.snapshot.number", prop, "0");
+            nBurstNum = atoi(prop);
+        } else {
+            nBurstNum = 1;
+        }
+        if (nBurstNum <= 0) {
+            nBurstNum = 1;
+        }
+    } else {
+        set(KEY_QC_SNAPSHOT_BURST_NUM, nBurstNum);
+    }
+    m_nBurstNum = (uint8_t)nBurstNum;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSnapshotFDReq
+ *
+ * DESCRIPTION: set requirement of Face Detection Metadata in Snapshot mode.
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSnapshotFDReq(const QCameraParameters& params)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    const char *str = params.get(KEY_QC_SNAPSHOT_FD_DATA);
+
+    if(str != NULL){
+        set(KEY_QC_SNAPSHOT_FD_DATA, str);
+    }else{
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.snapshot.fd", prop, "0");
+        set(KEY_QC_SNAPSHOT_FD_DATA, prop);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMobicat
+ *
+ * DESCRIPTION: set Mobicat on/off.
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMobicat(const QCameraParameters& )
+{
+    char value [PROPERTY_VALUE_MAX];
+    property_get("persist.camera.mobicat", value, "0");
+    uint32_t enableMobi = (uint32_t) atoi(value);
+    int32_t ret = NO_ERROR;
+
+    if (enableMobi) {
+        tune_cmd_t tune_cmd;
+        tune_cmd.type = 2;
+        tune_cmd.module = 0;
+        tune_cmd.value = 1;
+
+        ret = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_SET_VFE_COMMAND,
+                                sizeof(tune_cmd_t),
+                                &tune_cmd);
+        if (NO_ERROR != ret) {
+            return ret;
+        }
+        tune_cmd.module = 0;
+
+        ret = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_SET_PP_COMMAND,
+                                sizeof(tune_cmd_t),
+                                &tune_cmd);
+    }
+    if (NO_ERROR != ret) {
+        return ret;
+    }
+    ret = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_AF_MOBICAT_CMD,
+                                sizeof(enableMobi),
+                                &enableMobi);
+    m_bMobiMask = (uint8_t)enableMobi;
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLongshotParam
+ *
+ * DESCRIPTION: set Longshot on/off.
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotParam(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_LONG_SHOT);
+    const char *prev_str = get(KEY_QC_LONG_SHOT);
+
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            set(KEY_QC_LONG_SHOT, str);
+            if (!strcmp(str, "off")) {
+                if (m_bLongshotEnabled == true) {
+                    // We restart here to reset the FPS and the number of buffers,
+                    // as required by the single snapshot use case.
+                    m_bNeedRestart = true;
+                }
+                m_bLongshotEnabled = false;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *   @needRestart : [output] whether preview needs to restart upon setting changes
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParameters(QCameraParameters& params,
+        bool &needRestart)
+{
+    int32_t final_rc = NO_ERROR;
+    int32_t rc;
+    m_bNeedRestart = false;
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table",__func__);
+        rc = BAD_TYPE;
+        goto UPDATE_PARAM_DONE;
+    }
+
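+    // Apply each setting in turn; a failure does not stop the sequence, so the
+    // remaining parameters are still updated and the last non-zero status is
+    // reported through final_rc.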
+    if ((rc = setPreviewSize(params)))                  final_rc = rc;
+    if ((rc = setVideoSize(params)))                    final_rc = rc;
+    if ((rc = setPictureSize(params)))                  final_rc = rc;
+    if ((rc = setPreviewFormat(params)))                final_rc = rc;
+    if ((rc = setPictureFormat(params)))                final_rc = rc;
+    if ((rc = setJpegQuality(params)))                  final_rc = rc;
+    if ((rc = setOrientation(params)))                  final_rc = rc;
+    if ((rc = setRotation(params)))                     final_rc = rc;
+    if ((rc = setVideoRotation(params)))                final_rc = rc;
+    if ((rc = setNoDisplayMode(params)))                final_rc = rc;
+    if ((rc = setZslMode(params)))                      final_rc = rc;
+    if ((rc = setZslAttributes(params)))                final_rc = rc;
+    if ((rc = setCameraMode(params)))                   final_rc = rc;
+    if ((rc = setRecordingHint(params)))                final_rc = rc;
+
+    if ((rc = setPreviewFrameRate(params)))             final_rc = rc;
+    if ((rc = setPreviewFpsRange(params)))              final_rc = rc;
+    if ((rc = setAutoExposure(params)))                 final_rc = rc;
+    if ((rc = setEffect(params)))                       final_rc = rc;
+    if ((rc = setBrightness(params)))                   final_rc = rc;
+    if ((rc = setZoom(params)))                         final_rc = rc;
+    if ((rc = setSharpness(params)))                    final_rc = rc;
+    if ((rc = setSaturation(params)))                   final_rc = rc;
+    if ((rc = setContrast(params)))                     final_rc = rc;
+    if ((rc = setFocusMode(params)))                    final_rc = rc;
+    if ((rc = setISOValue(params)))                     final_rc = rc;
+    if ((rc = setExposureTime(params)))                 final_rc = rc;
+    if ((rc = setSkinToneEnhancement(params)))          final_rc = rc;
+    if ((rc = setFlash(params)))                        final_rc = rc;
+    if ((rc = setAecLock(params)))                      final_rc = rc;
+    if ((rc = setAwbLock(params)))                      final_rc = rc;
+    if ((rc = setLensShadeValue(params)))               final_rc = rc;
+    if ((rc = setMCEValue(params)))                     final_rc = rc;
+    if ((rc = setDISValue(params)))                     final_rc = rc;
+    if ((rc = setAntibanding(params)))                  final_rc = rc;
+    if ((rc = setExposureCompensation(params)))         final_rc = rc;
+    if ((rc = setWhiteBalance(params)))                 final_rc = rc;
+    if ((rc = setWBManualCCT(params)))                  final_rc = rc;
+    if ((rc = setSceneMode(params)))                    final_rc = rc;
+    if ((rc = setFocusAreas(params)))                   final_rc = rc;
+    if ((rc = setFocusPosition(params)))                final_rc = rc;
+    if ((rc = setMeteringAreas(params)))                final_rc = rc;
+    if ((rc = setSelectableZoneAf(params)))             final_rc = rc;
+    if ((rc = setRedeyeReduction(params)))              final_rc = rc;
+    if ((rc = setAEBracket(params)))                    final_rc = rc;
+    if ((rc = setAutoHDR(params)))                      final_rc = rc;
+    if ((rc = setGpsLocation(params)))                  final_rc = rc;
+    if ((rc = setWaveletDenoise(params)))               final_rc = rc;
+    if ((rc = setFaceRecognition(params)))              final_rc = rc;
+    if ((rc = setFlip(params)))                         final_rc = rc;
+    if ((rc = setVideoHDR(params)))                     final_rc = rc;
+    if ((rc = setVtEnable(params)))                     final_rc = rc;
+    if ((rc = setBurstNum(params)))                     final_rc = rc;
+    if ((rc = setSnapshotFDReq(params)))                final_rc = rc;
+    if ((rc = setTintlessValue(params)))                final_rc = rc;
+    if ((rc = setCDSMode(params)))                      final_rc = rc;
+
+    // update live snapshot size after all other parameters are set
+    if ((rc = setLiveSnapshotSize(params)))             final_rc = rc;
+    if ((rc = setJpegThumbnailSize(params)))            final_rc = rc;
+    if ((rc = setStatsDebugMask()))                     final_rc = rc;
+    if ((rc = setISPDebugMask()))                       final_rc = rc;
+    if ((rc = setAlgoOptimizationsMask()))              final_rc = rc;
+    if ((rc = setMobicat(params)))                      final_rc = rc;
+    if ((rc = setAFBracket(params)))                    final_rc = rc;
+    if ((rc = setChromaFlash(params)))                  final_rc = rc;
+    if ((rc = setOptiZoom(params)))                     final_rc = rc;
+    if ((rc = setFssr(params)))                         final_rc = rc;
+    if ((rc = setSeeMore(params)))                      final_rc = rc;
+    if ((rc = setMultiTouchFocus(params)))              final_rc = rc;
+    if ((rc = setTouchAFAEC(params)))                   final_rc = rc;
+    if ((rc = setLongshotParam(params)))                final_rc = rc;
+    if ((rc = setTruePortrait(params)))                 final_rc = rc;
+
+    if ((rc = updateFlash(false)))                      final_rc = rc;
+    if ((rc = setSensorDebugMask()))                    final_rc = rc;
+UPDATE_PARAM_DONE:
+    needRestart = m_bNeedRestart;
+    return final_rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParameters
+ *
+ * DESCRIPTION: commit parameter changes to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParameters()
+{
+    return commitSetBatch();
+}
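+
+/* Usage sketch (illustrative only; the surrounding objects and the restart
+ * handling are assumed, not defined in this file): updateParameters() only
+ * stages values into the parameter batch and reports via needRestart whether
+ * the preview has to be restarted; nothing reaches the backend until
+ * commitParameters() commits the batch.
+ *
+ *     bool needRestart = false;
+ *     // 'myParams' is the HAL-side QCameraParameters object, already init()ed;
+ *     // 'appParams' holds the settings received from the application.
+ *     if (myParams.updateParameters(appParams, needRestart) == NO_ERROR) {
+ *         if (needRestart) {
+ *             // stop preview streams here before committing (assumed step)
+ *         }
+ *         myParams.commitParameters();
+ *     }
+ */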
+
+/*===========================================================================
+ * FUNCTION   : initDefaultParameters
+ *
+ * DESCRIPTION: initialize default parameters for the first time
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initDefaultParameters()
+{
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+    int32_t hal_version = CAM_HAL_V1;
+    AddSetParmEntryToBatch(m_pParamBuf,
+                           CAM_INTF_PARM_HAL_VERSION,
+                           sizeof(hal_version),
+                           &hal_version);
+
+    /*************************Initialize Values******************************/
+    // Set read only parameters from camera capability
+    set(KEY_SMOOTH_ZOOM_SUPPORTED,
+        m_pCapability->smooth_zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_ZOOM_SUPPORTED,
+        m_pCapability->zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_SNAPSHOT_SUPPORTED,
+        m_pCapability->video_snapshot_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_STABILIZATION_SUPPORTED,
+        m_pCapability->video_stablization_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_EXPOSURE_LOCK_SUPPORTED,
+        m_pCapability->auto_exposure_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED,
+        m_pCapability->auto_wb_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_QC_SUPPORTED_CAMERA_FEATURES,
+            (int)m_pCapability->qcom_supported_feature_mask);
+    set(KEY_MAX_NUM_DETECTED_FACES_HW, m_pCapability->max_num_roi);
+    set(KEY_MAX_NUM_DETECTED_FACES_SW, m_pCapability->max_num_roi);
+    set(KEY_QC_MAX_NUM_REQUESTED_FACES, m_pCapability->max_num_roi);
+    // Set focal length, horizontal view angle, and vertical view angle
+    setFloat(KEY_FOCAL_LENGTH, m_pCapability->focal_length);
+    setFloat(KEY_HORIZONTAL_VIEW_ANGLE, m_pCapability->hor_view_angle);
+    setFloat(KEY_VERTICAL_VIEW_ANGLE, m_pCapability->ver_view_angle);
+    set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+    set(KEY_QC_AUTO_HDR_SUPPORTED,
+        (m_pCapability->auto_hdr_supported)? VALUE_TRUE : VALUE_FALSE);
+    // Set supported preview sizes
+    if (m_pCapability->preview_sizes_tbl_cnt > 0 &&
+        m_pCapability->preview_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 previewSizeValues = createSizesString(
+                m_pCapability->preview_sizes_tbl, m_pCapability->preview_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PREVIEW_SIZES, previewSizeValues.string());
+        CDBG_HIGH("%s: supported preview sizes: %s", __func__, previewSizeValues.string());
+        // Set default preview size
+        CameraParameters::setPreviewSize(m_pCapability->preview_sizes_tbl[0].width,
+                                         m_pCapability->preview_sizes_tbl[0].height);
+    } else {
+        ALOGE("%s: supported preview sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported video sizes
+    if (m_pCapability->video_sizes_tbl_cnt > 0 &&
+        m_pCapability->video_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 videoSizeValues = createSizesString(
+                m_pCapability->video_sizes_tbl, m_pCapability->video_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_VIDEO_SIZES, videoSizeValues.string());
+        CDBG_HIGH("%s: supported video sizes: %s", __func__, videoSizeValues.string());
+        // Set default video size
+        CameraParameters::setVideoSize(m_pCapability->video_sizes_tbl[0].width,
+                                       m_pCapability->video_sizes_tbl[0].height);
+
+        //Set preferred Preview size for video
+        String8 vSize = createSizesString(&m_pCapability->video_sizes_tbl[0], 1);
+        set(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, vSize.string());
+    } else {
+        ALOGE("%s: supported video sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported picture sizes
+    if (m_pCapability->picture_sizes_tbl_cnt > 0 &&
+        m_pCapability->picture_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 pictureSizeValues = createSizesString(
+                m_pCapability->picture_sizes_tbl, m_pCapability->picture_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+        CDBG_HIGH("%s: supported pic sizes: %s", __func__, pictureSizeValues.string());
+        // Set default picture size to the smallest resolution
+        CameraParameters::setPictureSize(
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].width,
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].height);
+    } else {
+        ALOGE("%s: supported picture sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Check whether picture size scaling should be enabled
+    if (m_pCapability->scale_picture_sizes_cnt > 0 &&
+        m_pCapability->scale_picture_sizes_cnt <= MAX_SCALE_SIZES_CNT){
+        // Get scale sizes, enable scaling, and re-set the picture size table with the scale sizes
+        m_reprocScaleParam.setScaleEnable(true);
+        int rc_s = m_reprocScaleParam.setScaleSizeTbl(
+            m_pCapability->scale_picture_sizes_cnt, m_pCapability->scale_picture_sizes,
+            m_pCapability->picture_sizes_tbl_cnt, m_pCapability->picture_sizes_tbl);
+        if(rc_s == NO_ERROR){
+            String8 scaledPictureSizeValues = createSizesString(
+                m_pCapability->scale_picture_sizes, m_pCapability->scale_picture_sizes_cnt);
+            set(KEY_QC_SCALED_PICTURE_SIZES, scaledPictureSizeValues.string());
+            ALOGE("%s: scaled supported pic sizes: %s", __func__, scaledPictureSizeValues.string());
+        }else{
+            m_reprocScaleParam.setScaleEnable(false);
+            ALOGE("%s: reset scaled picture size table failed.", __func__);
+        }
+    }else{
+        m_reprocScaleParam.setScaleEnable(false);
+    }
+
+    // Set supported thumbnail sizes
+    String8 thumbnailSizeValues = createSizesString(
+            THUMBNAIL_SIZES_MAP,
+            PARAM_MAP_SIZE(THUMBNAIL_SIZES_MAP));
+    set(KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, thumbnailSizeValues.string());
+    // Set default thumbnail size
+    set(KEY_JPEG_THUMBNAIL_WIDTH, THUMBNAIL_SIZES_MAP[0].width);
+    set(KEY_JPEG_THUMBNAIL_HEIGHT, THUMBNAIL_SIZES_MAP[0].height);
+
+    // Set supported livesnapshot sizes
+    if (m_pCapability->livesnapshot_sizes_tbl_cnt > 0 &&
+        m_pCapability->livesnapshot_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 liveSnpashotSizeValues = createSizesString(
+                m_pCapability->livesnapshot_sizes_tbl,
+                m_pCapability->livesnapshot_sizes_tbl_cnt);
+        set(KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES, liveSnpashotSizeValues.string());
+        CDBG("%s: supported live snapshot sizes: %s", __func__, liveSnpashotSizeValues.string());
+        m_LiveSnapshotSize =
+            m_pCapability->livesnapshot_sizes_tbl[m_pCapability->livesnapshot_sizes_tbl_cnt-1];
+    }
+
+    // Set supported preview formats
+    String8 previewFormatValues = createValuesString(
+            m_pCapability->supported_preview_fmts,
+            m_pCapability->supported_preview_fmt_cnt,
+            PREVIEW_FORMATS_MAP,
+            PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP));
+    set(KEY_SUPPORTED_PREVIEW_FORMATS, previewFormatValues.string());
+    // Set default preview format
+    CameraParameters::setPreviewFormat(PIXEL_FORMAT_YUV420SP);
+
+    // Set default Video Format
+    set(KEY_VIDEO_FRAME_FORMAT, PIXEL_FORMAT_YUV420SP);
+
+    // Set supported picture formats
+    String8 pictureTypeValues(PIXEL_FORMAT_JPEG);
+    String8 str = createValuesString(
+            m_pCapability->supported_raw_fmts,
+            m_pCapability->supported_raw_fmt_cnt,
+            PICTURE_TYPES_MAP,
+            PARAM_MAP_SIZE(PICTURE_TYPES_MAP));
+    if (str.string() != NULL) {
+        pictureTypeValues.append(",");
+        pictureTypeValues.append(str);
+    }
+
+    set(KEY_SUPPORTED_PICTURE_FORMATS, pictureTypeValues.string());
+    // Set default picture Format
+    CameraParameters::setPictureFormat(PIXEL_FORMAT_JPEG);
+    // Set raw image size
+    char raw_size_str[32];
+    snprintf(raw_size_str, sizeof(raw_size_str), "%dx%d",
+             m_pCapability->raw_dim.width, m_pCapability->raw_dim.height);
+    set(KEY_QC_RAW_PICUTRE_SIZE, raw_size_str);
+
+    //set default jpeg quality and thumbnail quality
+    set(KEY_JPEG_QUALITY, 85);
+    set(KEY_JPEG_THUMBNAIL_QUALITY, 85);
+
+    // Set FPS ranges
+    if (m_pCapability->fps_ranges_tbl_cnt > 0 &&
+        m_pCapability->fps_ranges_tbl_cnt <= MAX_SIZES_CNT) {
+        int default_fps_index = 0;
+        String8 fpsRangeValues = createFpsRangeString(m_pCapability->fps_ranges_tbl,
+                                                      m_pCapability->fps_ranges_tbl_cnt,
+                                                      default_fps_index);
+        set(KEY_SUPPORTED_PREVIEW_FPS_RANGE, fpsRangeValues.string());
+        CDBG_HIGH("%s: supported fps ranges: %s", __func__, fpsRangeValues.string());
+
+        int min_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].min_fps * 1000);
+        int max_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps * 1000);
+        m_default_fps_range = m_pCapability->fps_ranges_tbl[default_fps_index];
+        //Set video fps same as preview fps
+        setPreviewFpsRange(min_fps, max_fps, min_fps, max_fps);
+
+        // Set legacy preview fps
+        String8 fpsValues = createFpsString(m_pCapability->fps_ranges_tbl[default_fps_index]);
+        set(KEY_SUPPORTED_PREVIEW_FRAME_RATES, fpsValues.string());
+        CDBG_HIGH("%s: supported fps rates: %s", __func__, fpsValues.string());
+        CameraParameters::setPreviewFrameRate(int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps));
+    } else {
+        ALOGE("%s: supported fps ranges cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported focus modes
+    if (m_pCapability->supported_focus_modes_cnt > 0) {
+        String8 focusModeValues = createValuesString(
+                m_pCapability->supported_focus_modes,
+                m_pCapability->supported_focus_modes_cnt,
+                FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(FOCUS_MODES_MAP));
+        set(KEY_SUPPORTED_FOCUS_MODES, focusModeValues);
+
+        // Set default focus mode and update corresponding parameter buf
+        const char *focusMode = lookupNameByValue(FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(FOCUS_MODES_MAP),
+                m_pCapability->supported_focus_modes[0]);
+        if (focusMode != NULL) {
+            setFocusMode(focusMode);
+        } else {
+            setFocusMode(FOCUS_MODE_FIXED);
+        }
+    } else {
+        ALOGE("%s: supported focus modes cnt is 0!!!", __func__);
+    }
+
+    // Set focus areas
+    if (m_pCapability->max_num_focus_areas > MAX_ROI) {
+        m_pCapability->max_num_focus_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_FOCUS_AREAS, m_pCapability->max_num_focus_areas);
+    if (m_pCapability->max_num_focus_areas > 0) {
+        setFocusAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // Set metering areas
+    if (m_pCapability->max_num_metering_areas > MAX_ROI) {
+        m_pCapability->max_num_metering_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_METERING_AREAS, m_pCapability->max_num_metering_areas);
+    if (m_pCapability->max_num_metering_areas > 0) {
+        setMeteringAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // Set manual focus position range (hardcoded defaults; ideally these should come from m_pCapability)
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 40;
+    m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 60;
+    set(KEY_QC_MIN_FOCUS_POS_INDEX, m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+    set(KEY_QC_MAX_FOCUS_POS_INDEX, m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 0;
+    m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 1023;
+    set(KEY_QC_MIN_FOCUS_POS_DAC, m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+    set(KEY_QC_MAX_FOCUS_POS_DAC, m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+
+    // Set Saturation
+    set(KEY_QC_MIN_SATURATION, m_pCapability->saturation_ctrl.min_value);
+    set(KEY_QC_MAX_SATURATION, m_pCapability->saturation_ctrl.max_value);
+    set(KEY_QC_SATURATION_STEP, m_pCapability->saturation_ctrl.step);
+    setSaturation(m_pCapability->saturation_ctrl.def_value);
+
+    // Set Sharpness
+    set(KEY_QC_MIN_SHARPNESS, m_pCapability->sharpness_ctrl.min_value);
+    set(KEY_QC_MAX_SHARPNESS, m_pCapability->sharpness_ctrl.max_value);
+    set(KEY_QC_SHARPNESS_STEP, m_pCapability->sharpness_ctrl.step);
+    setSharpness(m_pCapability->sharpness_ctrl.def_value);
+
+    // Set Contrast
+    set(KEY_QC_MIN_CONTRAST, m_pCapability->contrast_ctrl.min_value);
+    set(KEY_QC_MAX_CONTRAST, m_pCapability->contrast_ctrl.max_value);
+    set(KEY_QC_CONTRAST_STEP, m_pCapability->contrast_ctrl.step);
+    setContrast(m_pCapability->contrast_ctrl.def_value);
+
+    // Set SCE factor
+    set(KEY_QC_MIN_SCE_FACTOR, m_pCapability->sce_ctrl.min_value); // -100
+    set(KEY_QC_MAX_SCE_FACTOR, m_pCapability->sce_ctrl.max_value); // 100
+    set(KEY_QC_SCE_FACTOR_STEP, m_pCapability->sce_ctrl.step);     // 10
+    setSkinToneEnhancement(m_pCapability->sce_ctrl.def_value);     // 0
+
+    // Set Brightness
+    set(KEY_QC_MIN_BRIGHTNESS, m_pCapability->brightness_ctrl.min_value); // 0
+    set(KEY_QC_MAX_BRIGHTNESS, m_pCapability->brightness_ctrl.max_value); // 6
+    set(KEY_QC_BRIGHTNESS_STEP, m_pCapability->brightness_ctrl.step);     // 1
+    setBrightness(m_pCapability->brightness_ctrl.def_value);
+
+    // Set Auto exposure
+    String8 autoExposureValues = createValuesString(
+            m_pCapability->supported_aec_modes,
+            m_pCapability->supported_aec_modes_cnt,
+            AUTO_EXPOSURE_MAP,
+            PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP));
+    set(KEY_QC_SUPPORTED_AUTO_EXPOSURE, autoExposureValues.string());
+    setAutoExposure(AUTO_EXPOSURE_CENTER_WEIGHTED);
+
+    // Set Exposure Compensation
+    set(KEY_MAX_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_max); // 12
+    set(KEY_MIN_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_min); // -12
+    setFloat(KEY_EXPOSURE_COMPENSATION_STEP, m_pCapability->exposure_compensation_step); // 1/6
+    setExposureCompensation(m_pCapability->exposure_compensation_default); // 0
+
+    // Set Antibanding
+    String8 antibandingValues = createValuesString(
+            m_pCapability->supported_antibandings,
+            m_pCapability->supported_antibandings_cnt,
+            ANTIBANDING_MODES_MAP,
+            PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP));
+    set(KEY_SUPPORTED_ANTIBANDING, antibandingValues);
+    setAntibanding(ANTIBANDING_AUTO);
+
+    // Set Effect
+    String8 effectValues = createValuesString(
+            m_pCapability->supported_effects,
+            m_pCapability->supported_effects_cnt,
+            EFFECT_MODES_MAP,
+            PARAM_MAP_SIZE(EFFECT_MODES_MAP));
+
+    if (m_pCapability->supported_effects_cnt > 0) {
+        set(KEY_SUPPORTED_EFFECTS, effectValues);
+    } else {
+        ALOGE("Color effects are not available");
+        set(KEY_SUPPORTED_EFFECTS, EFFECT_NONE);
+    }
+    setEffect(EFFECT_NONE);
+
+    // Set WhiteBalance
+    String8 whitebalanceValues = createValuesString(
+            m_pCapability->supported_white_balances,
+            m_pCapability->supported_white_balances_cnt,
+            WHITE_BALANCE_MODES_MAP,
+            PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP));
+    set(KEY_SUPPORTED_WHITE_BALANCE, whitebalanceValues);
+    setWhiteBalance(WHITE_BALANCE_AUTO);
+
+    // Set supported WB CCT range (hardcoded defaults; ideally these should come from m_pCapability)
+    m_pCapability->min_wb_cct = 2000;
+    m_pCapability->max_wb_cct = 8000;
+    set(KEY_QC_MIN_WB_CCT, m_pCapability->min_wb_cct);
+    set(KEY_QC_MAX_WB_CCT, m_pCapability->max_wb_cct);
+
+    // Set Flash mode
+    if(m_pCapability->supported_flash_modes_cnt > 0) {
+       String8 flashValues = createValuesString(
+               m_pCapability->supported_flash_modes,
+               m_pCapability->supported_flash_modes_cnt,
+               FLASH_MODES_MAP,
+               PARAM_MAP_SIZE(FLASH_MODES_MAP));
+       set(KEY_SUPPORTED_FLASH_MODES, flashValues);
+       setFlash(FLASH_MODE_OFF);
+    } else {
+        ALOGE("%s: supported flash modes cnt is 0!!!", __func__);
+    }
+
+    // Set Scene Mode
+    String8 sceneModeValues = createValuesString(
+            m_pCapability->supported_scene_modes,
+            m_pCapability->supported_scene_modes_cnt,
+            SCENE_MODES_MAP,
+            PARAM_MAP_SIZE(SCENE_MODES_MAP));
+    set(KEY_SUPPORTED_SCENE_MODES, sceneModeValues);
+    setSceneMode(SCENE_MODE_AUTO);
+
+    // Set ISO Mode
+    String8 isoValues = createValuesString(
+            m_pCapability->supported_iso_modes,
+            m_pCapability->supported_iso_modes_cnt,
+            ISO_MODES_MAP,
+            PARAM_MAP_SIZE(ISO_MODES_MAP));
+    set(KEY_QC_SUPPORTED_ISO_MODES, isoValues);
+    setISOValue(ISO_AUTO);
+
+    // Set exposure time range (hardcoded defaults; ideally these should come from m_pCapability)
+    m_pCapability->min_exposure_time = 200;
+    m_pCapability->max_exposure_time = 2000000;
+    set(KEY_QC_MIN_EXPOSURE_TIME, m_pCapability->min_exposure_time);
+    set(KEY_QC_MAX_EXPOSURE_TIME, m_pCapability->max_exposure_time);
+    //setExposureTime("0");
+
+    // Set HFR
+    String8 hfrValues = createHfrValuesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt,
+            HFR_MODES_MAP,
+            PARAM_MAP_SIZE(HFR_MODES_MAP));
+    set(KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES, hfrValues.string());
+    set(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, "off");
+    set(KEY_QC_VIDEO_HIGH_FRAME_RATE, "off");
+    String8 hfrSizeValues = createHfrSizesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt);
+    set(KEY_QC_SUPPORTED_HFR_SIZES, hfrSizeValues.string());
+    setHighFrameRate(CAM_HFR_MODE_OFF);
+
+    // Set Focus algorithms
+    String8 focusAlgoValues = createValuesString(
+            m_pCapability->supported_focus_algos,
+            m_pCapability->supported_focus_algos_cnt,
+            FOCUS_ALGO_MAP,
+            PARAM_MAP_SIZE(FOCUS_ALGO_MAP));
+    set(KEY_QC_SUPPORTED_FOCUS_ALGOS, focusAlgoValues);
+    setSelectableZoneAf(FOCUS_ALGO_AUTO);
+
+    // Set Zoom Ratios
+    if (m_pCapability->zoom_supported > 0) {
+        String8 zoomRatioValues = createZoomRatioValuesString(
+                m_pCapability->zoom_ratio_tbl,
+                m_pCapability->zoom_ratio_tbl_cnt);
+        set(KEY_ZOOM_RATIOS, zoomRatioValues);
+        set(KEY_MAX_ZOOM, (int)(m_pCapability->zoom_ratio_tbl_cnt - 1));
+        setZoom(0);
+    }
+
+    // Set Bracketing/HDR
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.capture.burst.exposures", prop, "");
+    if (strlen(prop) > 0) {
+        set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+    }
+    String8 bracketingValues = createValuesStringFromMap(
+            BRACKETING_MODES_MAP,
+            PARAM_MAP_SIZE(BRACKETING_MODES_MAP));
+    set(KEY_QC_SUPPORTED_AE_BRACKET_MODES, bracketingValues);
+    setAEBracket(AE_BRACKET_OFF);
+
+    //Set AF Bracketing.
+    for (size_t i = 0; i < m_pCapability->supported_focus_modes_cnt; i++) {
+        if ((CAM_FOCUS_MODE_AUTO == m_pCapability->supported_focus_modes[i]) &&
+                ((m_pCapability->qcom_supported_feature_mask &
+                        CAM_QCOM_FEATURE_UBIFOCUS) > 0)) {
+            String8 afBracketingValues = createValuesStringFromMap(
+                    AF_BRACKETING_MODES_MAP,
+                    PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP));
+            set(KEY_QC_SUPPORTED_AF_BRACKET_MODES, afBracketingValues);
+            setAFBracket(AF_BRACKET_OFF);
+         }
+    }
+
+    //Set Chroma Flash.
+    if ((m_pCapability->supported_flash_modes_cnt > 0) &&
+            (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_CHROMA_FLASH) > 0) {
+        String8 chromaFlashValues = createValuesStringFromMap(
+                CHROMA_FLASH_MODES_MAP,
+                PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP));
+        set(KEY_QC_SUPPORTED_CHROMA_FLASH_MODES, chromaFlashValues);
+        setChromaFlash(CHROMA_FLASH_OFF);
+    }
+
+    //Set Opti Zoom.
+    if (m_pCapability->zoom_supported &&
+            (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_OPTIZOOM) > 0){
+        String8 optiZoomValues = createValuesStringFromMap(
+                OPTI_ZOOM_MODES_MAP,
+                PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP));
+        set(KEY_QC_SUPPORTED_OPTI_ZOOM_MODES, optiZoomValues);
+        setOptiZoom(OPTI_ZOOM_OFF);
+    }
+
+    //Set True Portrait
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_TRUEPORTRAIT) > 0) {
+        String8 truePortraitValues = createValuesStringFromMap(
+                TRUE_PORTRAIT_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP));
+        set(KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES, truePortraitValues);
+    }
+
+   //Set FSSR.
+    if ((m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_FSSR) > 0) {
+        String8 fssrValues = createValuesStringFromMap(FSSR_MODES_MAP,
+                PARAM_MAP_SIZE(FSSR_MODES_MAP));
+        set(KEY_QC_SUPPORTED_FSSR_MODES, fssrValues);
+        setFssr(FSSR_OFF);
+    }
+
+    //Set Multi-touch Focus.
+    if ((m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS) > 0){
+        String8 multiTouchFocusValues = createValuesStringFromMap(MULTI_TOUCH_FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(MULTI_TOUCH_FOCUS_MODES_MAP));
+        set(KEY_QC_SUPPORTED_MULTI_TOUCH_FOCUS_MODES, multiTouchFocusValues);
+        setMultiTouchFocus(MULTI_TOUCH_FOCUS_OFF);
+    }
+
+    // Set Denoise
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) > 0){
+    String8 denoiseValues = createValuesStringFromMap(
+        DENOISE_ON_OFF_MODES_MAP, PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP));
+    set(KEY_QC_SUPPORTED_DENOISE, denoiseValues.string());
+#ifdef DEFAULT_DENOISE_MODE_ON
+    setWaveletDenoise(DENOISE_ON);
+#else
+    setWaveletDenoise(DENOISE_OFF);
+#endif
+    }
+
+    // Set feature enable/disable
+    String8 enableDisableValues = createValuesStringFromMap(
+            ENABLE_DISABLE_MODES_MAP, PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP));
+
+    // Set Lens Shading
+    set(KEY_QC_SUPPORTED_LENSSHADE_MODES, enableDisableValues);
+    setLensShadeValue(VALUE_ENABLE);
+
+    // Set MCE
+    set(KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES, enableDisableValues);
+    setMCEValue(VALUE_ENABLE);
+
+    // Set DIS
+    set(KEY_QC_SUPPORTED_DIS_MODES, enableDisableValues);
+    setDISValue(VALUE_DISABLE);
+
+    // Set Histogram
+    set(KEY_QC_SUPPORTED_HISTOGRAM_MODES,
+        m_pCapability->histogram_supported ? enableDisableValues : "");
+    set(KEY_QC_HISTOGRAM, VALUE_DISABLE);
+
+    //Set Red Eye Reduction
+    set(KEY_QC_SUPPORTED_REDEYE_REDUCTION, enableDisableValues);
+    setRedeyeReduction(VALUE_DISABLE);
+
+    //Set SkinTone Enhancement
+    set(KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES, enableDisableValues);
+
+    // Set feature on/off
+    String8 onOffValues = createValuesStringFromMap(
+            ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+
+    //Set See more (LLVD)
+    if (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_LLVD) {
+        set(KEY_QC_SUPPORTED_SEE_MORE_MODES, onOffValues);
+        setSeeMore(VALUE_OFF);
+    }
+
+    //Set Scene Detection
+    set(KEY_QC_SUPPORTED_SCENE_DETECT, onOffValues);
+    setSceneDetect(VALUE_OFF);
+    m_bHDREnabled = false;
+    m_bHDR1xFrameEnabled = false;
+
+    m_bHDRThumbnailProcessNeeded = false;
+    m_bHDR1xExtraBufferNeeded = true;
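+    // If the HDR bracketing table already contains a 0 EV entry, the normally
+    // exposed (1x) frame is part of the bracket and no extra buffer is needed.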
+    for (uint32_t i=0; i<m_pCapability->hdr_bracketing_setting.num_frames; i++) {
+        if (0 == m_pCapability->hdr_bracketing_setting.exp_val.values[i]) {
+            m_bHDR1xExtraBufferNeeded = false;
+            break;
+        }
+    }
+
+    // Set HDR output scaling
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.hdr.outcrop", value, VALUE_DISABLE);
+    if (strncmp(VALUE_ENABLE, value, sizeof(VALUE_ENABLE))) {
+      m_bHDROutputCropEnabled = false;
+    } else {
+      m_bHDROutputCropEnabled = true;
+    }
+
+    //Set Face Detection
+    set(KEY_QC_SUPPORTED_FACE_DETECTION, onOffValues);
+    set(KEY_QC_FACE_DETECTION, VALUE_OFF);
+
+    //Set Face Recognition
+    //set(KEY_QC_SUPPORTED_FACE_RECOGNITION, onOffValues);
+    //set(KEY_QC_FACE_RECOGNITION, VALUE_OFF);
+
+    //Set ZSL
+    set(KEY_QC_SUPPORTED_ZSL_MODES, onOffValues);
+#ifdef DEFAULT_ZSL_MODE_ON
+    set(KEY_QC_ZSL, VALUE_ON);
+    m_bZslMode = true;
+#else
+    set(KEY_QC_ZSL, VALUE_OFF);
+    m_bZslMode = false;
+#endif
+    m_bZslMode_new = m_bZslMode;
+
+    //Set video HDR
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_VIDEO_HDR) > 0) {
+        set(KEY_QC_SUPPORTED_VIDEO_HDR_MODES, onOffValues);
+        set(KEY_QC_VIDEO_HDR, VALUE_OFF);
+    }
+
+    //Set HW Sensor Snapshot HDR
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SENSOR_HDR)> 0) {
+        set(KEY_QC_SUPPORTED_SENSOR_HDR_MODES, onOffValues);
+        set(KEY_QC_SENSOR_HDR, VALUE_OFF);
+        m_bSensorHDREnabled = false;
+    }
+
+    // Set VT TimeStamp
+    set(KEY_QC_VT_ENABLE, VALUE_DISABLE);
+    //Set Touch AF/AEC
+    String8 touchValues = createValuesStringFromMap(
+            TOUCH_AF_AEC_MODES_MAP, PARAM_MAP_SIZE(TOUCH_AF_AEC_MODES_MAP));
+
+    set(KEY_QC_SUPPORTED_TOUCH_AF_AEC, touchValues);
+    set(KEY_QC_TOUCH_AF_AEC, TOUCH_AF_AEC_OFF);
+
+    //set flip mode
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) > 0) {
+        String8 flipModes = createValuesStringFromMap(
+                FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP));
+        set(KEY_QC_SUPPORTED_FLIP_MODES, flipModes);
+        set(KEY_QC_PREVIEW_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_VIDEO_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_SNAPSHOT_PICTURE_FLIP, FLIP_MODE_OFF);
+    }
+
+    // Set default Auto Exposure lock value
+    setAecLock(VALUE_FALSE);
+
+    // Set default AWB_LOCK lock value
+    setAwbLock(VALUE_FALSE);
+
+    // Set default Camera mode
+    set(KEY_QC_CAMERA_MODE, 0);
+
+    // Add support for internal preview restart
+    set(KEY_INTERNAL_PERVIEW_RESTART, VALUE_TRUE);
+
+    // Set default longshot mode
+    set(KEY_QC_LONG_SHOT, "off");
+
+    //Get RAM size and disable memory-intensive features on low-RAM targets
+    struct sysinfo info;
+    sysinfo(&info);
+
+    CDBG_HIGH("%s: totalram = %ld, freeram = %ld ", __func__, info.totalram,
+        info.freeram);
+    if (info.totalram > TOTAL_RAM_SIZE_512MB) {
+        set(KEY_QC_LONGSHOT_SUPPORTED, VALUE_TRUE);
+        set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_TRUE);
+    } else {
+        set(KEY_QC_LONGSHOT_SUPPORTED, VALUE_FALSE);
+        set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_FALSE);
+    }
+
+    int32_t rc = commitParameters();
+    if (rc == NO_ERROR) {
+        rc = setNumOfSnapshot();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize parameter obj
+ *
+ * PARAMETERS :
+ *   @capabilities  : ptr to camera capabilities
+ *   @mmOps         : ptr to memory ops table for mapping/unmapping
+ *   @adjustFPS     : ptr to FPS adjustment interface
+ *   @torch         : ptr to torch control interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::init(cam_capability_t *capabilities,
+                                mm_camera_vtbl_t *mmOps,
+                                QCameraAdjustFPS *adjustFPS,
+                                QCameraTorchInterface *torch)
+{
+    int32_t rc = NO_ERROR;
+
+    m_pCapability = capabilities;
+    m_pCamOpsTbl = mmOps;
+    m_AdjustFPS = adjustFPS;
+    m_pTorch = torch;
+
+    //Allocate Set Param Buffer
+    m_pParamHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    rc = m_pParamHeap->allocate(1, ONE_MB_OF_PARAMS);
+    if(rc != OK) {
+        rc = NO_MEMORY;
+        ALOGE("Failed to allocate SETPARM Heap memory");
+        goto TRANS_INIT_ERROR1;
+    }
+
+    //Map memory for parameters buffer
+    rc = m_pCamOpsTbl->ops->map_buf(m_pCamOpsTbl->camera_handle,
+                             CAM_MAPPING_BUF_TYPE_PARM_BUF,
+                             m_pParamHeap->getFd(0),
+                             ONE_MB_OF_PARAMS);
+    if(rc < 0) {
+        ALOGE("%s:failed to map SETPARM buffer",__func__);
+        rc = FAILED_TRANSACTION;
+        goto TRANS_INIT_ERROR2;
+    }
+    m_pParamBuf = (parm_buffer_new_t*) DATA_PTR(m_pParamHeap,0);
+
+    initDefaultParameters();
+
+    m_bInited = true;
+
+    goto TRANS_INIT_DONE;
+
+TRANS_INIT_ERROR2:
+    m_pParamHeap->deallocate();
+
+TRANS_INIT_ERROR1:
+    delete m_pParamHeap;
+    m_pParamHeap = NULL;
+
+TRANS_INIT_DONE:
+    return rc;
+}
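+
+/* Call sketch (illustrative only; the capability table, ops table and helper
+ * objects shown here are assumed to come from the surrounding HAL code):
+ *
+ *     QCameraParameters params;
+ *     if (params.init(capabilities, camOpsTbl, fpsAdjuster, torchIf) == NO_ERROR) {
+ *         // ... use updateParameters()/commitParameters() ...
+ *         params.deinit();   // unmaps and frees the parameter buffer
+ *     }
+ */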
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: deinitialize
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::deinit()
+{
+    if (!m_bInited) {
+        return;
+    }
+
+    //clear all entries in the map
+    String8 emptyStr;
+    QCameraParameters::unflatten(emptyStr);
+
+    if (NULL != m_pCamOpsTbl) {
+        m_pCamOpsTbl->ops->unmap_buf(
+                             m_pCamOpsTbl->camera_handle,
+                             CAM_MAPPING_BUF_TYPE_PARM_BUF);
+        m_pCamOpsTbl = NULL;
+    }
+    m_pCapability = NULL;
+    if (NULL != m_pParamHeap) {
+        m_pParamHeap->deallocate();
+        delete m_pParamHeap;
+        m_pParamHeap = NULL;
+        m_pParamBuf = NULL;
+    }
+
+    m_AdjustFPS = NULL;
+
+    m_tempMap.clear();
+
+    m_bInited = false;
+}
+
+/*===========================================================================
+ * FUNCTION   : parse_pair
+ *
+ * DESCRIPTION: helper function to parse string like "640x480" or "10000,20000"
+ *
+ * PARAMETERS :
+ *   @str     : input string to be parsed
+ *   @first   : [output] first value of the pair
+ *   @second  : [output] second value of the pair
+ *   @delim   : [input] delimiter to separate the pair
+ *   @endptr  : [output] ptr to the end of the pair string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parse_pair(const char *str,
+                                      int *first,
+                                      int *second,
+                                      char delim,
+                                      char **endptr = NULL)
+{
+    // Find the first integer.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If a delimiter does not immediately follow, give up.
+    if (*end != delim) {
+        ALOGE("Cannot find delimiter (%c) in str=%s", delim, str);
+        return BAD_VALUE;
+    }
+
+    // Find the second integer, immediately after the delimiter.
+    int h = (int)strtol(end+1, &end, 10);
+
+    *first = w;
+    *second = h;
+
+    if (endptr) {
+        *endptr = end;
+    }
+
+    return NO_ERROR;
+}
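+
+/* Example (illustrative only), for the inputs described above:
+ *
+ *     int w = 0, h = 0;
+ *     char *end = NULL;
+ *     parse_pair("640x480", &w, &h, 'x', &end);      // w == 640, h == 480
+ *     parse_pair("10000,20000", &w, &h, ',', &end);  // w == 10000, h == 20000
+ *
+ * A return other than NO_ERROR means the delimiter was not found right after
+ * the first integer.
+ */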
+
+/*===========================================================================
+ * FUNCTION   : parseSizesList
+ *
+ * DESCRIPTION: helper function to parse string containing sizes
+ *
+ * PARAMETERS :
+ *   @sizesStr: [input] input string to be parsed
+ *   @sizes   : [output] reference to store parsed sizes
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+    if (sizesStr == 0) {
+        return;
+    }
+
+    char *sizeStartPtr = (char *)sizesStr;
+
+    while (true) {
+        int width, height;
+        int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+                                 &sizeStartPtr);
+        if (success != NO_ERROR || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+            ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+            return;
+        }
+        sizes.push(Size(width, height));
+
+        if (*sizeStartPtr == '\0') {
+            return;
+        }
+        sizeStartPtr++;
+    }
+}
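+
+/* Example (illustrative only): a list such as "640x480,320x240" yields two
+ * entries in the output vector:
+ *
+ *     Vector<Size> sizes;
+ *     parseSizesList("640x480,320x240", sizes);
+ *     // sizes[0] == Size(640, 480), sizes[1] == Size(320, 240)
+ */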
+
+/*===========================================================================
+ * FUNCTION   : getSupportedHfrSizes
+ *
+ * DESCRIPTION: return supported HFR sizes
+ *
+ * PARAMETERS :
+ *   @sizes  : [output] reference to a vector storing supported HFR sizes
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getSupportedHfrSizes(Vector<Size> &sizes)
+{
+    const char *hfrSizesStr = get(KEY_QC_SUPPORTED_HFR_SIZES);
+    parseSizesList(hfrSizesStr, sizes);
+}
+
+/*===========================================================================
+ * FUNCTION   : adjustPreviewFpsRange
+ *
+ * DESCRIPTION: adjust preview FPS range
+ *              according to external events
+ *
+ * PARAMETERS :
+ *   @fpsRange : ptr to the FPS range to apply
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+    if ( fpsRange == NULL ) {
+        return BAD_VALUE;
+    }
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    int32_t rc = initBatchUpdate(m_pParamBuf);
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return rc;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_FPS_RANGE,
+                                  sizeof(cam_fps_range_t),
+                                  fpsRange);
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s: Parameters batch failed",__func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s:Failed to commit batch parameters", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview and video FPS range
+ *
+ * PARAMETERS :
+ *   @min_fps     : min preview FPS value (fps * 1000)
+ *   @max_fps     : max preview FPS value (fps * 1000)
+ *   @vid_min_fps : min video FPS value (fps * 1000)
+ *   @vid_max_fps : max video FPS value (fps * 1000)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(int min_fps,
+        int max_fps, int vid_min_fps,int vid_max_fps)
+{
+    char str[32];
+    char value[PROPERTY_VALUE_MAX];
+    int fixedFpsValue;
+    /* This property, when set to a non-zero value, forces the preview fps to that fixed value */
+    property_get("persist.debug.set.fixedfps", value, "0");
+    fixedFpsValue = atoi(value);
+
+    CDBG("%s: E minFps = %d, maxFps = %d , vid minFps = %d, vid maxFps = %d",
+                __func__, min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+    if(fixedFpsValue != 0) {
+      min_fps = (int)fixedFpsValue*1000;
+      max_fps = (int)fixedFpsValue*1000;
+    }
+    snprintf(str, sizeof(str), "%d,%d", min_fps, max_fps);
+    CDBG("%s: Setting preview fps range %s", __func__, str);
+    updateParamEntry(KEY_PREVIEW_FPS_RANGE, str);
+    cam_fps_range_t fps_range;
+    memset(&fps_range, 0x00, sizeof(cam_fps_range_t));
+    fps_range.min_fps = (float)min_fps / 1000.0f;
+    fps_range.max_fps = (float)max_fps / 1000.0f;
+    fps_range.video_min_fps = (float)vid_min_fps / 1000.0f;
+    fps_range.video_max_fps = (float)vid_max_fps / 1000.0f;
+
+    CDBG_HIGH("%s: Updated: minFps = %d, maxFps = %d ,"
+            " vid minFps = %d, vid maxFps = %d",
+            __func__, min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+    if ( NULL != m_AdjustFPS ) {
+        m_AdjustFPS->recalcFPSRange(min_fps, max_fps, fps_range);
+        CDBG_HIGH("%s: Thermal adjusted Preview fps range %3.2f,%3.2f, %3.2f, %3.2f",
+              __func__, fps_range.min_fps, fps_range.max_fps,
+              fps_range.video_min_fps, fps_range.video_max_fps);
+    }
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_FPS_RANGE,
+                                  sizeof(cam_fps_range_t),
+                                  &fps_range);
+}
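+
+/* Example (illustrative only): FPS values are passed in fixed point, i.e.
+ * fps * 1000, matching the preview-fps-range key convention. Requesting a
+ * 15-30 fps preview with the same range for video would look like:
+ *
+ *     setPreviewFpsRange(15000, 30000, 15000, 30000);
+ *     // -> fps_range = {15.0, 30.0, 15.0, 30.0} staged in the batch
+ *     //    (the range may still be thermally adjusted via m_AdjustFPS)
+ */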
+
+
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure
+ *
+ * PARAMETERS :
+ *   @autoExp : auto exposure value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const char *autoExp)
+{
+    if (autoExp != NULL) {
+        int32_t value = lookupAttr(AUTO_EXPOSURE_MAP, PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP), autoExp);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting auto exposure %s", __func__, autoExp);
+            updateParamEntry(KEY_QC_AUTO_EXPOSURE, autoExp);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AEC_ALGO_TYPE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid auto exposure value: %s", (autoExp == NULL) ? "NULL" : autoExp);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: set effect
+ *
+ * PARAMETERS :
+ *   @effect  : effect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const char *effect)
+{
+    if (effect != NULL) {
+        int32_t value = lookupAttr(EFFECT_MODES_MAP, PARAM_MAP_SIZE(EFFECT_MODES_MAP), effect);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting effect %s", __func__, effect);
+            updateParamEntry(KEY_EFFECT, effect);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_EFFECT,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid effect value: %s", (effect == NULL) ? "NULL" : effect);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: set brightness control value
+ *
+ * PARAMETERS :
+ *   @brightness  : brightness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(int brightness)
+{
+    char val[16];
+    sprintf(val, "%d", brightness);
+    updateParamEntry(KEY_QC_BRIGHTNESS, val);
+
+    int32_t value = brightness;
+    CDBG("%s: Setting brightness %s", __func__, val);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_BRIGHTNESS,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: set focus mode
+ *
+ * PARAMETERS :
+ *   @focusMode  : focus mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const char *focusMode)
+{
+    int32_t rc;
+    if (focusMode != NULL) {
+        int32_t value = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focusMode);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting focus mode %s", __func__, focusMode);
+            mFocusMode = (cam_focus_mode_type)value;
+
+            updateParamEntry(KEY_FOCUS_MODE, focusMode);
+            rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_FOCUS_MODE,
+                                          sizeof(value),
+                                          &value);
+            if (strcmp(focusMode,"infinity")==0){
+                set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+            }
+            return rc;
+        }
+    }
+    ALOGE("Invalid focus mode value: %s", (focusMode == NULL) ? "NULL" : focusMode);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusPosition
+ *
+ * DESCRIPTION: set focus position
+ *
+ * PARAMETERS :
+ *   @typeStr : focus position type, index or dac_code
+ *   @posStr : focus position
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setFocusPosition(const char *typeStr, const char *posStr)
+{
+    CDBG_HIGH("%s, type:%s, pos: %s", __func__, typeStr, posStr);
+    int32_t type = atoi(typeStr);
+    int32_t pos  = atoi(posStr);
+
+    if ((type >= CAM_MANUAL_FOCUS_MODE_INDEX) &&
+        (type < CAM_MANUAL_FOCUS_MODE_MAX)) {
+
+        // get max and min focus position from m_pCapability
+        int32_t minFocusPos = m_pCapability->min_focus_pos[type];
+        int32_t maxFocusPos = m_pCapability->max_focus_pos[type];
+        CDBG_HIGH("%s, focusPos min: %d, max: %d", __func__, minFocusPos, maxFocusPos);
+
+        if (pos >= minFocusPos && pos <= maxFocusPos) {
+            m_curFocusPos = pos;
+            updateParamEntry(KEY_QC_MANUAL_FOCUS_POS_TYPE, typeStr);
+            updateParamEntry(KEY_QC_MANUAL_FOCUS_POSITION, posStr);
+
+            cam_manual_focus_parm_t manual_focus;
+            manual_focus.flag = (cam_manual_focus_mode_type)type;
+            manual_focus.af_manual_lens_position = pos;
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_MANUAL_FOCUS_POS,
+                                          sizeof(manual_focus),
+                                          &manual_focus);
+        }
+    }
+
+    ALOGE("%s, invalid params, type:%d, pos: %d", __func__, type, pos);
+    return BAD_VALUE;
+}
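+
+/* Example (illustrative only; assumes CAM_MANUAL_FOCUS_MODE_INDEX == 0, as the
+ * range check above suggests), with the index range initialized earlier
+ * (min 40, max 60):
+ *
+ *     setFocusPosition("0", "50");   // within [40, 60] -> position accepted
+ *     setFocusPosition("0", "80");   // outside [40, 60] -> returns BAD_VALUE
+ */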
+
+/*===========================================================================
+ * FUNCTION   : updateCurrentFocusPosition
+ *
+ * DESCRIPTION: update current focus position from metadata callback
+ *
+ * PARAMETERS :
+ *   @pos : current focus position
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::updateCurrentFocusPosition(int32_t pos)
+{
+    if (pos != m_curFocusPos) {
+        ALOGE("update focus position. old:%d, now:%d", m_curFocusPos, pos);
+        m_curFocusPos = pos;
+        set(KEY_QC_MANUAL_FOCUS_POSITION, pos);
+    }
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value
+ *
+ * PARAMETERS :
+ *   @sharpness  : sharpness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(int sharpness)
+{
+    char val[16];
+    sprintf(val, "%d", sharpness);
+    updateParamEntry(KEY_QC_SHARPNESS, val);
+    CDBG("%s: Setting sharpness %s", __func__, val);
+
+    int32_t value = sharpness;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SHARPNESS,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement value
+ *
+ * PARAMETERS :
+ *   @sceFactor  : skin tone enhancement factor value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(int sceFactor)
+{
+    char val[16];
+    sprintf(val, "%d", sceFactor);
+    updateParamEntry(KEY_QC_SCE_FACTOR, val);
+    CDBG("%s: Setting skintone enhancement %s", __func__, val);
+
+    int32_t value = sceFactor;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SCE_FACTOR,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: set saturation control value
+ *
+ * PARAMETERS :
+ *   @saturation : saturation control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(int saturation)
+{
+    char val[16];
+    sprintf(val, "%d", saturation);
+    updateParamEntry(KEY_QC_SATURATION, val);
+    CDBG("%s: Setting saturation %s", __func__, val);
+
+    int32_t value = saturation;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SATURATION,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: set contrast control value
+ *
+ * PARAMETERS :
+ *   @contrast : contrast control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(int contrast)
+{
+    char val[16];
+    sprintf(val, "%d", contrast);
+    updateParamEntry(KEY_QC_CONTRAST, val);
+    CDBG("%s: Setting contrast %s", __func__, val);
+
+    int32_t value = contrast;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_CONTRAST,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value
+ *
+ * PARAMETERS :
+ *   @sceneDetect  : scene detect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const char *sceneDetect)
+{
+    if (sceneDetect != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+                sceneDetect);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting Scene Detect %s", __func__, sceneDetect);
+            updateParamEntry(KEY_QC_SCENE_DETECT, sceneDetect);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ASD_ENABLE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Scene Detect value: %s",
+          (sceneDetect == NULL) ? "NULL" : sceneDetect);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSensorSnapshotHDR
+ *
+ * DESCRIPTION: set snapshot HDR value
+ *
+ * PARAMETERS :
+ *   @snapshotHDR  : snapshot HDR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSensorSnapshotHDR(const char *snapshotHDR)
+{
+    if (snapshotHDR != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+                snapshotHDR);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting Sensor Snapshot HDR %s", __func__, snapshotHDR);
+            updateParamEntry(KEY_QC_SENSOR_HDR, snapshotHDR);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_SENSOR_HDR,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    CDBG_HIGH("Invalid Snapshot HDR value: %s",
+          (snapshotHDR == NULL) ? "NULL" : snapshotHDR);
+    return BAD_VALUE;
+
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value
+ *
+ * PARAMETERS :
+ *   @videoHDR  : video HDR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const char *videoHDR)
+{
+    if (videoHDR != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), videoHDR);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting Video HDR %s", __func__, videoHDR);
+            updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_VIDEO_HDR,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Video HDR value: %s",
+          (videoHDR == NULL) ? "NULL" : videoHDR);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVtEnable
+ *
+ * DESCRIPTION: set VT enable value
+ *
+ * PARAMETERS :
+ *   @vtEnable  : VT enable value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const char *vtEnable)
+{
+    if (vtEnable != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), vtEnable);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting Vt Enable %s", __func__, vtEnable);
+            m_bAVTimerEnabled = true;
+            updateParamEntry(KEY_QC_VT_ENABLE, vtEnable);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_VT,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Vt Enable value: %s",
+          (vtEnable == NULL) ? "NULL" : vtEnable);
+    m_bAVTimerEnabled = false;
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition value
+ *
+ * PARAMETERS :
+ *   @faceRecog  : face recognition value string
+ *   @maxFaces   : maximum number of faces to be detected/recognized
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const char *faceRecog,
+        uint32_t maxFaces)
+{
+    if (faceRecog != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), faceRecog);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting face recognition %s", __func__, faceRecog);
+            updateParamEntry(KEY_QC_FACE_RECOGNITION, faceRecog);
+
+            uint32_t faceProcMask = m_nFaceProcMask;
+            if (value > 0) {
+                faceProcMask |= CAM_FACE_PROCESS_MASK_RECOGNITION;
+            } else {
+                faceProcMask &= (uint32_t)(~CAM_FACE_PROCESS_MASK_RECOGNITION);
+            }
+
+            if(m_nFaceProcMask == faceProcMask) {
+                CDBG_HIGH("%s: face process mask not changed, no ops here", __func__);
+                return NO_ERROR;
+            }
+            m_nFaceProcMask = faceProcMask;
+            CDBG_HIGH("%s: FaceProcMask -> %d", __func__, m_nFaceProcMask);
+
+            // set parm for face process
+            cam_fd_set_parm_t fd_set_parm;
+            memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+            fd_set_parm.fd_mode = m_nFaceProcMask;
+            fd_set_parm.num_fd = maxFaces;
+
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                        CAM_INTF_PARM_FD,
+                                        sizeof(fd_set_parm),
+                                        &fd_set_parm);
+        }
+    }
+    ALOGE("Invalid face recognition value: %s", (faceRecog == NULL) ? "NULL" : faceRecog);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZoom
+ *
+ * DESCRIPTION: set zoom level
+ *
+ * PARAMETERS :
+ *   @zoom_level : zoom level
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(int zoom_level)
+{
+    char val[16];
+    sprintf(val, "%d", zoom_level);
+    updateParamEntry(KEY_ZOOM, val);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_ZOOM,
+                                  sizeof(zoom_level),
+                                  &zoom_level);
+}
+
+/*===========================================================================
+ * FUNCTION   : setISOValue
+ *
+ * DESCRIPTION: set ISO value
+ *
+ * PARAMETERS :
+ *   @isoValue : ISO value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setISOValue(const char *isoValue)
+{
+    char iso[PROPERTY_VALUE_MAX];
+    int32_t continous_iso = 0;
+    // Check if continuous ISO is set
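+    // A non-zero persist.camera.continuous.iso value overrides any app-supplied
+    // ISO mode and is passed to the backend as a raw ISO number, e.g. (assuming
+    // the usual Android property tooling) "adb shell setprop persist.camera.continuous.iso 800".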
+    property_get("persist.camera.continuous.iso", iso, "0");
+    continous_iso = atoi(iso);
+
+    if(continous_iso != 0) {
+        CDBG("%s: Setting continuous ISO value %d", __func__, continous_iso);
+        return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ISO,
+                                          sizeof(continous_iso),
+                                          &continous_iso);
+    } else if (isoValue != NULL) {
+        int32_t value = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), isoValue);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting ISO value %s", __func__, isoValue);
+            updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ISO,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid ISO value: %s",
+          (isoValue == NULL) ? "NULL" : isoValue);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureTime
+ *
+ * DESCRIPTION: set exposure time
+ *
+ * PARAMETERS :
+ *   @expTimeStr : string of exposure time, range (1/5, 2000) in ms
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setExposureTime(const char *expTimeStr)
+{
+    if (expTimeStr != NULL) {
+        int32_t expTimeUs = atoi(expTimeStr);
+        int32_t min_exp_time = m_pCapability->min_exposure_time; /* 200 */
+        int32_t max_exp_time = m_pCapability->max_exposure_time; /* 2000000 */
+
+        // expTime == 0 means not to use manual exposure time.
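+        // The string is parsed as integer microseconds, so the documented
+        // 1/5 ms .. 2000 ms range corresponds to 200 .. 2000000 us here;
+        // e.g. "33333" requests roughly a 1/30 s exposure (illustrative value).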
+        if (expTimeUs == 0 ||
+            (expTimeUs >= min_exp_time && expTimeUs <= max_exp_time)) {
+            CDBG_HIGH("%s, exposure time: %d", __func__, expTimeUs);
+            updateParamEntry(KEY_QC_EXPOSURE_TIME, expTimeStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_EXPOSURE_TIME,
+                                          sizeof(expTimeUs),
+                                          &expTimeUs);
+        }
+    }
+
+    ALOGE("Invalid exposure time, value: %s",
+          (expTimeStr == NULL) ? "NULL" : expTimeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFlashValue
+ *
+ * DESCRIPTION: get flash mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t flash mode index (NAME_NOT_FOUND if not set)
+ *
+ *==========================================================================*/
+int32_t QCameraParameters::getFlashValue()
+{
+    const char *flash_str = get(QCameraParameters::KEY_FLASH_MODE);
+    int32_t flash_index = lookupAttr(FLASH_MODES_MAP, PARAM_MAP_SIZE(FLASH_MODES_MAP),
+            flash_str);
+
+    return flash_index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSupportedFlashModes
+ *
+ * DESCRIPTION: get supported flash modes
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t FLASH_FUNC_PRESENT if any flash modes are supported,
+ *              NO_FLASH_FUNC otherwise
+ *
+ *==========================================================================*/
+int32_t QCameraParameters::getSupportedFlashModes()
+{
+    const char *flash_modes = get(QCameraParameters::KEY_SUPPORTED_FLASH_MODES);
+    int flash_presence = -1;
+
+    if (flash_modes != NULL)
+        flash_presence = FLASH_FUNC_PRESENT;
+    else
+        flash_presence = NO_FLASH_FUNC;
+
+    return flash_presence;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRedEyeValue
+ *
+ * DESCRIPTION: get red eye reduction mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t red eye reduction mode index (NAME_NOT_FOUND if not set)
+ *
+ *==========================================================================*/
+int32_t QCameraParameters::getRedEyeValue()
+{
+    const char *redEye_str = get(QCameraParameters::KEY_QC_REDEYE_REDUCTION);
+    int32_t redEye = lookupAttr(ENABLE_DISABLE_MODES_MAP, PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP),
+            redEye_str);
+
+    return redEye;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLongshotEnable
+ *
+ * DESCRIPTION: set a flag indicating longshot mode
+ *
+ * PARAMETERS :
+ *   @enable  : true - Longshot enabled
+ *              false - Longshot disabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotEnable(bool enable)
+{
+    int32_t rc = NO_ERROR;
+    int8_t value = enable;
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
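+    // Standalone setters like this one follow the same batch pattern: start a
+    // fresh parameter batch, add the single entry, then commit it to the
+    // backend in one commitSetBatch() call.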
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+          CAM_INTF_PARM_LONGSHOT_ENABLE,
+          sizeof(value),
+          &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to parameter changes", __func__);
+        return rc;
+    }
+
+    if (enable == true) m_bLongshotEnabled = enable;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: set flash mode
+ *
+ * PARAMETERS :
+ *   @flashStr : LED flash mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const char *flashStr)
+{
+    if (flashStr != NULL) {
+        int32_t value = lookupAttr(FLASH_MODES_MAP, PARAM_MAP_SIZE(FLASH_MODES_MAP), flashStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting Flash value %s", __func__, flashStr);
+
+            if ( NULL != m_pTorch ) {
+                if ( value == CAM_FLASH_MODE_TORCH && !m_bRecordingHint_new) {
+                    m_pTorch->prepareTorchCamera();
+                } else {
+                    m_bReleaseTorchCamera = true;
+                }
+            }
+
+            updateParamEntry(KEY_FLASH_MODE, flashStr);
+            mFlashValue = value;
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid flash value: %s", (flashStr == NULL) ? "NULL" : flashStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value
+ *
+ * PARAMETERS :
+ *   @aecLockStr : AEC lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const char *aecLockStr)
+{
+    if (aecLockStr != NULL) {
+        int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                aecLockStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting AECLock value %s", __func__, aecLockStr);
+            updateParamEntry(KEY_AUTO_EXPOSURE_LOCK, aecLockStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AEC_LOCK,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AECLock value: %s",
+        (aecLockStr == NULL) ? "NULL" : aecLockStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock value
+ *
+ * PARAMETERS :
+ *   @awbLockStr : AWB lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const char *awbLockStr)
+{
+    if (awbLockStr != NULL) {
+        int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                awbLockStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting AWBLock value %s", __func__, awbLockStr);
+            updateParamEntry(KEY_AUTO_WHITEBALANCE_LOCK, awbLockStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AWB_LOCK,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AWBLock value: %s", (awbLockStr == NULL) ? "NULL" : awbLockStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value
+ *
+ * PARAMETERS :
+ *   @mceStr : MCE value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const char *mceStr)
+{
+    if (mceStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), mceStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting AWBLock value %s", __func__, mceStr);
+            updateParamEntry(KEY_QC_MEMORY_COLOR_ENHANCEMENT, mceStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_MCE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid MCE value: %s", (mceStr == NULL) ? "NULL" : mceStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTintlessValue
+ *
+ * DESCRIPTION: enable/disable tintless from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_TINTLESS_ENABLE);
+    const char *prev_str = get(KEY_QC_TINTLESS_ENABLE);
+    char prop[PROPERTY_VALUE_MAX];
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.tintless", prop, VALUE_DISABLE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setTintlessValue(str);
+        }
+    } else {
+        if (prev_str == NULL ||
+            strcmp(prev_str, prop) != 0 ) {
+            setTintlessValue(prop);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTintlessValue
+ *
+ * DESCRIPTION: set tintless value
+ *
+ * PARAMETERS :
+ *   @tintStr : Tintless value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const char *tintStr)
+{
+    if (tintStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), tintStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting Tintless value %s", __func__, tintStr);
+            updateParamEntry(KEY_QC_TINTLESS_ENABLE, tintStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_TINTLESS,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Tintless value: %s", (tintStr == NULL) ? "NULL" : tintStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCDSMode
+ *
+ * DESCRIPTION: Set CDS mode
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCDSMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_CDS_MODE);
+    const char *prev_str = get(KEY_QC_CDS_MODE);
+    char *cds_mode_str = NULL;
+    int32_t rc = NO_ERROR;
+    char prop[PROPERTY_VALUE_MAX];
+
+    if (str) {
+        if (!prev_str || !strcmp(str, prev_str)) {
+            cds_mode_str = (char *)str;
+        }
+    } else {
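+        // No app-supplied value: fall back to the persist.camera.CDS property,
+        // which defaults to CDS off.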
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.CDS", prop, CDS_MODE_OFF);
+        cds_mode_str = prop;
+    }
+
+    if (cds_mode_str) {
+        CDBG("%s: Set CDS mode = %s", __func__, cds_mode_str);
+        int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP), cds_mode_str);
+        if (NAME_NOT_FOUND != cds_mode) {
+            rc = AddSetParmEntryToBatch(m_pParamBuf, CAM_INTF_PARM_CDS_MODE,
+                    sizeof(cds_mode), &cds_mode);
+            if (rc != NO_ERROR) {
+                ALOGE("%s:Failed CDS MODE to update table", __func__);
+            }
+        } else {
+            ALOGE("%s: Invalid argument for CDS MODE %s", __func__,  cds_mode_str);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: set DIS value
+ *
+ * PARAMETERS :
+ *   @disStr : DIS value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const char *disStr)
+{
+    if (disStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), disStr);
+        if (value != NAME_NOT_FOUND) {
+            //For some IS types (like EIS 2.0), when DIS value is changed, we need to restart
+            //preview because of topology change in backend. But, for now, restart preview
+            //for all IS types.
+            m_bNeedRestart = true;
+            CDBG_HIGH("%s: Setting DIS value %s", __func__, disStr);
+            updateParamEntry(KEY_QC_DIS, disStr);
+            if (!(strcmp(disStr,"enable"))) {
+                m_bDISEnabled = true;
+            } else {
+                m_bDISEnabled = false;
+            }
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_DIS_ENABLE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid DIS value: %s", (disStr == NULL) ? "NULL" : disStr);
+    m_bDISEnabled = false;
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHighFrameRate
+ *
+ * DESCRIPTION: set high frame rate
+ *
+ * PARAMETERS :
+ *   @hfrMode : HFR mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHighFrameRate(const int32_t hfrMode)
+{
+    int32_t value = hfrMode;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_HFR,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value
+ *
+ * PARAMETERS :
+ *   @lensShadeStr : lens shade value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const char *lensShadeStr)
+{
+    if (lensShadeStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), lensShadeStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting LensShade value %s", __func__, lensShadeStr);
+            updateParamEntry(KEY_QC_LENSSHADE, lensShadeStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ROLLOFF,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid LensShade value: %s",
+          (lensShadeStr == NULL) ? "NULL" : lensShadeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value
+ *
+ * PARAMETERS :
+ *   @expComp : exposure compensation value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(int expComp)
+{
+    char val[16];
+    sprintf(val, "%d", expComp);
+    updateParamEntry(KEY_EXPOSURE_COMPENSATION, val);
+
+    // Don't need to pass step as part of setParameter because
+    // camera daemon is already aware of it.
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_EXPOSURE_COMPENSATION,
+                                  sizeof(expComp),
+                                  &expComp);
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance mode
+ *
+ * PARAMETERS :
+ *   @wbStr   : white balance mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const char *wbStr)
+{
+    if (wbStr != NULL) {
+        int32_t value = lookupAttr(WHITE_BALANCE_MODES_MAP,
+                PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP), wbStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting WhiteBalance value %s", __func__, wbStr);
+            updateParamEntry(KEY_WHITE_BALANCE, wbStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_WHITE_BALANCE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid WhiteBalance value: %s", (wbStr == NULL) ? "NULL" : wbStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWBManualCCT
+ *
+ * DESCRIPTION: set manual white balance color temperature (CCT)
+ *
+ * PARAMETERS :
+ *   @cctStr : string of wb cct, range (2000, 8000) in K.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setWBManualCCT(const char *cctStr)
+{
+    if (cctStr != NULL) {
+        int32_t cctVal = atoi(cctStr);
+        int32_t minCct = m_pCapability->min_wb_cct; /* 2000K */
+        int32_t maxCct = m_pCapability->max_wb_cct; /* 8000K */
+
+        if (cctVal >= minCct && cctVal <= maxCct) {
+            CDBG_HIGH("%s, cct value: %d", __func__, cctVal);
+            m_curCCT = cctVal;
+            updateParamEntry(KEY_QC_WB_MANUAL_CCT, cctStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_WHITE_BALANCE,
+                                          sizeof(cctVal),
+                                          &cctVal);
+        }
+    }
+
+    ALOGE("Invalid cct, value: %s",
+          (cctStr == NULL) ? "NULL" : cctStr);
+    return BAD_VALUE;
+}
+
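+/*===========================================================================
+ * FUNCTION   : updateCCTValue
+ *
+ * DESCRIPTION: update the cached manual WB CCT value and the corresponding
+ *              parameter entry when the backend reports a new value
+ *
+ * PARAMETERS :
+ *   @cct : current color temperature in Kelvin
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *==========================================================================*/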
+int32_t QCameraParameters::updateCCTValue(int32_t cct)
+{
+    if (cct != m_curCCT) {
+        CDBG_HIGH("update current cct value. old:%d, now:%d", m_curCCT, cct);
+        m_curCCT = cct;
+        set(KEY_QC_WB_MANUAL_CCT, cct);
+    }
+
+    return NO_ERROR;
+}
+
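+/*===========================================================================
+ * FUNCTION   : getAutoFlickerMode
+ *
+ * DESCRIPTION: get the antibanding mode to use when the app selects "auto",
+ *              read from the persist.camera.set.afd property
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int antibanding mode value
+ *==========================================================================*/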
+int QCameraParameters::getAutoFlickerMode()
+{
+    /* Enable advanced auto antibanding, where any of the following
+       options can be selected:
+           CAM_ANTIBANDING_MODE_AUTO
+           CAM_ANTIBANDING_MODE_AUTO_50HZ
+           CAM_ANTIBANDING_MODE_AUTO_60HZ
+       Currently set to the default read from the property below. */
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.set.afd", prop, "3");
+    return atoi(prop);
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value
+ *
+ * PARAMETERS :
+ *   @antiBandingStr : antibanding value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const char *antiBandingStr)
+{
+    if (antiBandingStr != NULL) {
+        int32_t value = lookupAttr(ANTIBANDING_MODES_MAP, PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP),
+                antiBandingStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting AntiBanding value %s", __func__, antiBandingStr);
+            updateParamEntry(KEY_ANTIBANDING, antiBandingStr);
+            if(value == CAM_ANTIBANDING_MODE_AUTO) {
+               value = getAutoFlickerMode();
+            }
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ANTIBANDING,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AntiBanding value: %s",
+          (antiBandingStr == NULL) ? "NULL" : antiBandingStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas
+ *
+ * PARAMETERS :
+ *   @focusAreasStr : focus areas value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const char *focusAreasStr)
+{
+    if (m_pCapability->max_num_focus_areas == 0 ||
+        focusAreasStr == NULL) {
+        CDBG("%s: Parameter string is null", __func__);
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    if (NULL == areas) {
+        ALOGE("%s: No memory for areas", __func__);
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(focusAreasStr,
+                              m_pCapability->max_num_focus_areas,
+                              areas,
+                              num_areas_found) != NO_ERROR) {
+        ALOGE("%s: Failed to parse the string: %s", __func__, focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        ALOGE("%s: invalid areas specified : %s", __func__, focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_FOCUS_AREAS, focusAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so that no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+    cam_roi_info_t af_roi_value;
+    memset(&af_roi_value, 0, sizeof(cam_roi_info_t));
+    af_roi_value.num_roi = (uint8_t)num_areas_found;
+    for (int i = 0; i < num_areas_found; i++) {
+        CDBG_HIGH("%s: FocusArea[%d] = (%d, %d, %d, %d)",
+              __func__, i, (areas[i].rect.top), (areas[i].rect.left),
+              (areas[i].rect.width), (areas[i].rect.height));
+
+        // Transform the coords from (-1000, 1000)
+        // to (0, previewWidth or previewHeight).
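+        // Worked example (illustrative): with previewWidth = 1280, a left edge
+        // of -1000 maps to 0, 0 maps to 640 and +1000 maps to 1280; widths and
+        // heights simply scale by previewSize / 2000.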
+        af_roi_value.roi[i].left =
+                (int32_t)(((double)areas[i].rect.left + 1000.0) *
+                    ((double)previewWidth / 2000.0));
+        af_roi_value.roi[i].top =
+                (int32_t)(((double)areas[i].rect.top + 1000.0) *
+                    ((double)previewHeight / 2000.0));
+        af_roi_value.roi[i].width =
+                (int32_t)((double)areas[i].rect.width *
+                    (double)previewWidth / 2000.0);
+        af_roi_value.roi[i].height =
+                (int32_t)((double)areas[i].rect.height *
+                    (double)previewHeight / 2000.0);
+        af_roi_value.weight[i] = areas[i].weight;
+    }
+    free(areas);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_AF_ROI,
+                                  sizeof(af_roi_value),
+                                  &af_roi_value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas value
+ *
+ * PARAMETERS :
+ *   @meteringAreasStr : metering areas value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const char *meteringAreasStr)
+{
+    if (m_pCapability->max_num_metering_areas == 0 ||
+        meteringAreasStr == NULL) {
+        CDBG("%s: Parameter string is null", __func__);
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    if (NULL == areas) {
+        ALOGE("%s: No memory for areas", __func__);
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(meteringAreasStr,
+                              m_pCapability->max_num_metering_areas,
+                              areas,
+                              num_areas_found) < 0) {
+        ALOGE("%s: Failed to parse the string: %s", __func__, meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        ALOGE("%s: invalid areas specified : %s", __func__, meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_METERING_AREAS, meteringAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so that no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+    cam_set_aec_roi_t aec_roi_value;
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+
+    memset(&aec_roi_value, 0, sizeof(cam_set_aec_roi_t));
+    if (num_areas_found > 0) {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_ON;
+        aec_roi_value.aec_roi_type = CAM_AEC_ROI_BY_COORDINATE;
+
+        for (int i = 0; i < num_areas_found; i++) {
+            CDBG_HIGH("%s: MeteringArea[%d] = (%d, %d, %d, %d)",
+                  __func__, i, (areas[i].rect.top), (areas[i].rect.left),
+                  (areas[i].rect.width), (areas[i].rect.height));
+
+            // Transform the coords from (-1000, 1000) to
+            // (0, previewWidth or previewHeight).
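+            // Each ROI is reported as its centre point, e.g. (illustrative) a
+            // full-frame (-1000, -1000, 2000, 2000) area with a 1000x1000
+            // preview maps to the centre pixel (500, 500).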
+            aec_roi_value.cam_aec_roi_position.coordinate[i].x =
+                    (uint32_t)((((double)areas[i].rect.left +
+                        (double)areas[i].rect.width / 2.0) + 1000.0) *
+                            (double)previewWidth / 2000.0);
+            aec_roi_value.cam_aec_roi_position.coordinate[i].y =
+                    (uint32_t)((((double)areas[i].rect.top +
+                        (double)areas[i].rect.height / 2.0) + 1000.0) *
+                            (double)previewHeight / 2000.0);
+        }
+    } else {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_OFF;
+    }
+    free(areas);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_AEC_ROI,
+                                  sizeof(aec_roi_value),
+                                  &aec_roi_value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneMode
+ *
+ * DESCRIPTION: set scene mode
+ *
+ * PARAMETERS :
+ *   @sceneModeStr : scene mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const char *sceneModeStr)
+{
+    if (sceneModeStr != NULL) {
+        int32_t value = lookupAttr(SCENE_MODES_MAP, PARAM_MAP_SIZE(SCENE_MODES_MAP), sceneModeStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting SceneMode %s", __func__, sceneModeStr);
+            updateParamEntry(KEY_SCENE_MODE, sceneModeStr);
+            if (m_bSensorHDREnabled) {
+              // In case of HW (sensor) HDR mode, do not also set it as the best shot mode.
+              CDBG("%s: H/W HDR mode enabled. Do not set Best Shot Mode", __func__);
+              return NO_ERROR;
+            }
+            int32_t rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                                CAM_INTF_PARM_BESTSHOT_MODE,
+                                                sizeof(value),
+                                                &value);
+            return rc;
+        }
+    }
+    ALOGE("%s: Invalid Secene Mode: %s",
+          __func__, (sceneModeStr == NULL) ? "NULL" : sceneModeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone AF algorithm
+ *
+ * PARAMETERS :
+ *   @selZoneAFStr : selectable zone AF algorithm value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const char *selZoneAFStr)
+{
+    if (selZoneAFStr != NULL) {
+        int32_t value = lookupAttr(FOCUS_ALGO_MAP, PARAM_MAP_SIZE(FOCUS_ALGO_MAP), selZoneAFStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting Selectable Zone AF value %s", __func__, selZoneAFStr);
+            updateParamEntry(KEY_QC_SELECTABLE_ZONE_AF, selZoneAFStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_FOCUS_ALGO_TYPE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("%s: Invalid selectable zone af value: %s",
+          __func__, (selZoneAFStr == NULL) ? "NULL" : selZoneAFStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isAEBracketEnabled
+ *
+ * DESCRIPTION: checks if AE bracketing is enabled
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : TRUE/FALSE
+ *==========================================================================*/
+bool QCameraParameters::isAEBracketEnabled()
+{
+    const char *str = get(KEY_QC_AE_BRACKET_HDR);
+    if (str != NULL) {
+        if (strcmp(str, AE_BRACKET_OFF) != 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket value
+ *
+ * PARAMETERS :
+ *   @aecBracketStr : AE bracket value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const char *aecBracketStr)
+{
+    if (aecBracketStr == NULL) {
+        CDBG("%s: setAEBracket with NULL value", __func__);
+        return NO_ERROR;
+    }
+
+    cam_exp_bracketing_t expBracket;
+    memset(&expBracket, 0, sizeof(expBracket));
+
+    int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+            aecBracketStr);
+    switch (value) {
+    case CAM_EXP_BRACKETING_ON:
+        {
+            CDBG_HIGH("%s, EXP_BRACKETING_ON", __func__);
+            const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+            if ((str_val != NULL) && (strlen(str_val)>0)) {
+                expBracket.mode = CAM_EXP_BRACKETING_ON;
+                m_bAeBracketingEnabled = true;
+                strlcpy(expBracket.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+                CDBG("%s: setting Exposure Bracketing value of %s",
+                      __func__, expBracket.values);
+            }
+            else {
+                /* Apps not set capture-burst-exposures, error case fall into bracketing off mode */
+                CDBG("%s: capture-burst-exposures not set, back to HDR OFF mode", __func__);
+                m_bAeBracketingEnabled = false;
+                expBracket.mode = CAM_EXP_BRACKETING_OFF;
+            }
+        }
+        break;
+    default:
+        {
+            CDBG_HIGH("%s, EXP_BRACKETING_OFF", __func__);
+            m_bAeBracketingEnabled = false;
+            expBracket.mode = CAM_EXP_BRACKETING_OFF;
+        }
+        break;
+    }
+
+    // Cache client AE bracketing configuration
+    memcpy(&m_AEBracketingClient, &expBracket, sizeof(cam_exp_bracketing_t));
+
+    /* save the value*/
+    updateParamEntry(KEY_QC_AE_BRACKET_HDR, aecBracketStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : set3ALock
+ *
+ * DESCRIPTION: enable/disable 3A lock.
+ *
+ * PARAMETERS :
+ *   @lockStr : lock value string.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::set3ALock(const char *lockStr)
+{
+    int32_t rc = NO_ERROR;
+    if (lockStr != NULL) {
+        int value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                lockStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG_HIGH("%s: Setting Lock lockStr =%s", __func__, lockStr);
+            if(initBatchUpdate(m_pParamBuf) < 0 ) {
+                ALOGE("%s:Failed to initialize group update table", __func__);
+                return BAD_TYPE;
+            }
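+            // "3A lock" here means locking AWB and AEC and pinning the focus
+            // mode; on unlock the focus mode stored in KEY_FOCUS_MODE is restored.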
+            int32_t focus_mode = CAM_FOCUS_MODE_AUTO; // safe default so an uninitialized value is never sent
+            if (value == 1) {
+                if (isUbiFocusEnabled() || isMultiTouchFocusEnabled()) {
+                    //For Ubi focus and Multi-touch Focus move focus to infinity.
+                    focus_mode = CAM_FOCUS_MODE_INFINITY;
+                } else if (isOptiZoomEnabled() || isfssrEnabled()){
+                    //For optizoom set focus as fixed.
+                    focus_mode = CAM_FOCUS_MODE_FIXED;
+                }
+            } else {
+                // retrieve previous focus value.
+                const char *focus = get(KEY_FOCUS_MODE);
+                int val = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focus);
+                if (val != NAME_NOT_FOUND) {
+                    focus_mode = (int32_t) val;
+                    CDBG("%s: focus mode %s", __func__, focus);
+                }
+            }
+            //Lock AWB
+            rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                    CAM_INTF_PARM_AWB_LOCK,
+                                    sizeof(value),
+                                    &value);
+            if (NO_ERROR != rc) {
+                return rc;
+            }
+            //Lock AEC
+            rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                    CAM_INTF_PARM_AEC_LOCK,
+                                    sizeof(value),
+                                    &value);
+            if (NO_ERROR != rc) {
+                return rc;
+            }
+            rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                    CAM_INTF_PARM_FOCUS_MODE,
+                                    sizeof(focus_mode),
+                                    &focus_mode);
+            if (NO_ERROR != rc) {
+                return rc;
+            }
+
+            rc = commitSetBatch();
+            if (rc != NO_ERROR) {
+                ALOGE("%s:Failed to commit batch", __func__);
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAndCommitZoom
+ *
+ * DESCRIPTION: set zoom.
+ *
+ * PARAMETERS :
+ *     @zoom_level : zoom level to set.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAndCommitZoom(int zoom_level)
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+    if (initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_ZOOM,
+                                sizeof(zoom_level),
+                                &zoom_level);
+
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set Flash value", __func__);
+    }
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isOptiZoomEnabled
+ *
+ * DESCRIPTION: checks whether optizoom is enabled
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - enabled, false - disabled
+ *
+ *==========================================================================*/
+bool QCameraParameters::isOptiZoomEnabled()
+{
+    if (m_bOptiZoomOn) {
+        uint8_t zoom_level = (uint8_t) getInt(CameraParameters::KEY_ZOOM);
+        cam_opti_zoom_t *opti_zoom_settings_need =
+                &(m_pCapability->opti_zoom_settings_need);
+        uint8_t zoom_threshold = opti_zoom_settings_need->zoom_threshold;
+        CDBG_HIGH("%s: current zoom level =%d & zoom_threshold =%d",
+                __func__, zoom_level, zoom_threshold);
+
+        if (zoom_level >= zoom_threshold) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : isDifferentFlipZSL
+ *
+ * DESCRIPTION: checks whether live preview and snapshot flip differ
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - different, false - equal
+ *
+ *==========================================================================*/
+bool QCameraParameters::isDifferentFlipZSL()
+{
+    return (getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) !=
+        getFlipMode(CAM_STREAM_TYPE_PREVIEW))
+            && m_bZslMode;
+}
+
+/*===========================================================================
+ * FUNCTION   : isMultiTouchFocusEnabled
+ *
+ * DESCRIPTION: checks whether Multi-touch Focus is enabled
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - enabled, false - disabled
+ *
+ *==========================================================================*/
+bool QCameraParameters::isMultiTouchFocusEnabled()
+{
+    if (m_bMultiTouchFocusOn &&
+            (m_MTFBracketInfo.burst_count > 1 || m_currNumBufMTF > 1)) {
+       return true;
+    }
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitAFBracket
+ *
+ * DESCRIPTION: commit AF Bracket.
+ *
+ * PARAMETERS :
+ *   @AFBracket : AF bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitAFBracket(cam_af_bracketing_t afBracket)
+{
+
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+            CAM_INTF_PARM_FOCUS_BRACKETING,
+            sizeof(afBracket),
+            &afBracket);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to commit batch", __func__);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitMTFBracket
+ *
+ * DESCRIPTION: commit multi-touch focus Bracket.
+ *
+ * PARAMETERS :
+ *   @mtfBracket : multi-touch focus bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitMTFBracket(cam_af_bracketing_t mtfBracket)
+{
+    int32_t rc = NO_ERROR;
+    if (initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+            CAM_INTF_PARM_MULTI_TOUCH_FOCUS_BRACKETING,
+            sizeof(mtfBracket),
+            &mtfBracket);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to commit batch", __func__);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetMultiTouchFocusParam
+ *
+ * DESCRIPTION: reset MTF params to invalid state.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *==========================================================================*/
+void QCameraParameters::resetMultiTouchFocusParam()
+{
+    m_MTFBracketInfo.burst_count = 0;
+    for (int i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+        m_MTFBracketInfo.focus_steps[i] = -1;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : commitFlashBracket
+ *
+ * DESCRIPTION: commit Flash Bracket.
+ *
+ * PARAMETERS :
+ *   @flashBracket : flash bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitFlashBracket(cam_flash_bracketing_t flashBracket)
+{
+    CDBG_HIGH("%s: E",__func__);
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+            CAM_INTF_PARM_FLASH_BRACKETING,
+            sizeof(flashBracket),
+            &flashBracket);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to commit batch", __func__);
+    }
+
+    CDBG_HIGH("%s: X",__func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAFBracket
+ *
+ * DESCRIPTION: set AF bracket value
+ *
+ * PARAMETERS :
+ *   @afBracketStr : AF bracket value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const char *afBracketStr)
+{
+    CDBG_HIGH("%s: afBracketStr =%s",__func__,afBracketStr);
+
+    if(afBracketStr != NULL) {
+        int value = lookupAttr(AF_BRACKETING_MODES_MAP, PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP),
+                afBracketStr);
+        if (value != NAME_NOT_FOUND) {
+            m_bAFBracketingOn = (value != 0);
+            updateParamEntry(KEY_QC_AF_BRACKET, afBracketStr);
+
+            return NO_ERROR;
+        }
+    }
+
+    ALOGE("Invalid af bracket value: %s",
+        (afBracketStr == NULL) ? "NULL" : afBracketStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setChromaFlash
+ *
+ * DESCRIPTION: set chroma flash value
+ *
+ * PARAMETERS :
+ *   @chromaFlashStr : chroma flash value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const char *chromaFlashStr)
+{
+    CDBG_HIGH("%s: chromaFlashStr =%s",__func__,chromaFlashStr);
+    if(chromaFlashStr != NULL) {
+        int value = lookupAttr(CHROMA_FLASH_MODES_MAP, PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP),
+                chromaFlashStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bChromaFlashOn = (value != 0);
+            updateParamEntry(KEY_QC_CHROMA_FLASH, chromaFlashStr);
+
+            return NO_ERROR;
+        }
+    }
+
+    ALOGE("Invalid chroma flash value: %s",
+        (chromaFlashStr == NULL) ? "NULL" : chromaFlashStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOptiZoom
+ *
+ * DESCRIPTION: set opti zoom value
+ *
+ * PARAMETERS :
+ *   @optiZoomStr : opti zoom value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const char *optiZoomStr)
+{
+    CDBG_HIGH("%s: optiZoomStr =%s",__func__,optiZoomStr);
+    if(optiZoomStr != NULL) {
+        int value = lookupAttr(OPTI_ZOOM_MODES_MAP, PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP),
+                optiZoomStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bOptiZoomOn = (value != 0);
+            updateParamEntry(KEY_QC_OPTI_ZOOM, optiZoomStr);
+
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid opti zoom value: %s",
+        (optiZoomStr == NULL) ? "NULL" : optiZoomStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFssr
+ *
+ * DESCRIPTION: set fssr value
+ *
+ * PARAMETERS :
+ *   @fssrStr : fssr value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFssr(const char *fssrStr)
+{
+    if (fssrStr != NULL) {
+        CDBG_HIGH("%s: fssrStr = %s", __func__, fssrStr);
+        int value = lookupAttr(FSSR_MODES_MAP, PARAM_MAP_SIZE(FSSR_MODES_MAP), fssrStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bFssrOn = (value != 0);
+            updateParamEntry(KEY_QC_FSSR, fssrStr);
+            return NO_ERROR;
+        }
+    }
+
+    CDBG_HIGH("Invalid fssr value: %s", (fssrStr == NULL) ? "NULL" : fssrStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait value
+ *
+ * PARAMETERS :
+ *   @truePortraitStr : true portrait value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const char *truePortraitStr)
+{
+    CDBG_HIGH("%s: truePortraitStr =%s",__func__,truePortraitStr);
+    if(truePortraitStr != NULL) {
+        int value = lookupAttr(TRUE_PORTRAIT_MODES_MAP, PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP),
+                truePortraitStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bTruePortraitOn = (value != 0);
+            updateParamEntry(KEY_QC_TRUE_PORTRAIT, truePortraitStr);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid true portrait value: %s",
+        (truePortraitStr == NULL) ? "NULL" : truePortraitStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSeeMore
+ *
+ * DESCRIPTION: set see more value
+ *
+ * PARAMETERS :
+ *   @seeMoreStr : see more value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const char *seeMoreStr)
+{
+    CDBG_HIGH("%s: seeMoreStr =%s",__func__,seeMoreStr);
+    if(seeMoreStr != NULL) {
+        int value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), seeMoreStr);
+        if(value != NAME_NOT_FOUND) {
+            m_bSeeMoreOn = (value != 0);
+            updateParamEntry(KEY_QC_SEE_MORE, seeMoreStr);
+
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid see more value: %s",
+        (seeMoreStr == NULL) ? "NULL" : seeMoreStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracketing
+ *
+ * DESCRIPTION: enables AE bracketing
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracketing()
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+            CAM_INTF_PARM_HDR,
+            sizeof(m_AEBracketingClient),
+            &m_AEBracketingClient);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update AE bracketing", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to configure AE bracketing", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRAEBracket
+ *
+ * DESCRIPTION: enables AE bracketing for HDR
+ *
+ * PARAMETERS :
+ *   @hdrBracket : HDR bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRAEBracket(cam_exp_bracketing_t hdrBracket)
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+            CAM_INTF_PARM_HDR,
+            sizeof(hdrBracket),
+            &hdrBracket);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to configure HDR bracketing", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAEBracket
+ *
+ * DESCRIPTION: stops AE bracketing by restoring the bracketing-off configuration
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::stopAEBracket()
+{
+  cam_exp_bracketing_t bracketing;
+
+  memset(&bracketing, 0, sizeof(bracketing));
+  bracketing.mode = CAM_EXP_BRACKETING_OFF;
+
+  return setHDRAEBracket(bracketing);
+}
+
+/*===========================================================================
+ * FUNCTION   : updateFlash
+ *
+ * DESCRIPTION: restores client flash configuration or disables flash
+ *
+ * PARAMETERS :
+ *   @commitSettings : flag indicating whether settings need to be committed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFlash(bool commitSettings)
+{
+    int32_t rc = NO_ERROR;
+    int32_t value;
+
+    if (commitSettings) {
+      if(initBatchUpdate(m_pParamBuf) < 0 ) {
+          ALOGE("%s:Failed to initialize group update table", __func__);
+          return BAD_TYPE;
+      }
+    }
+
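+    // Force flash off while HDR or any bracketing/advanced feature is active, force it on
+    // for Chroma Flash, otherwise follow the client value; the daemon is only updated when
+    // the effective value actually changes.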
+    if (isHDREnabled() || m_bAeBracketingEnabled || m_bAFBracketingOn ||
+          m_bOptiZoomOn || m_bFssrOn || m_bSensorHDREnabled) {
+        value = CAM_FLASH_MODE_OFF;
+    } else if (m_bChromaFlashOn) {
+        value = CAM_FLASH_MODE_ON;
+    } else {
+        value = mFlashValue;
+    }
+
+    if (value != mFlashDaemonValue) {
+
+        ALOGV("%s: Setting Flash value %d", __func__, value);
+        rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                      CAM_INTF_PARM_LED_MODE,
+                                      sizeof(value),
+                                      &value);
+        if (rc != NO_ERROR) {
+            rc = BAD_VALUE;
+            ALOGE("%s:Failed to set led mode", __func__);
+            return rc;
+        }
+
+        mFlashDaemonValue = value;
+    } else {
+        rc = NO_ERROR;
+    }
+
+    if (commitSettings) {
+      rc = commitSetBatch();
+      if (rc != NO_ERROR) {
+          ALOGE("%s:Failed to commit flash settings", __func__);
+          return rc;
+      }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction value
+ *
+ * PARAMETERS :
+ *   @redeyeStr : red eye reduction value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const char *redeyeStr)
+{
+    if (redeyeStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), redeyeStr);
+        if (value != NAME_NOT_FOUND) {
+            CDBG("%s: Setting RedEye Reduce value %s", __func__, redeyeStr);
+            updateParamEntry(KEY_QC_REDEYE_REDUCTION, redeyeStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_REDEYE_REDUCTION,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("%s: Invalid RedEye Reduce value: %s",
+          __func__, (redeyeStr == NULL) ? "NULL" : redeyeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getWaveletDenoiseProcessPlate
+ *
+ * DESCRIPTION: query wavelet denoise process plate
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : WNR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t QCameraParameters::getWaveletDenoiseProcessPlate()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    cam_denoise_process_type_t processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
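+    // persist.denoise.process.plates (0..3) can override the default CbCr-only process plate.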
+    property_get("persist.denoise.process.plates", prop, "");
+    if (strlen(prop) > 0) {
+        switch(atoi(prop)) {
+        case 0:
+            processPlate = CAM_WAVELET_DENOISE_YCBCR_PLANE;
+            break;
+        case 1:
+            processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+            break;
+        case 2:
+            processPlate = CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+            break;
+        case 3:
+            processPlate = CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
+            break;
+        default:
+            processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+            break;
+        }
+    }
+    return processPlate;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value
+ *
+ * PARAMETERS :
+ *   @wnrStr : wavelet denoise value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const char *wnrStr)
+{
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) == 0){
+        CDBG_HIGH("%s: WNR is not supported",__func__);
+        return NO_ERROR;
+    }
+
+    if (wnrStr != NULL) {
+        int value = lookupAttr(DENOISE_ON_OFF_MODES_MAP, PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP),
+                wnrStr);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_DENOISE, wnrStr);
+
+            cam_denoise_param_t temp;
+            memset(&temp, 0, sizeof(temp));
+            temp.denoise_enable = (uint8_t) value;
+            m_bWNROn = (value != 0);
+            if (m_bWNROn) {
+                temp.process_plates = getWaveletDenoiseProcessPlate();
+            }
+            CDBG("%s: Denoise enable=%d, plates=%d",
+                  __func__, temp.denoise_enable, temp.process_plates);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_WAVELET_DENOISE,
+                                          sizeof(temp),
+                                          &temp);
+        }
+    }
+    ALOGE("%s: Invalid Denoise value: %s", __func__, (wnrStr == NULL) ? "NULL" : wnrStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFrameRateMode
+ *
+ * DESCRIPTION: set preview frame rate mode
+ *
+ * PARAMETERS :
+ *   @mode    : preview frame rate mode
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setPreviewFrameRateMode(const char *mode)
+{
+    set(KEY_QC_PREVIEW_FRAME_RATE_MODE, mode);
+}
+
+/*===========================================================================
+ * FUNCTION   : getPreviewFrameRateMode
+ *
+ * DESCRIPTION: get preview frame rate mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : preview frame rate mode string
+ *==========================================================================*/
+const char *QCameraParameters::getPreviewFrameRateMode() const
+{
+    return get(KEY_QC_PREVIEW_FRAME_RATE_MODE);
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchIndexAec
+ *
+ * DESCRIPTION: set touch index AEC
+ *
+ * PARAMETERS :
+ *   @x,y     : touch index coordinates for AEC
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setTouchIndexAec(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_QC_TOUCH_INDEX_AEC, str);
+}
+
+/*===========================================================================
+ * FUNCTION   : getTouchIndexAec
+ *
+ * DESCRIPTION: get touch index AEC
+ *
+ * PARAMETERS :
+ *   @x,y     : [output] touch index coordinates for AEC
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getTouchIndexAec(int *x, int *y)
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the values at -1x-1
+    const char *p = get(KEY_QC_TOUCH_INDEX_AEC);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchIndexAf
+ *
+ * DESCRIPTION: set touch index AF
+ *
+ * PARAMETERS :
+ *   @x,y     : touch index coordinates for AF
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setTouchIndexAf(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_QC_TOUCH_INDEX_AF, str);
+}
+
+/*===========================================================================
+ * FUNCTION   : getTouchIndexAf
+ *
+ * DESCRIPTION: get touch index AF
+ *
+ * PARAMETERS :
+ *   @x,y     : [output] touch index coordinates for AF
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getTouchIndexAf(int *x, int *y)
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the values at -1x-1
+    const char *p = get(KEY_QC_TOUCH_INDEX_AF);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamRotation
+ *
+ * DESCRIPTION: get stream rotation and update the post-processing feature config
+ *
+ * PARAMETERS :
+ *   @streamType    : [input] stream type
+ *   @featureConfig : [output] feature config updated with the rotation setting
+ *   @dim           : [input/output] stream dimension, swapped for 90/270 rotation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamRotation(cam_stream_type_t streamType,
+                                            cam_pp_feature_config_t &featureConfig,
+                                            cam_dimension_t &dim)
+{
+    int32_t ret = NO_ERROR;
+    int rotationParam = getInt(KEY_QC_VIDEO_ROTATION);
+    featureConfig.rotation = ROTATE_0;
+    int swapDim = 0;
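+    // Only the video stream honours KEY_QC_VIDEO_ROTATION here; rotation is requested via
+    // the CPP feature mask, and 90/270 degree rotations also swap the stream width and height.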
+    switch (streamType) {
+    case CAM_STREAM_TYPE_VIDEO:
+        switch(rotationParam) {
+        case 90:
+            featureConfig.feature_mask |= CAM_QCOM_FEATURE_CPP;
+            featureConfig.rotation = ROTATE_90;
+            swapDim = 1;
+            break;
+        case 180:
+            featureConfig.feature_mask |= CAM_QCOM_FEATURE_CPP;
+            featureConfig.rotation = ROTATE_180;
+            break;
+        case 270:
+            featureConfig.feature_mask |= CAM_QCOM_FEATURE_CPP;
+            featureConfig.rotation = ROTATE_270;
+            swapDim = 1;
+            break;
+        default:
+            featureConfig.rotation = ROTATE_0;
+        }
+        break;
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_POSTVIEW:
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        break;
+    }
+
+    if (swapDim > 0) {
+        int w = 0;
+        w = dim.width;
+        dim.width = dim.height;
+        dim.height = w;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamFormat
+ *
+ * DESCRIPTION: get stream format by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @format     : [output] stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamFormat(cam_stream_type_t streamType,
+                                            cam_format_t &format)
+{
+    int32_t ret = NO_ERROR;
+
+    format = CAM_FORMAT_MAX;
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        format = mPreviewFormat;
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        if ( mPictureFormat == CAM_FORMAT_YUV_422_NV16 ) {
+            format = CAM_FORMAT_YUV_422_NV16;
+        } else {
+            char prop[PROPERTY_VALUE_MAX];
+            int snapshotFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.snap.format", prop, "0");
+            snapshotFormat = atoi(prop);
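+            // persist.camera.snap.format set to 1 selects YUV 4:2:2 (NV61); any other value
+            // falls back to the default YUV 4:2:0 (NV21).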
+            if(snapshotFormat == 1) {
+                format = CAM_FORMAT_YUV_422_NV61;
+            } else {
+                format = CAM_FORMAT_YUV_420_NV21;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        format = CAM_FORMAT_YUV_420_NV12;
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        if (mPictureFormat >= CAM_FORMAT_YUV_RAW_8BIT_YUYV) {
+            format = (cam_format_t)mPictureFormat;
+        } else {
+            char raw_format[PROPERTY_VALUE_MAX];
+            int rawFormat;
+            memset(raw_format, 0, sizeof(raw_format));
+            /*Default value is CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG*/
+            property_get("persist.camera.raw.format", raw_format, "16");
+            rawFormat = atoi(raw_format);
+            format = (cam_format_t)rawFormat;
+            CDBG_HIGH("%s: Raw stream format %d bundled with snapshot",
+                   __func__, format);
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        break;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFlipMode
+ *
+ * DESCRIPTION: get flip mode
+ *
+ * PARAMETERS :
+ *   @type : [input] stream type
+ *
+ * RETURN     : int type of flip mode
+ *              0 - no flip
+ *              1 - FLIP_H
+ *              2 - FLIP_V
+ *              3 - FLIP_H | FLIP_V
+ *==========================================================================*/
+int QCameraParameters::getFlipMode(cam_stream_type_t type)
+{
+    const char *str = NULL;
+    int flipMode = 0; // no flip
+
+    switch(type){
+    case CAM_STREAM_TYPE_PREVIEW:
+        str = get(KEY_QC_PREVIEW_FLIP);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        str = get(KEY_QC_VIDEO_FLIP);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        str = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+        break;
+    default:
+        CDBG("%s: No flip mode for stream type %d", __func__, type);
+        break;
+    }
+
+    if(str != NULL){
+        //Need to give the corresponding flip value based on the flip mode string
+        int value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+        if(value != NAME_NOT_FOUND)
+            flipMode = value;
+    }
+
+    CDBG("%s: the flip mode of stream type %d is %d.", __func__, type, flipMode);
+    return flipMode;
+}
+
+/*===========================================================================
+ * FUNCTION   : isSnapshotFDNeeded
+ *
+ * DESCRIPTION: check whether Face Detection Metadata is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : bool type of status
+ *              true  -- FD metadata is needed
+ *              false -- not needed
+ *==========================================================================*/
+bool QCameraParameters::isSnapshotFDNeeded()
+{
+    return getInt(KEY_QC_SNAPSHOT_FD_DATA);
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamDimension
+ *
+ * DESCRIPTION: get stream dimension by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @dim        : [output] stream dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamDimension(cam_stream_type_t streamType,
+                                               cam_dimension_t &dim)
+{
+    int32_t ret = NO_ERROR;
+    memset(&dim, 0, sizeof(cam_dimension_t));
+
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        getPreviewSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        getPreviewSize(&dim.width, &dim.height);
+        //For CTS testPreviewPictureSizesCombination
+        int cur_pic_width, cur_pic_height;
+        CameraParameters::getPictureSize(&cur_pic_width, &cur_pic_height);
+        {
+            int minDimension;
+            if((dim.width*dim.height) > (cur_pic_width*cur_pic_height)) {
+                minDimension = (cur_pic_width * cur_pic_height);
+            } else {
+                minDimension = (dim.width*dim.height);
+            }
+
+            double exp_aspectRatio = (double)cur_pic_width / (double)cur_pic_height;
+
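+            // When the smaller of the preview and picture areas exceeds the secondary ISP
+            // scaler limit, pick a supported preview size below the limit whose aspect ratio
+            // matches the picture size, or the first size under the limit as a fallback.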
+            if (minDimension >= ISP_SEC_SCALAR_MAX_LIMIT) {
+                size_t i;
+                double preview_aspectRatio = 0.0f;
+                double preview_height = 0.0f;
+                double preview_width = 0.0f;
+                for (i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+                    if ((m_pCapability->preview_sizes_tbl[i].width *
+                            m_pCapability->preview_sizes_tbl[i].height)
+                            < ISP_SEC_SCALAR_MAX_LIMIT) {
+                        preview_height = (double)m_pCapability->preview_sizes_tbl[i].height;
+                        preview_width = (double)m_pCapability->preview_sizes_tbl[i].width;
+                        preview_aspectRatio = preview_width / preview_height;
+                        if (fabs(preview_aspectRatio - exp_aspectRatio) <= ASPECT_TOLERANCE) {
+                            dim.width = m_pCapability->preview_sizes_tbl[i].width;
+                            dim.height = m_pCapability->preview_sizes_tbl[i].height;
+                            break;
+                        }
+                    }
+                }
+
+                if (i == m_pCapability->preview_sizes_tbl_cnt) {
+                    // Fallback to next supported preview size less than scalar limitation
+                    for (i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+                        if ((m_pCapability->preview_sizes_tbl[i].width *
+                                m_pCapability->preview_sizes_tbl[i].height)
+                                <= ISP_SEC_SCALAR_MAX_LIMIT) {
+                            dim.width = m_pCapability->preview_sizes_tbl[i].width;
+                            dim.height = m_pCapability->preview_sizes_tbl[i].height;
+                            break;
+                        }
+                    }
+                    break;
+                }
+                break;
+            }
+        }
+
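+        // If the preview and picture orientations disagree, fall back to the first supported
+        // preview size that fits within the picture size, or to the picture size itself.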
+        if ((dim.width > cur_pic_width && dim.height < cur_pic_height)
+                || (dim.width < cur_pic_width && dim.height > cur_pic_height)) {
+            size_t k;
+            for (k = 0; k < m_pCapability->preview_sizes_tbl_cnt; ++k) {
+                if (cur_pic_width >= m_pCapability->preview_sizes_tbl[k].width
+                    && cur_pic_height >= m_pCapability->preview_sizes_tbl[k].height) {
+                    dim.width = m_pCapability->preview_sizes_tbl[k].width;
+                    dim.height = m_pCapability->preview_sizes_tbl[k].height;
+                    CDBG("%s:re-set size, pic_width=%d, pic_height=%d, pre_width=%d,pre_height=%d",
+                            __func__,cur_pic_width,cur_pic_height, dim.width, dim.height);
+                    break;
+                 }
+            }
+            if (k == m_pCapability->preview_sizes_tbl_cnt) {
+                // Assign the Picture size to Preview size
+                dim.width = cur_pic_width;
+                dim.height = cur_pic_height;
+                CDBG("%s: re-set size, pic_width=%d, pic_height=%d, pre_width=%d, pre_height=%d.",
+                        __func__,cur_pic_width, cur_pic_height, dim.width, dim.height);
+           }
+
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        if (getRecordingHintValue() == true) {
+            // live snapshot
+            getLiveSnapshotSize(dim);
+        } else {
+            getPictureSize(&dim.width, &dim.height);
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        getVideoSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        //dim = m_pCapability->raw_dim;
+        getRawSize(dim);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        dim.width = (int32_t)sizeof(cam_metadata_info_t);
+        dim.height = 1;
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        ALOGE("%s: no dimension for unsupported stream type %d",
+              __func__, streamType);
+        ret = BAD_VALUE;
+        break;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPreviewHalPixelFormat
+ *
+ * DESCRIPTION: get preview HAL pixel format
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : HAL pixel format
+ *==========================================================================*/
+int QCameraParameters::getPreviewHalPixelFormat() const
+{
+    int32_t halPixelFormat;
+
+    switch (mPreviewFormat) {
+    case CAM_FORMAT_YUV_420_NV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+    default:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    }
+    CDBG_HIGH("%s: format %d\n", __func__, halPixelFormat);
+    return halPixelFormat;
+}
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get thumbnail size
+ *
+ * PARAMETERS :
+ *   @width, height : [output] thumbnail width and height
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getThumbnailSize(int *width, int *height) const
+{
+    *width = getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    *height = getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBurstInterval
+ *
+ * DESCRIPTION: get ZSL burst interval setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL burst interval value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBurstInterval()
+{
+    int interval = getInt(KEY_QC_ZSL_BURST_INTERVAL);
+    if (interval < 0) {
+        interval = 1;
+    }
+    return (uint8_t)interval;
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLQueueDepth
+ *
+ * DESCRIPTION: get ZSL queue depth
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL queue depth value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLQueueDepth()
+{
+    int qdepth = getInt(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (qdepth < 0) {
+        qdepth = 2;
+    }
+    return (uint8_t)qdepth;
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBackLookCount
+ *
+ * DESCRIPTION: get ZSL backlook count setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL backlook count value
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBackLookCount()
+{
+    int look_back = getInt(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (look_back < 0) {
+        look_back = 2;
+    }
+    return (uint8_t)look_back;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMaxUnmatchedFramesInQueue
+ *
+ * DESCRIPTION: get the maximum number of unmatched frames allowed in the ZSL queue
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : max allowed unmatched frames count
+ *==========================================================================*/
+uint8_t QCameraParameters::getMaxUnmatchedFramesInQueue()
+{
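+    // Allow the post-processing minimum plus one extra unmatched frame per 10 burst shots.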
+    return (uint8_t)(m_pCapability->min_num_pp_bufs + (m_nBurstNum / 10));
+}
+
+/*===========================================================================
+ * FUNCTION   : getMinPPBufs
+ *
+ * DESCRIPTION: get minimum extra buffers needed by pproc which HAL has to allocate
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : min pp buf count
+ *==========================================================================*/
+int QCameraParameters::getMinPPBufs()
+{
+    // Ideally we should be getting this from m_pCapability->min_num_pp_bufs. But as of now
+    // the number reported by the backend is wrong: it simply adds up the ppbuf requirements
+    // of each module irrespective of whether it is connected or not. This has to be enhanced
+    // later to get the exact requirement from the backend.
+    return MIN_PP_BUF_CNT;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::setRecordingHintValue(int32_t value)
+{
+    CDBG("%s: VideoHint = %d", __func__, value);
+    bool newValue = (value > 0)? true : false;
+
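+    // Changing the recording hint requires a stream restart; remember the new value and
+    // flag the restart before pushing the hint to the backend.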
+    if ( m_bRecordingHint != newValue ) {
+        m_bNeedRestart = true;
+        m_bRecordingHint_new = newValue;
+    } else {
+        m_bRecordingHint_new = m_bRecordingHint;
+    }
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_RECORDING_HINT,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfSnapshots
+ *
+ * DESCRIPTION: get number of snapshots per shutter
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of snapshots per shutter
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfSnapshots()
+{
+    uint8_t numOfSnapshot = 1;
+    int val = getInt(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER);
+    if (0 < val) {
+        numOfSnapshot = (uint8_t)val;
+    }
+
+    /* update the count for refocus */
+    if (isUbiRefocus()) {
+        numOfSnapshot = (uint8_t) (numOfSnapshot + UfOutputCount());
+    }
+
+    return numOfSnapshot;
+}
+
+/*===========================================================================
+ * FUNCTION   : MTFOutputCount
+ *
+ * DESCRIPTION: find # of output for MTF feature
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : # of output for MTF feature
+ *==========================================================================*/
+uint32_t QCameraParameters::MTFOutputCount()
+{
+    return (uint32_t) (m_currNumBufMTF + 1);
+}
+
+/*===========================================================================
+ * FUNCTION   : getBurstCountForAdvancedCapture
+ *
+ * DESCRIPTION: get burst count for advanced capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of snapshots required for advanced capture.
+ *==========================================================================*/
+uint8_t QCameraParameters::getBurstCountForAdvancedCapture()
+{
+    uint32_t burstCount = 0;
+    if (isUbiFocusEnabled()) {
+        //number of snapshots required for Ubi Focus.
+        burstCount = m_pCapability->ubifocus_af_bracketing_need.burst_count;
+    } else if (isOptiZoomEnabled()) {
+        //number of snapshots required for Opti Zoom.
+        burstCount = m_pCapability->opti_zoom_settings_need.burst_count;
+    } else if (isfssrEnabled()) {
+        //number of snapshots required for fssr.
+        burstCount = m_pCapability->fssr_settings_need.burst_count;
+    } else if (isChromaFlashEnabled()) {
+        //number of snapshots required for Chroma Flash.
+        //TODO: remove hardcoded value, add in capability.
+        burstCount = 2;
+    } else if (isMultiTouchFocusEnabled()) {
+        //number of snapshots required by multi-touch focus.
+        burstCount = m_currNumBufMTF;
+    } else if (isHDREnabled()) {
+        //number of snapshots required for HDR.
+        burstCount = m_pCapability->hdr_bracketing_setting.num_frames;
+    } else if (isAEBracketEnabled()) {
+      burstCount = 0;
+      const char *str_val = m_AEBracketingClient.values;
+      if ((str_val != NULL) && (strlen(str_val) > 0)) {
+          char prop[PROPERTY_VALUE_MAX];
+          memset(prop, 0, sizeof(prop));
+          strcpy(prop, str_val);
+          char *saveptr = NULL;
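+          // The burst count equals the number of comma-separated exposure values supplied
+          // by the client.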
+          char *token = strtok_r(prop, ",", &saveptr);
+          while (token != NULL) {
+              token = strtok_r(NULL, ",", &saveptr);
+              burstCount++;
+          }
+      }
+    }
+
+    if (burstCount <= 0) {
+        burstCount = 1;
+    }
+    return (uint8_t)burstCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraHDRInBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra input buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDRInBufsIfNeeded()
+{
+    unsigned int numOfBufs = 0;
+
+    if (isHDREnabled()) {
+        numOfBufs += m_pCapability->hdr_bracketing_setting.num_frames;
+        if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+            numOfBufs++;
+        }
+        numOfBufs--; // Only additional buffers need to be returned
+    }
+
+    return (uint8_t)(numOfBufs * getBurstNum());
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraHDROutBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra output buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDROutBufsIfNeeded()
+{
+    int numOfBufs = 0;
+
+    if (isHDREnabled() && isHDR1xFrameEnabled()) {
+        numOfBufs++;
+    }
+
+    return (uint8_t)(numOfBufs * getBurstNum());
+}
+
+/*===========================================================================
+ * FUNCTION   : getBurstNum
+ *
+ * DESCRIPTION: get burst number of snapshot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of burst
+ *==========================================================================*/
+uint8_t QCameraParameters::getBurstNum()
+{
+    return m_nBurstNum;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get jpeg encoding quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg encoding quality
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegQuality()
+{
+    int quality = getInt(KEY_JPEG_QUALITY);
+    if (quality < 0) {
+        quality = 85; // set to default quality value
+    }
+    return (uint32_t)quality;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getJpegRotation
+ *
+ * DESCRIPTION: get rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation value
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegRotation() {
+    int rotation = getInt(KEY_ROTATION);
+    if (rotation < 0) {
+        rotation = 0;
+    }
+    return (uint32_t)rotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : getEffectValue
+ *
+ * DESCRIPTION: get effect value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : effect value
+ *==========================================================================*/
+int32_t QCameraParameters::getEffectValue()
+{
+    uint32_t cnt = 0;
+    const char *effect = get(KEY_EFFECT);
+    if (effect) {
+        while (NULL != EFFECT_MODES_MAP[cnt].desc) {
+            if (!strcmp(EFFECT_MODES_MAP[cnt].desc, effect)) {
+                return EFFECT_MODES_MAP[cnt].val;
+            }
+            cnt++;
+        }
+    } else {
+        ALOGE("%s: Missing effect value", __func__);
+    }
+    return CAM_EFFECT_MODE_OFF;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse GPS coordinate string
+ *
+ * PARAMETERS :
+ *   @coord_str : [input] coordinate string
+ *   @coord     : [output]  ptr to struct to store coordinate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+    if(coord == NULL) {
+        ALOGE("%s: error, invalid argument coord == NULL", __func__);
+        return BAD_VALUE;
+    }
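+    // Convert the decimal-degree string into degree/minute/second rationals; the sign is
+    // dropped here and encoded separately via the latitude/longitude reference field.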
+    double degF = atof(coord_str);
+    if (degF < 0) {
+        degF = -degF;
+    }
+    double minF = (degF - (double)(int) degF) * 60.0;
+    double secF = (minF - (double)(int) minF) * 60.0;
+
+    getRational(&coord[0], (int)degF, 1);
+    getRational(&coord[1], (int)minF, 1);
+    getRational(&coord[2], (int)(secF * 10000.0), 10000);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifDateTime
+ *
+ * DESCRIPTION: query exif date time
+ *
+ * PARAMETERS :
+ *   @dateTime : String to store exif date time.
+ *               Should be left unchanged in case of error.
+ *   @subsecTime : subsecond time
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifDateTime(String8 &dateTime, String8 &subsecTime)
+{
+    int32_t ret = NO_ERROR;
+    struct timeval tv;
+    //get time and date from system
+    time_t rawtime = time(NULL);
+    if (((time_t) 0) <= rawtime) {
+        struct tm *timeinfo = localtime (&rawtime);
+        gettimeofday(&tv, NULL);
+        if (NULL != timeinfo) {
+            //Write datetime according to EXIF Spec
+            //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+            dateTime = String8::format("%04d:%02d:%02d %02d:%02d:%02d",
+                    timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+                    timeinfo->tm_mday, timeinfo->tm_hour,
+                    timeinfo->tm_min, timeinfo->tm_sec);
+
+            subsecTime = String8::format("%06ld", tv.tv_usec);
+        } else {
+            ALOGE("%s: localtime() error: %s", __func__, strerror(errno));
+            ret = UNKNOWN_ERROR;
+        }
+    } else {
+        ALOGE("%s: time() error: %s", __func__, strerror(errno));
+        ret = UNKNOWN_ERROR;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRational
+ *
+ * DESCRIPTION: compose rational struct
+ *
+ * PARAMETERS :
+ *   @rat     : ptr to struct to store rational info
+ *   @num     : numerator of the rational
+ *   @denom   : denominator of the rational
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRational(rat_t *rat, int num, int denom)
+{
+    if ((0 > num) || (0 > denom)) {
+        ALOGE("%s: Negative values", __func__);
+        return BAD_VALUE;
+    }
+    if (NULL == rat) {
+        ALOGE("%s: NULL rat input", __func__);
+        return BAD_VALUE;
+    }
+    rat->num = (uint32_t)num;
+    rat->denom = (uint32_t)denom;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifFocalLength
+ *
+ * DESCRIPTION: get exif focal length
+ *
+ * PARAMETERS :
+ *   @focalLength : ptr to rational struct to store focal length
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifFocalLength(rat_t *focalLength)
+{
+    int focalLengthValue =
+        (int)(getFloat(QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+    return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifIsoSpeed
+ *
+ * DESCRIPTION: get exif ISO speed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ISO speed value
+ *==========================================================================*/
+uint16_t QCameraParameters::getExifIsoSpeed()
+{
+    uint16_t isoSpeed = 0;
+    const char *iso_str = get(QCameraParameters::KEY_QC_ISO_MODE);
+    int iso_index = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), iso_str);
+    switch (iso_index) {
+    case CAM_ISO_MODE_AUTO:
+        isoSpeed = 0;
+        break;
+    case CAM_ISO_MODE_DEBLUR:
+        isoSpeed = 1;
+        break;
+    case CAM_ISO_MODE_100:
+        isoSpeed = 100;
+        break;
+    case CAM_ISO_MODE_200:
+        isoSpeed = 200;
+        break;
+    case CAM_ISO_MODE_400:
+        isoSpeed = 400;
+        break;
+    case CAM_ISO_MODE_800:
+        isoSpeed = 800;
+        break;
+    case CAM_ISO_MODE_1600:
+        isoSpeed = 1600;
+        break;
+    case CAM_ISO_MODE_3200:
+        isoSpeed = 3200;
+        break;
+    }
+    return isoSpeed;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method
+ *
+ * PARAMETERS :
+ *   @gpsProcessingMethod : string to store GPS process method
+ *   @count               : length of the string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsProcessingMethod(char *gpsProcessingMethod,
+                                                      uint32_t &count)
+{
+    const char *str = get(KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
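+        // The EXIF GPSProcessingMethod value is prefixed with the ASCII character-code
+        // identifier (ExifAsciiPrefix) before the actual method string.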
+        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        count = EXIF_ASCII_PREFIX_SIZE;
+        strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str, strlen(str));
+        count += (uint32_t)strlen(str);
+        gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ *   @latitude : ptr to rational struct to store latitude info
+ *   @latRef   : character to indicate latitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLatitude(rat_t *latitude,
+                                           char *latRef)
+{
+    const char *str = get(KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, latitude);
+
+        //set Latitude Ref
+        float latitudeValue = getFloat(KEY_GPS_LATITUDE);
+        if(latitudeValue < 0.0f) {
+            latRef[0] = 'S';
+        } else {
+            latRef[0] = 'N';
+        }
+        latRef[1] = '\0';
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ *   @longitude : ptr to rational struct to store longitude info
+ *   @lonRef    : character to indicate longitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLongitude(rat_t *longitude,
+                                            char *lonRef)
+{
+    const char *str = get(KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, longitude);
+
+        //set Longitude Ref
+        float longitudeValue = getFloat(KEY_GPS_LONGITUDE);
+        if(longitudeValue < 0.0f) {
+            lonRef[0] = 'W';
+        } else {
+            lonRef[0] = 'E';
+        }
+        lonRef[1] = '\0';
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ *   @altitude : ptr to rational struct to store altitude info
+ *   @altRef   : character to indicate altitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifAltitude(rat_t *altitude,
+                                           char *altRef)
+{
+    const char *str = get(KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        *altRef = 0;
+        if(value < 0){
+            *altRef = 1;
+            value = -value;
+        }
+        return getRational(altitude, (int)(value*1000), 1000);
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @gpsDateStamp : GPS date time stamp string
+ *   @bufLen       : length of the string
+ *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsDateTimeStamp(char *gpsDateStamp,
+                                                   uint32_t bufLen,
+                                                   rat_t *gpsTimeStamp)
+{
+    const char *str = get(KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
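+        // KEY_GPS_TIMESTAMP carries a UNIX epoch value; convert it to a UTC date string and
+        // hour/minute/second rationals.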
+        time_t unixTime = (time_t)atol(str);
+        struct tm *UTCTimestamp = gmtime(&unixTime);
+        if (UTCTimestamp != NULL) {
+            strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+            getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+            getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+            getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
+        }
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : updateFocusDistances
+ *
+ * DESCRIPTION: update focus distances
+ *
+ * PARAMETERS :
+ *   @focusDistances : ptr to focus distance info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+    String8 str;
+    char buffer[32] = {0};
+    //set all distances to infinity if focus mode is infinity
+    if(mFocusMode == CAM_FOCUS_MODE_INFINITY) {
+        str.append("Infinity,Infinity,Infinity");
+    } else {
+        snprintf(buffer, sizeof(buffer), "%f", focusDistances->focus_distance[0]);
+        str.append(buffer);
+        snprintf(buffer, sizeof(buffer), ",%f", focusDistances->focus_distance[1]);
+        str.append(buffer);
+        snprintf(buffer, sizeof(buffer), ",%f", focusDistances->focus_distance[2]);
+        str.append(buffer);
+    }
+    CDBG_HIGH("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+    set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateRecordingHintValue
+ *
+ * DESCRIPTION: update recording hint locally and to daemon
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRecordingHintValue(int32_t value)
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = setRecordingHintValue(value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update recording hint", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set histogram
+ *
+ * PARAMETERS :
+ *   @enabled : if histogram is enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHistogram(bool enabled)
+{
+    if(m_bHistogramEnabled == enabled) {
+        CDBG("%s: histogram flag not changed, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    // set parm for histogram
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    int32_t value = enabled;
+    int32_t rc = NO_ERROR;
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_HISTOGRAM,
+                                sizeof(value),
+                                &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set histogram", __func__);
+        return rc;
+    }
+
+    m_bHistogramEnabled = enabled;
+
+    CDBG_HIGH(" Histogram -> %s", m_bHistogramEnabled ? "Enabled" : "Disabled");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setIntEvent
+ *
+ * DESCRIPTION: set int event
+ *
+ * PARAMETERS :
+ *   @params : image size and dimensions
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setIntEvent(cam_int_evt_params_t params)
+{
+    int32_t rc = NO_ERROR;
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_INT_EVT,
+                                sizeof(params),
+                                &params);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set int event parm", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set face detection
+ *
+ * PARAMETERS :
+ *   @enabled : if face detection is enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceDetection(bool enabled)
+{
+    uint32_t faceProcMask = m_nFaceProcMask;
+    // set face detection mask
+    if (enabled) {
+        faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+    } else {
+        faceProcMask &= ~CAM_FACE_PROCESS_MASK_DETECTION;
+    }
+
+    if(m_nFaceProcMask == faceProcMask) {
+        CDBG_HIGH("%s: face process mask not changed, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    m_nFaceProcMask = faceProcMask;
+
+    // set parm for face detection
+    uint32_t requested_faces = (uint32_t)getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+    cam_fd_set_parm_t fd_set_parm;
+    memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+    fd_set_parm.fd_mode = faceProcMask;
+    fd_set_parm.num_fd = requested_faces;
+
+    CDBG_HIGH("[KPI Perf] %s: PROFILE_FACE_DETECTION_VALUE = %d num_fd = %d",
+          __func__, faceProcMask,requested_faces);
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+    int32_t rc = NO_ERROR;
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_FD,
+                                sizeof(fd_set_parm),
+                                &fd_set_parm);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set face detection parm", __func__);
+        return rc;
+    }
+
+    CDBG("%s: FaceProcMask -> %d", __func__, m_nFaceProcMask);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameSkip
+ *
+ * DESCRIPTION: send ISP frame skip pattern to camera daemon
+ *
+ * PARAMETERS :
+ *   @pattern : skip pattern for ISP
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+    int32_t rc = NO_ERROR;
+    int32_t value = (int32_t)pattern;
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_FRAMESKIP,
+                                sizeof(value),
+                                &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set frameskip info parm", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
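+/*===========================================================================
+ * FUNCTION   : updateRAW
+ *
+ * DESCRIPTION: set the max dimension to the backend and query the resulting
+ *              RAW dimension
+ *
+ * PARAMETERS :
+ *   @max_dim : maximum dimension to set to the backend (0x0 selects the
+ *              capability default)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/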
+int32_t QCameraParameters::updateRAW(cam_dimension_t max_dim)
+{
+    int32_t rc = NO_ERROR;
+    cam_dimension_t raw_dim;
+
+    if (max_dim.width == 0 || max_dim.height == 0) {
+        max_dim = m_pCapability->raw_dim;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_MAX_DIMENSION,
+                                sizeof(cam_dimension_t),
+                                &max_dim);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION ", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set CAM_INTF_PARM_MAX_DIMENSION parm", __func__);
+        return rc;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddGetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_RAW_DIMENSION);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
+        return rc;
+    }
+
+    rc = commitGetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to commit get batch for CAM_INTF_PARM_RAW_DIMENSION", __func__);
+        return rc;
+    }
+    memcpy(&raw_dim, POINTER_OF_PARAM(CAM_INTF_PARM_RAW_DIMENSION, m_pParamBuf),
+            sizeof(cam_dimension_t));
+    CDBG_HIGH("%s: RAW Dimension = %d X %d", __func__, raw_dim.width, raw_dim.height);
+    if (raw_dim.width == 0 || raw_dim.height == 0) {
+        ALOGE("%s: Error getting RAW size. Setting to Capability value",__func__);
+        raw_dim = m_pCapability->raw_dim;
+    }
+    setRawSize(raw_dim);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRSceneEnable
+ *
+ * DESCRIPTION: sets hdr scene detected flag
+ *
+ * PARAMETERS :
+ *   @bflag : hdr scene detected
+ *
+ * RETURN     : nothing
+ *==========================================================================*/
+void QCameraParameters::setHDRSceneEnable(bool bflag)
+{
+    bool bupdate = false;
+    if (m_HDRSceneEnabled != bflag) {
+        bupdate = true;
+    }
+    m_HDRSceneEnabled = bflag;
+
+    if (bupdate) {
+        updateFlash(true);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getASDStateString
+ *
+ * DESCRIPTION: get ASD result in string format
+ *
+ * PARAMETERS :
+ *   @scene : selected scene mode
+ *
+ * RETURN     : scene name string for the detected auto scene
+ *==========================================================================*/
+const char *QCameraParameters::getASDStateString(cam_auto_scene_t scene)
+{
+    switch (scene) {
+      case S_NORMAL :
+        return "Normal";
+      case S_SCENERY:
+        return "Scenery";
+      case S_PORTRAIT:
+        return "Portrait";
+      case S_PORTRAIT_BACKLIGHT:
+        return "Portrait-Backlight";
+      case S_SCENERY_BACKLIGHT:
+        return "Scenery-Backlight";
+      case S_BACKLIGHT:
+        return "Backlight";
+      default:
+        return "<Unknown!>";
+      }
+}
+
+/*===========================================================================
+ * FUNCTION   : parseNDimVector
+ *
+ * DESCRIPTION: helper function to parse a string like "(1, 2, 3, 4, ..., N)"
+ *              into an N-dimensional vector
+ *
+ * PARAMETERS :
+ *   @str     : string to be parsed
+ *   @num     : output array of size N to store vector element values
+ *   @N       : number of dimensions
+ *   @delim   : delimiter that separates the values
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if (num == NULL) {
+        ALOGE("%s: Invalid output array (num == NULL)", __func__);
+        return BAD_VALUE;
+    }
+
+    // check if the string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("%s: Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)",
+              __func__, str);
+        return BAD_VALUE;
+    }
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("%s: Cannot find delimeter '%c' in string \"%s\". end = %c",
+                  __func__, delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return NO_ERROR;
+}
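+
+// Example (illustrative): with the default ',' delimiter,
+// parseNDimVector("(100, 100, 200, 200, 1)", num, 5) fills num[0..4] with
+// 100, 100, 200, 200, 1 and returns NO_ERROR; a string without the surrounding
+// parentheses is rejected with BAD_VALUE.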
+
+/*===========================================================================
+ * FUNCTION   : parseCameraAreaString
+ *
+ * DESCRIPTION: helper function to parse a string of camera areas like
+ *              "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+ *
+ * PARAMETERS :
+ *   @str             : string to be parsed
+ *   @max_num_areas   : max number of areas
+ *   @pAreas          : ptr to struct to store areas
+ *   @num_areas_found : number of areas found
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseCameraAreaString(const char *str,
+                                                 int max_num_areas,
+                                                 cam_area_t *pAreas,
+                                                 int& num_areas_found)
+{
+    char area_str[32];
+    const char *start, *end, *p;
+    start = str; end = NULL;
+    int values[5], index=0;
+    num_areas_found = 0;
+
+    memset(values, 0, sizeof(values));
+    while(start != NULL) {
+       if(*start != '(') {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return BAD_VALUE;
+       }
+       end = strchr(start, ')');
+       if(end == NULL) {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return BAD_VALUE;
+       }
+       int i;
+       // copy one "(...)" token into area_str, bounded by the buffer size
+       for (i = 0, p = start; p <= end && i < (int)sizeof(area_str) - 1; p++, i++) {
+           area_str[i] = *p;
+       }
+       area_str[i] = '\0';
+       if(parseNDimVector(area_str, values, 5) < 0){
+            ALOGE("%s: error: Failed to parse the area string: %s", __func__, area_str);
+            return BAD_VALUE;
+       }
+       // no more areas than max_num_areas are accepted.
+       if(index >= max_num_areas) {
+            ALOGE("%s: error: too many areas specified %s", __func__, str);
+            return BAD_VALUE;
+       }
+       pAreas[index].rect.left = values[0];
+       pAreas[index].rect.top = values[1];
+       pAreas[index].rect.width = values[2] - values[0];
+       pAreas[index].rect.height = values[3] - values[1];
+       pAreas[index].weight = values[4];
+
+       index++;
+       start = strchr(end, '('); // search for next '('
+    }
+    num_areas_found = index;
+    return 0;
+}
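+
+// Example (illustrative): parsing "(-500,-500,500,500,1000)" yields one area with
+// rect.left = -500, rect.top = -500, rect.width = 1000, rect.height = 1000 and
+// weight = 1000, i.e. the parsed (left, top, right, bottom, weight) values are
+// converted to a left/top/width/height rectangle for the caller.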
+
+/*===========================================================================
+ * FUNCTION   : validateCameraAreas
+ *
+ * DESCRIPTION: helper function to validate camera areas within [-1000, 1000]
+ *
+ * PARAMETERS :
+ *   @areas     : ptr to array of areas
+ *   @num_areas : number of areas
+ *
+ * RETURN     : true --  area is in valid range
+ *              false -- not valid
+ *==========================================================================*/
+bool QCameraParameters::validateCameraAreas(cam_area_t *areas, int num_areas)
+{
+    // special case: default area
+    if (num_areas == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        return true;
+    }
+
+    for(int i = 0; i < num_areas; i++) {
+        // left should be >= -1000
+        if(areas[i].rect.left < -1000) {
+            return false;
+        }
+
+        // top  should be >= -1000
+        if(areas[i].rect.top < -1000) {
+            return false;
+        }
+
+        // width or height should be > 0
+        if (areas[i].rect.width <= 0 || areas[i].rect.height <= 0) {
+            return false;
+        }
+
+        // right  should be <= 1000
+        if(areas[i].rect.left + areas[i].rect.width > 1000) {
+            return false;
+        }
+
+        // bottom should be <= 1000
+        if(areas[i].rect.top + areas[i].rect.height > 1000) {
+            return false;
+        }
+
+        // weight should be within [1, 1000]
+        if (areas[i].weight < 1 || areas[i].weight > 1000) {
+            return false;
+        }
+    }
+    return true;
+}
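+
+// Example (illustrative): a single all-zero area (left, top, width, height and
+// weight all 0) is accepted as the default; an area with rect.left = -1100 fails
+// the left >= -1000 check, and a non-default area with weight 0 fails the
+// [1, 1000] weight check.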
+
+/*===========================================================================
+ * FUNCTION   : isYUVFrameInfoNeeded
+ *
+ * DESCRIPTION: In AE-Bracket mode, we need to set YUV buffer information for the upper layer
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isYUVFrameInfoNeeded()
+{
+    // In AE-Bracket mode, we need to set YUV buffer information for the upper layer
+    if(!isNV21PictureFormat() && !isNV16PictureFormat()){
+        return false;
+    }
+    const char *aecBracketStr =  get(KEY_QC_AE_BRACKET_HDR);
+
+    int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+            aecBracketStr);
+    CDBG_HIGH("%s: aecBracketStr=%s, value=%d.", __func__, aecBracketStr, value);
+    return (value == CAM_EXP_BRACKETING_ON);
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameFmtString
+ *
+ * DESCRIPTION: get string name of frame format
+ *
+ * PARAMETERS :
+ *   @fmt     : frame format
+ *
+ * RETURN     : string name of frame format
+ *==========================================================================*/
+const char *QCameraParameters::getFrameFmtString(cam_format_t fmt)
+{
+    return lookupNameByValue(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), fmt);
+}
+
+/*===========================================================================
+ * FUNCTION   : initBatchUpdate
+ *
+ * DESCRIPTION: init camera parameters buf entries
+ *
+ * PARAMETERS :
+ *   @p_table : ptr to parameter buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initBatchUpdate(void *p_table)
+{
+
+    m_tempMap.clear();
+    CDBG("%s:Initializing batch parameter set",__func__);
+
+    parm_buffer_new_t *param_buf = (parm_buffer_new_t *)p_table;
+    memset(param_buf, 0, ONE_MB_OF_PARAMS);  // clear the whole parameter buffer
+    param_buf->num_entry = 0;
+    param_buf->curr_size = 0;
+    param_buf->tot_rem_size = ONE_MB_OF_PARAMS - sizeof(parm_buffer_new_t);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : AddSetParmEntryToBatch
+ *
+ * DESCRIPTION: add set parameter entry into batch
+ *
+ * PARAMETERS :
+ *   @p_table     : ptr to parameter buffer
+ *   @paramType   : parameter type
+ *   @paramLength : length of parameter value
+ *   @paramValue  : ptr to parameter value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::AddSetParmEntryToBatch(void *p_table,
+        cam_intf_parm_type_t paramType, size_t paramLength, void *paramValue)
+{
+    size_t j = 0;
+    parm_buffer_new_t *param_buf = (parm_buffer_new_t *)p_table;
+    size_t num_entry = param_buf->num_entry;
+    size_t size_req = paramLength + sizeof(parm_entry_type_new_t);
+    size_t aligned_size_req = (size_req + 3U) & (~3U);
+    parm_entry_type_new_t *curr_param = (parm_entry_type_new_t *)
+            (void *) &param_buf->entry[0];
+
+    /* first search if the key is already present in the batch list
+     * this is a search penalty but as the batch list is never more
+     * than a few tens of entries at most, it should be OK.
+     * if search performance becomes a bottleneck, we can
+     * think of implementing a hashing mechanism.
+     * but it is still better than the huge memory required for
+     * direct indexing
+     */
+    for (j = 0; j < num_entry; j++) {
+      if (paramType == curr_param->entry_type) {
+        CDBG_HIGH("%s:Batch parameter overwrite for param: %d",
+                                                __func__, paramType);
+        break;
+      }
+      curr_param = GET_NEXT_PARAM(curr_param, parm_entry_type_new_t);
+    }
+
+    //new param, search not found
+    if (j == num_entry) {
+      if (aligned_size_req > param_buf->tot_rem_size) {
+        ALOGE("%s:Batch buffer running out of size, commit and resend",__func__);
+        commitSetBatch();
+        initBatchUpdate(p_table);
+      }
+
+      curr_param = (parm_entry_type_new_t *)
+          (void *) (&param_buf->entry[0] + param_buf->curr_size);
+      param_buf->curr_size += aligned_size_req;
+      param_buf->tot_rem_size -= aligned_size_req;
+      param_buf->num_entry++;
+    }
+
+    curr_param->entry_type = paramType;
+    curr_param->size = paramLength;
+    curr_param->aligned_size = aligned_size_req;
+    memcpy(&curr_param->data[0], paramValue, paramLength);
+    CDBG("%s: num_entry: %d, paramType: %d, paramLength: %d, aligned_size_req: %d",
+            __func__, param_buf->num_entry, paramType, paramLength, aligned_size_req);
+
+    return NO_ERROR;
+}
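+
+// Note (illustrative): each entry is padded to a 4-byte boundary via
+// (size_req + 3) & ~3, e.g. a 10-byte size_req reserves 12 bytes in the batch
+// buffer; an existing entry with the same paramType is overwritten in place
+// instead of being appended again.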
+
+/*===========================================================================
+ * FUNCTION   : AddGetParmEntryToBatch
+ *
+ * DESCRIPTION: add get parameter entry into batch
+ *
+ * PARAMETERS :
+ *   @p_table     : ptr to parameter buffer
+ *   @paramType   : parameter type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::AddGetParmEntryToBatch(void *p_table,
+                                                  cam_intf_parm_type_t paramType)
+{
+    // In get-params we have no information on the size of the requested param,
+    // hence we assume the largest size and reserve space for it
+    size_t j = 0;
+    size_t paramLength = sizeof(parm_type_t);
+    parm_buffer_new_t *param_buf = (parm_buffer_new_t *)p_table;
+    size_t num_entry = param_buf->num_entry;
+    size_t size_req = paramLength + sizeof(parm_entry_type_new_t) - sizeof(char);
+    size_t aligned_size_req = (size_req + 3U) & (~3U);
+    parm_entry_type_new_t *curr_param = (parm_entry_type_new_t *)
+            (void *) &param_buf->entry[0];
+
+    /* first search if the key is already present in the batch list
+     * this is a search penalty but as the batch list is never more
+     * than a few tens of entries at most, it should be OK.
+     * if search performance becomes a bottleneck, we can
+     * think of implementing a hashing mechanism.
+     * but it is still better than the huge memory required for
+     * direct indexing
+     */
+    for (j = 0; j < num_entry; j++) {
+        if (paramType == curr_param->entry_type) {
+            CDBG_HIGH("%s:Batch parameter overwrite for param: %d",
+                    __func__, paramType);
+            break;
+        }
+        curr_param = GET_NEXT_PARAM(curr_param, parm_entry_type_new_t);
+    }
+
+    //new param, search not found
+    if (j == num_entry) {
+        if (aligned_size_req > param_buf->tot_rem_size) {
+            ALOGE("%s:Batch buffer running out of size, commit and resend", __func__);
+            // This is an extreme corner case
+            // if the size of the batch set is full, we return error
+            // the caller is expected to commit the get batch, use the params
+            // returned, initialize the batch again and continue.
+            return NO_MEMORY;
+        }
+
+        curr_param = (parm_entry_type_new_t *) (void *)
+                (&param_buf->entry[0] + param_buf->curr_size);
+        param_buf->curr_size += aligned_size_req;
+        param_buf->tot_rem_size -= aligned_size_req;
+        param_buf->num_entry++;
+    }
+
+    curr_param->entry_type = paramType;
+    curr_param->size = paramLength;
+    curr_param->aligned_size = aligned_size_req;
+    CDBG_HIGH("%s:num_entry: %d, paramType: %d ",__func__, param_buf->num_entry, paramType);
+
+    return NO_ERROR;
+}
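+
+// Note (illustrative): a get entry only reserves worst-case space
+// (sizeof(parm_type_t)); the backend fills in the data on commitGetBatch() and the
+// caller reads it back with POINTER_OF_PARAM(), as done above for
+// CAM_INTF_PARM_RAW_DIMENSION.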
+
+/*===========================================================================
+ * FUNCTION   : commitSetBatch
+ *
+ * DESCRIPTION: commit all set parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitSetBatch()
+{
+    int32_t rc = NO_ERROR;
+    if (m_pParamBuf->num_entry > 0) {
+        rc = m_pCamOpsTbl->ops->set_parms(m_pCamOpsTbl->camera_handle,
+                                                      (void *)m_pParamBuf);
+        CDBG("%s: commitSetBatch done",__func__);
+    }
+    if (rc == NO_ERROR) {
+        // commit change from temp storage into param map
+        rc = commitParamChanges();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitGetBatch
+ *
+ * DESCRIPTION: commit all get parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitGetBatch()
+{
+    int32_t rc = NO_ERROR;
+    if (m_pParamBuf->num_entry > 0) {
+        rc = m_pCamOpsTbl->ops->get_parms(m_pCamOpsTbl->camera_handle,
+                                                          (void *)m_pParamBuf);
+        CDBG_HIGH("%s: commitGetBatch done",__func__);
+    }
+    return rc;
+}
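+
+// Typical use of the batch APIs (sketch of the CAM_INTF_PARM_MAX_DIMENSION /
+// CAM_INTF_PARM_RAW_DIMENSION sequence above, with <param>/<value> as placeholders):
+//   initBatchUpdate(m_pParamBuf);
+//   AddSetParmEntryToBatch(m_pParamBuf, <param>, sizeof(<value>), &<value>);
+//   commitSetBatch();                      // push the set batch to the backend
+//   initBatchUpdate(m_pParamBuf);
+//   AddGetParmEntryToBatch(m_pParamBuf, <param>);
+//   commitGetBatch();                      // backend fills the reserved entry
+//   memcpy(&<value>, POINTER_OF_PARAM(<param>, m_pParamBuf), sizeof(<value>));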
+
+/*===========================================================================
+ * FUNCTION   : updateParamEntry
+ *
+ * DESCRIPTION: update a parameter entry in the local temp map obj
+ *
+ * PARAMETERS :
+ *   @key     : key of the entry
+ *   @value   : value of the entry
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParamEntry(const char *key, const char *value)
+{
+    m_tempMap.replaceValueFor(String8(key), String8(value));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParamChanges
+ *
+ * DESCRIPTION: commit all changes in local temp map obj into parameter obj
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParamChanges()
+{
+    size_t size = m_tempMap.size();
+    for (size_t i = 0; i < size; i++) {
+        String8 k, v;
+        k = m_tempMap.keyAt(i);
+        v = m_tempMap.valueAt(i);
+        set(k, v);
+    }
+    m_tempMap.clear();
+
+    // update local changes
+    m_bRecordingHint = m_bRecordingHint_new;
+    m_bZslMode = m_bZslMode_new;
+
+    /* After applying auto scene mode,
+       camera effects need to be reapplied */
+    if ( m_bSceneTransitionAuto ) {
+        m_bUpdateEffects = true;
+        m_bSceneTransitionAuto = false;
+    }
+
+    if ( m_bReleaseTorchCamera && ( NULL != m_pTorch)
+        && !m_bRecordingHint_new ) {
+        m_pTorch->releaseTorchCamera();
+        m_bReleaseTorchCamera = false;
+    }
+
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocScaleParam
+ *
+ * DESCRIPTION: constructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocScaleParam::QCameraReprocScaleParam(QCameraParameters *parent __unused)
+  : mScaleEnabled(false),
+    mIsUnderScaling(false),
+    mNeedScaleCnt(0),
+    mSensorSizeTblCnt(0),
+    mSensorSizeTbl(NULL),
+    mTotalSizeTblCnt(0)
+{
+    mPicSizeFromAPK.width = 0;
+    mPicSizeFromAPK.height = 0;
+    mPicSizeSetted.width = 0;
+    mPicSizeSetted.height = 0;
+    memset(mNeedScaledSizeTbl, 0, sizeof(mNeedScaledSizeTbl));
+    memset(mTotalSizeTbl, 0, sizeof(mTotalSizeTbl));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraReprocScaleParam
+ *
+ * DESCRIPTION: destructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocScaleParam::~QCameraReprocScaleParam()
+{
+    //do nothing now.
+}
+
+/*===========================================================================
+ * FUNCTION   : setScaleSizeTbl
+ *
+ * DESCRIPTION: re-set the picture size table with dimensions that need scaling if Reproc Scale is enabled
+ *
+ * PARAMETERS :
+ *   @scale_cnt   : count of picture sizes that need scaling
+ *   @scale_tbl    : picture size table that needs scaling
+ *   @org_cnt     : sensor supported picture size count
+ *   @org_tbl      : sensor supported picture size table
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocScaleParam::setScaleSizeTbl(size_t scale_cnt,
+        cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+    int32_t rc = NO_ERROR;
+    size_t i;
+    mNeedScaleCnt = 0;
+
+    if(!mScaleEnabled || scale_cnt <=0 || scale_tbl == NULL || org_cnt <=0 || org_tbl == NULL){
+        return BAD_VALUE;    // Scaling is not needed, so there is no need to reset the picture size table
+    }
+
+    mSensorSizeTblCnt = org_cnt;
+    mSensorSizeTbl = org_tbl;
+    mNeedScaleCnt = checkScaleSizeTable(scale_cnt, scale_tbl, org_cnt, org_tbl);
+    if(mNeedScaleCnt <= 0){
+        ALOGE("%s: do not have picture sizes need scaling.", __func__);
+        return BAD_VALUE;
+    }
+
+    if(mNeedScaleCnt + org_cnt > MAX_SIZES_CNT){
+        ALOGE("%s: picture size list exceed the max count.", __func__);
+        return BAD_VALUE;
+    }
+
+    //get the total picture size table
+    mTotalSizeTblCnt = mNeedScaleCnt + org_cnt;
+    for(i = 0; i < mNeedScaleCnt; i++){
+        mTotalSizeTbl[i].width = mNeedScaledSizeTbl[i].width;
+        mTotalSizeTbl[i].height = mNeedScaledSizeTbl[i].height;
+        CDBG_HIGH("%s: scale picture size: i =%d, width=%d, height=%d.", __func__,
+            i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+    }
+    for(; i < mTotalSizeTblCnt; i++){
+        mTotalSizeTbl[i].width = org_tbl[i-mNeedScaleCnt].width;
+        mTotalSizeTbl[i].height = org_tbl[i-mNeedScaleCnt].height;
+        CDBG_HIGH("%s: sensor supportted picture size: i =%d, width=%d, height=%d.", __func__,
+            i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+    }
+    return rc;
+}
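+
+// Note (illustrative): the combined table lists the validated scale sizes first
+// (indices 0 .. mNeedScaleCnt-1), followed by the sensor supported sizes, so
+// mTotalSizeTblCnt = mNeedScaleCnt + org_cnt.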
+
+/*===========================================================================
+ * FUNCTION   : getScaleSizeTblCnt
+ *
+ * DESCRIPTION: get the count of picture sizes that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : size_t count of picture sizes
+ *==========================================================================*/
+size_t QCameraReprocScaleParam::getScaleSizeTblCnt()
+{
+    return mNeedScaleCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getScaledSizeTbl
+ *
+ * DESCRIPTION: get the picture size table that needs scaling
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraReprocScaleParam::getScaledSizeTbl()
+{
+    if(!mScaleEnabled)
+        return NULL;
+
+    return mNeedScaledSizeTbl;
+}
+
+/*===========================================================================
+ * FUNCTION   : setScaleEnable
+ *
+ * DESCRIPTION: enable or disable Reproc Scale
+ *
+ * PARAMETERS :
+ *   @enabled : enable: 1; disable 0
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraReprocScaleParam::setScaleEnable(bool enabled)
+{
+    mScaleEnabled = enabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : isScaleEnabled
+ *
+ * DESCRIPTION: check if Reproc Scale is enabled
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraReprocScaleParam::isScaleEnabled()
+{
+    return mScaleEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : isScalePicSize
+ *
+ * DESCRIPTION: check if current picture size is from Scale Table
+ *
+ * PARAMETERS :
+ *   @width     : current picture width
+ *   @height    : current picture height
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraReprocScaleParam::isScalePicSize(int width, int height)
+{
+    //Check if the picture size is in scale table
+    if(mNeedScaleCnt <= 0)
+        return FALSE;
+
+    for (size_t i = 0; i < mNeedScaleCnt; i++) {
+        if ((mNeedScaledSizeTbl[i].width == width) && (mNeedScaledSizeTbl[i].height == height)) {
+            //found match
+            return TRUE;
+        }
+    }
+
+    ALOGE("%s: Not in scale picture size table.", __func__);
+    return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isValidatePicSize
+ *
+ * DESCRIPTION: check if the current picture size is valid
+ *
+ * PARAMETERS :
+ *   @width     : current picture width
+ *   @height    : current picture height
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraReprocScaleParam::isValidatePicSize(int width, int height)
+{
+    size_t i = 0;
+
+    for(i = 0; i < mSensorSizeTblCnt; i++){
+        if(mSensorSizeTbl[i].width == width
+            && mSensorSizeTbl[i].height== height){
+            return TRUE;
+        }
+    }
+
+    for(i = 0; i < mNeedScaleCnt; i++){
+        if(mNeedScaledSizeTbl[i].width == width
+            && mNeedScaledSizeTbl[i].height== height){
+            return TRUE;
+        }
+    }
+
+    ALOGE("%s: Invalidate input picture size.", __func__);
+    return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSensorSupportedPicSize
+ *
+ * DESCRIPTION: set sensor supported picture size.
+ *    For snapshot stream size configuration, we need to use a sensor-supported size.
+ *    We will use the CPP to do scaling based on the output snapshot stream.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocScaleParam::setSensorSupportedPicSize()
+{
+    // will find a suitable picture size (here we leave the possibility to add other scale requirements)
+    //Currently we only focus on upscaling, and checkScaleSizeTable() has guaranteed the dimension ratio.
+
+    if(!mIsUnderScaling || mSensorSizeTblCnt <= 0)
+        return BAD_VALUE;
+
+    //We just get the max sensor supported size here.
+    mPicSizeSetted.width = mSensorSizeTbl[0].width;
+    mPicSizeSetted.height = mSensorSizeTbl[0].height;
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setValidatePicSize
+ *
+ * DESCRIPTION: set sensor supported size and change scale status.
+ *
+ * PARAMETERS :
+ *   @width    : input picture width
+ *   @height   : input picture height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocScaleParam::setValidatePicSize(int &width,int &height)
+{
+    if(!mScaleEnabled)
+        return BAD_VALUE;
+
+    mIsUnderScaling = FALSE; //default: not under scale
+
+    if(isScalePicSize(width, height)){
+        // input picture size needs scaling. Record the size from the APK and the size actually set
+        mIsUnderScaling = TRUE;
+        mPicSizeFromAPK.width = width;
+        mPicSizeFromAPK.height = height;
+
+        if(setSensorSupportedPicSize() != NO_ERROR)
+            return BAD_VALUE;
+
+        //re-set picture size to sensor supported size
+        width = mPicSizeSetted.width;
+        height = mPicSizeSetted.height;
+        CDBG_HIGH("%s: mPicSizeFromAPK- with=%d, height=%d, mPicSizeSetted- with =%d, height=%d.",
+            __func__, mPicSizeFromAPK.width, mPicSizeFromAPK.height, mPicSizeSetted.width, mPicSizeSetted.height);
+    } else {
+        mIsUnderScaling = FALSE;
+        //no scale is needed for input picture size
+        if(!isValidatePicSize(width, height)){
+            ALOGE("%s: invalidate input picture size.", __func__);
+            return BAD_VALUE;
+        }
+        mPicSizeSetted.width = width;
+        mPicSizeSetted.height = height;
+    }
+
+    CDBG_HIGH("%s: X. mIsUnderScaling=%d, width=%d, height=%d.", __func__, mIsUnderScaling, width, height);
+    return NO_ERROR;
+}
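+
+// Example (illustrative, hypothetical sizes): if the APK requests 5000x3750 and that
+// size is in the scale table, mIsUnderScaling becomes TRUE, mPicSizeFromAPK records
+// 5000x3750, and width/height are rewritten to the largest sensor supported size
+// (mSensorSizeTbl[0]); the upscale back to 5000x3750 is then done via CPP, per the
+// note in setSensorSupportedPicSize().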
+
+/*===========================================================================
+ * FUNCTION   : getPicSizeFromAPK
+ *
+ * DESCRIPTION: get the picture size received from the APK
+ *
+ * PARAMETERS :
+ *   @width     : output width
+ *   @height    : output height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocScaleParam::getPicSizeFromAPK(int &width, int &height)
+{
+    if(!mIsUnderScaling)
+        return BAD_VALUE;
+
+    width = mPicSizeFromAPK.width;
+    height = mPicSizeFromAPK.height;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPicSizeSetted
+ *
+ * DESCRIPTION: get the picture size that was set to mm-camera
+ *
+ * PARAMETERS :
+ *   @width     : output width
+ *   @height    : output height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocScaleParam::getPicSizeSetted(int &width, int &height)
+{
+    width = mPicSizeSetted.width;
+    height = mPicSizeSetted.height;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isUnderScaling
+ *
+ * DESCRIPTION: check if we are under a Reproc Scale requirement
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraReprocScaleParam::isUnderScaling()
+{
+    return mIsUnderScaling;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkScaleSizeTable
+ *
+ * DESCRIPTION: check the PICTURE_SIZE_NEED_SCALE candidates and choose the ones valid for Reproc Scale
+ *
+ * PARAMETERS :
+ *   @scale_cnt   : count of picture sizes that need scaling
+ *   @scale_tbl    : picture size table that needs scaling
+ *   @org_cnt     : sensor supported picture size count
+ *   @org_tbl      : sensor supported picture size table
+ *
+ * RETURN     : size_t count of the valid scale sizes
+ *==========================================================================*/
+size_t QCameraReprocScaleParam::checkScaleSizeTable(size_t scale_cnt,
+        cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+    size_t stbl_cnt = 0;
+    size_t temp_cnt = 0;
+    ssize_t i = 0;
+    if(scale_cnt <=0 || scale_tbl == NULL || org_tbl == NULL || org_cnt <= 0)
+        return stbl_cnt;
+
+    // Build the valid scale size table. Currently we only support:
+    // 1. upscaling: the scale size must be larger than the max sensor supported size
+    // 2. the scale dimension ratio must be the same as that of the max sensor supported size.
+    temp_cnt = scale_cnt;
+    for (i = (ssize_t)(scale_cnt - 1); i >= 0; i--) {
+        if (scale_tbl[i].width > org_tbl[0].width ||
+                (scale_tbl[i].width == org_tbl[0].width &&
+                    scale_tbl[i].height > org_tbl[0].height)) {
+            //get the smallest scale size
+            break;
+        }
+        temp_cnt--;
+    }
+
+    //check dimension ratio
+    double supported_ratio = (double)org_tbl[0].width / (double)org_tbl[0].height;
+    for (i = 0; i < (ssize_t)temp_cnt; i++) {
+        double cur_ratio = (double)scale_tbl[i].width / (double)scale_tbl[i].height;
+        if (fabs(supported_ratio - cur_ratio) > ASPECT_TOLERANCE) {
+            continue;
+        }
+        mNeedScaledSizeTbl[stbl_cnt].width = scale_tbl[i].width;
+        mNeedScaledSizeTbl[stbl_cnt].height= scale_tbl[i].height;
+        stbl_cnt++;
+    }
+
+    return stbl_cnt;
+}
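+
+// Example (illustrative, hypothetical sizes): with a max sensor size of 4000x3000
+// (4:3), a candidate of 5000x3750 is kept (larger than the sensor maximum and the
+// same aspect ratio within ASPECT_TOLERANCE), while 3840x2160 (16:9) is rejected by
+// the ratio check; candidates no larger than 4000x3000 are trimmed from the tail of
+// the list before the ratio check runs.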
+
+/*===========================================================================
+ * FUNCTION   : getTotalSizeTblCnt
+ *
+ * DESCRIPTION: get total picture size count after adding dimensions that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : size_t count of picture sizes
+ *==========================================================================*/
+size_t QCameraReprocScaleParam::getTotalSizeTblCnt()
+{
+    return mTotalSizeTblCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getTotalSizeTbl
+ *
+ * DESCRIPTION: get picture size table after adding dimensions that need scaling
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraReprocScaleParam::getTotalSizeTbl()
+{
+    if(!mScaleEnabled)
+        return NULL;
+
+    return mTotalSizeTbl;
+}
+
+/*===========================================================================
+ * FUNCTION   : isHDREnabled
+ *
+ * DESCRIPTION: if HDR is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isHDREnabled()
+{
+    return ((m_nBurstNum == 1) && (m_bHDREnabled || m_HDRSceneEnabled));
+}
+
+/*===========================================================================
+ * FUNCTION   : isAVTimerEnabled
+ *
+ * DESCRIPTION: if AVTimer is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isAVTimerEnabled()
+{
+    return m_bAVTimerEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : isDISEnabled
+ *
+ * DESCRIPTION: if DIS is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isDISEnabled()
+{
+    return m_bDISEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMobicatMask
+ *
+ * DESCRIPTION: returns mobicat mask
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : mobicat mask
+ *==========================================================================*/
+uint8_t QCameraParameters::getMobicatMask()
+{
+    return m_bMobiMask;
+}
+
+/*===========================================================================
+ * FUNCTION   : needThumbnailReprocess
+ *
+ * DESCRIPTION: Check if thumbnail reprocessing is needed
+ *
+ * PARAMETERS : @pFeatureMask - feature mask
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::needThumbnailReprocess(uint32_t *pFeatureMask)
+{
+    if (isUbiFocusEnabled() || isChromaFlashEnabled() ||
+            isOptiZoomEnabled() || isfssrEnabled() ||
+            isMultiTouchFocusEnabled()) {
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_FSSR;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS;
+        return false;
+    } else {
+        return true;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForImageProc
+ *
+ * DESCRIPTION: get number of extra input buffers needed by image processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by ImageProc;
+ *              0 if not ImageProc enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForImageProc()
+{
+    int numOfBufs = 0;
+
+    if (isUbiFocusEnabled()) {
+        numOfBufs += m_pCapability->ubifocus_af_bracketing_need.burst_count - 1;
+        if (isUbiRefocus()) {
+            numOfBufs +=
+                m_pCapability->ubifocus_af_bracketing_need.burst_count + 1;
+        }
+    } else if (isMultiTouchFocusEnabled()) {
+        numOfBufs += m_currNumBufMTF - 1;
+        if (isMTFRefocus()) {
+            numOfBufs += m_currNumBufMTF + 1;
+        }
+    } else if (m_bOptiZoomOn) {
+        numOfBufs += m_pCapability->opti_zoom_settings_need.burst_count - 1;
+    } else if (isChromaFlashEnabled()) {
+        numOfBufs += 1; /* flash and non flash */
+    } else if (isfssrEnabled()) {
+        numOfBufs += m_pCapability->fssr_settings_need.burst_count - 1;
+        //One output buffer of 4X size excluded
+    }
+
+    return (uint8_t)(numOfBufs * getBurstNum());
+}
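+
+// Example (illustrative, hypothetical burst_count): with UbiFocus needing a
+// burst_count of 5 and refocus enabled, numOfBufs = (5 - 1) + (5 + 1) = 10 extra
+// buffers, which is then multiplied by getBurstNum().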
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForVideo
+ *
+ * DESCRIPTION: get number of extra buffers needed for the video stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra video buffers needed;
+ *              0 if none are needed
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForVideo()
+{
+    uint8_t numOfBufs = 0;
+
+    if (isSeeMoreEnabled()) {
+        numOfBufs = 1;
+    }
+
+    return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForPreview
+ *
+ * DESCRIPTION: get number of extra buffers needed for the preview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra preview buffers needed;
+ *              0 if none are needed
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForPreview()
+{
+    uint8_t numOfBufs = 0;
+
+    if (isSeeMoreEnabled() && !isZSLMode() && getRecordingHintValue()) {
+        numOfBufs = 1;
+    }
+
+    return numOfBufs;
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraParameters.h b/msm8974/QCamera2/HAL/QCameraParameters.h
new file mode 100644
index 0000000..88efe1e
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraParameters.h
@@ -0,0 +1,942 @@
+/*
+** Copyright 2008, The Android Open Source Project
+** Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+** Not a Contribution. Apache license notifications and license are
+** retained for attribution purposes only.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+#include <camera/CameraParameters.h>
+#include <cutils/properties.h>
+#include <hardware/camera.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include "cam_intf.h"
+#include "cam_types.h"
+#include "QCameraMem.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+#define GPS_PROCESSING_METHOD_SIZE       101
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+#define FOCAL_LENGTH_DECIMAL_PRECISION   1000
+
+class QCameraTorchInterface
+{
+public:
+    virtual int prepareTorchCamera() = 0;
+    virtual int releaseTorchCamera() = 0;
+    virtual ~QCameraTorchInterface() {}
+};
+
+class QCameraAdjustFPS
+{
+public:
+    virtual int recalcFPSRange(int &minFPS, int &maxFPS,
+            cam_fps_range_t &adjustedRange) = 0;
+    virtual ~QCameraAdjustFPS() {}
+};
+
+class QCameraParameters;
+class QCameraReprocScaleParam{
+public:
+    QCameraReprocScaleParam(QCameraParameters *parent);
+    virtual ~QCameraReprocScaleParam();
+
+    virtual void setScaleEnable(bool enabled);
+    virtual int32_t setScaleSizeTbl(size_t scale_cnt,
+            cam_dimension_t *scale_tbl, size_t org_cnt,
+            cam_dimension_t *org_tbl);
+    virtual int32_t setValidatePicSize(int &width, int &height);
+
+    virtual bool isScaleEnabled();
+    virtual bool isUnderScaling();
+
+
+    virtual size_t getScaleSizeTblCnt();
+    virtual cam_dimension_t *getScaledSizeTbl();
+    virtual size_t getTotalSizeTblCnt();
+    virtual cam_dimension_t *getTotalSizeTbl();
+    virtual int32_t getPicSizeFromAPK(int &width, int &height);
+    virtual int32_t getPicSizeSetted(int &width, int &height);
+
+private:
+    bool isScalePicSize(int width, int height);
+    bool isValidatePicSize(int width, int height);
+    int32_t setSensorSupportedPicSize();
+    size_t checkScaleSizeTable(size_t scale_cnt, cam_dimension_t *scale_tbl,
+            size_t org_cnt, cam_dimension_t *org_tbl);
+
+    bool mScaleEnabled;
+    bool mIsUnderScaling;   //if in scale status
+
+    // picture size cnt that need scale operation
+    size_t mNeedScaleCnt;
+    cam_dimension_t mNeedScaledSizeTbl[MAX_SCALE_SIZES_CNT];
+
+    // sensor supported size cnt and table
+    size_t mSensorSizeTblCnt;
+    cam_dimension_t *mSensorSizeTbl;
+
+    // Total size cnt (sensor supported + need scale cnt)
+    size_t mTotalSizeTblCnt;
+    cam_dimension_t mTotalSizeTbl[MAX_SIZES_CNT];
+
+    cam_dimension_t mPicSizeFromAPK;   // dimension that the APK expects
+    cam_dimension_t mPicSizeSetted;    // dimension used to configure the VFE
+};
+
+class QCameraParameters: public CameraParameters
+{
+public:
+    QCameraParameters();
+    QCameraParameters(const String8 &params);
+    ~QCameraParameters();
+
+    // Supported preview/recording sizes for high frame rate recording, in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_QC_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_QC_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[];
+    static const char KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+    // Supported live snapshot sizes
+    static const char KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[];
+
+    // Supported scaled picture sizes
+    static const char KEY_QC_SCALED_PICTURE_SIZES[];
+
+    // Supported Raw formats
+    static const char KEY_QC_SUPPORTED_RAW_FORMATS[];
+    static const char KEY_QC_RAW_FORMAT[];
+
+    //Touch Af/AEC settings.
+    static const char KEY_QC_TOUCH_AF_AEC[];
+    static const char KEY_QC_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_QC_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_QC_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or "on" constants. Read/write.
+    static const char KEY_QC_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,on". Read only.
+    static const char KEY_QC_SUPPORTED_SCENE_DETECT[];
+    static const char KEY_QC_SELECTABLE_ZONE_AF[];
+
+    static const char KEY_QC_ISO_MODE[];
+    static const char KEY_QC_SUPPORTED_ISO_MODES[];
+    static const char KEY_QC_EXPOSURE_TIME[];
+    static const char KEY_QC_MIN_EXPOSURE_TIME[];
+    static const char KEY_QC_MAX_EXPOSURE_TIME[];
+    static const char KEY_QC_LENSSHADE[] ;
+    static const char KEY_QC_SUPPORTED_LENSSHADE_MODES[] ;
+    static const char KEY_QC_AUTO_EXPOSURE[];
+    static const char KEY_QC_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_QC_GPS_LATITUDE_REF[];
+    static const char KEY_QC_GPS_LONGITUDE_REF[];
+    static const char KEY_QC_GPS_ALTITUDE_REF[];
+    static const char KEY_QC_GPS_STATUS[];
+    static const char KEY_QC_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+    static const char KEY_QC_DIS[];
+    static const char KEY_QC_SUPPORTED_DIS_MODES[];
+
+    static const char KEY_QC_ZSL[];
+    static const char KEY_QC_SUPPORTED_ZSL_MODES[];
+    static const char KEY_QC_ZSL_BURST_INTERVAL[];
+    static const char KEY_QC_ZSL_BURST_LOOKBACK[];
+    static const char KEY_QC_ZSL_QUEUE_DEPTH[];
+
+    static const char KEY_QC_CAMERA_MODE[];
+    static const char KEY_QC_ORIENTATION[];
+
+    static const char KEY_QC_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_QC_VIDEO_HIGH_SPEED_RECORDING[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_QC_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_QC_AE_BRACKET_HDR[];
+    static const char KEY_QC_SUPPORTED_AE_BRACKET_MODES[];
+    static const char KEY_QC_CAPTURE_BURST_EXPOSURE[];
+    static const char KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[];
+    static const char KEY_QC_SNAPSHOT_BURST_NUM[];
+    static const char KEY_QC_NO_DISPLAY_MODE[];
+    static const char KEY_QC_RAW_PICUTRE_SIZE[];
+    static const char KEY_QC_TINTLESS_ENABLE[];
+    static const char KEY_QC_CDS_MODE[];
+    static const char KEY_QC_WB_MANUAL_CCT[];
+    static const char KEY_QC_MIN_WB_CCT[];
+    static const char KEY_QC_MAX_WB_CCT[];
+    static const char KEY_QC_LONG_SHOT[];
+    static const char WHITE_BALANCE_MANUAL_CCT[];
+    static const char FOCUS_MODE_MANUAL_POSITION[];
+
+    static const char KEY_QC_MANUAL_FOCUS_POSITION[];
+    static const char KEY_QC_MANUAL_FOCUS_POS_TYPE[];
+    static const char KEY_QC_MIN_FOCUS_POS_INDEX[];
+    static const char KEY_QC_MAX_FOCUS_POS_INDEX[];
+    static const char KEY_QC_MIN_FOCUS_POS_DAC[];
+    static const char KEY_QC_MAX_FOCUS_POS_DAC[];
+
+    static const char KEY_INTERNAL_PERVIEW_RESTART[];
+
+    // DENOISE
+    static const char KEY_QC_DENOISE[];
+    static const char KEY_QC_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_QC_FOCUS_ALGO[];
+    static const char KEY_QC_SUPPORTED_FOCUS_ALGOS[];
+
+    //Face Detection
+    static const char KEY_QC_FACE_DETECTION[];
+    static const char KEY_QC_SUPPORTED_FACE_DETECTION[];
+
+    //Face Recognition
+    static const char KEY_QC_FACE_RECOGNITION[];
+    static const char KEY_QC_SUPPORTED_FACE_RECOGNITION[];
+
+    // supported camera features to be queried by Snapdragon SDK
+    //Read only
+    static const char KEY_QC_SUPPORTED_CAMERA_FEATURES[];
+
+    //Indicates the number of faces requested by the application.
+    //This value will be rejected if the number of requested faces
+    //is greater than what the hardware supports.
+    //Write only.
+    static const char KEY_QC_MAX_NUM_REQUESTED_FACES[];
+
+    //preview flip
+    static const char KEY_QC_PREVIEW_FLIP[];
+    //video flip
+    static const char KEY_QC_VIDEO_FLIP[];
+    //snapshot picture flip
+    static const char KEY_QC_SNAPSHOT_PICTURE_FLIP[];
+
+    static const char KEY_QC_SUPPORTED_FLIP_MODES[];
+
+    //Face Detection, Facial processing requirement
+    static const char KEY_QC_SNAPSHOT_FD_DATA[];
+
+    //Auto HDR enable
+    static const char KEY_QC_AUTO_HDR_ENABLE[];
+
+    // video rotation
+    static const char KEY_QC_VIDEO_ROTATION[];
+
+    //Redeye Reduction
+    static const char KEY_QC_REDEYE_REDUCTION[];
+    static const char KEY_QC_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+
+    //AF Bracketing
+    static const char KEY_QC_AF_BRACKET[];
+    static const char KEY_QC_SUPPORTED_AF_BRACKET_MODES[];
+
+    //Chroma Flash
+    static const char KEY_QC_CHROMA_FLASH[];
+    static const char KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[];
+
+    //Opti Zoom
+    static const char KEY_QC_OPTI_ZOOM[];
+    static const char KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[];
+
+    //True Portrait
+    static const char KEY_QC_TRUE_PORTRAIT[];
+    static const char KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[];
+
+    //FSSR
+    static const char KEY_QC_FSSR[];
+    static const char KEY_QC_SUPPORTED_FSSR_MODES[];
+
+    //See more
+    static const char KEY_QC_SEE_MORE[];
+    static const char KEY_QC_SUPPORTED_SEE_MORE_MODES[];
+
+    //Longshot
+    static const char KEY_QC_LONGSHOT_SUPPORTED[];
+
+    //ZSL+HDR
+    static const char KEY_QC_ZSL_HDR_SUPPORTED[];
+
+    // Auto HDR supported
+    static const char KEY_QC_AUTO_HDR_SUPPORTED[];
+
+    //Multi-touch Focus
+    static const char KEY_QC_MULTI_TOUCH_FOCUS[];
+    static const char KEY_QC_SUPPORTED_MULTI_TOUCH_FOCUS_MODES[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[];
+    static const char TOUCH_AF_AEC_ON[];
+
+    // Values for Scene mode
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_MODE_HDR[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+    static const char PIXEL_FORMAT_YV12[]; // YV12
+    static const char PIXEL_FORMAT_NV12[]; //NV12
+    static const char QC_PIXEL_FORMAT_NV12_VENUS[]; //NV12 VENUS
+
+    // Values for raw picture format
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[];
+
+    // ISO values
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[];
+    static const char ISO_100[];
+    static const char ISO_200[];
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+    static const char ISO_3200[];
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+    static const char AUTO_EXPOSURE_SMART_METERING[];
+    static const char AUTO_EXPOSURE_USER_METERING[];
+    static const char AUTO_EXPOSURE_SPOT_METERING_ADV[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[];
+
+    static const char KEY_QC_SHARPNESS[];
+    static const char KEY_QC_MIN_SHARPNESS[];
+    static const char KEY_QC_MAX_SHARPNESS[];
+    static const char KEY_QC_SHARPNESS_STEP[];
+    static const char KEY_QC_CONTRAST[];
+    static const char KEY_QC_MIN_CONTRAST[];
+    static const char KEY_QC_MAX_CONTRAST[];
+    static const char KEY_QC_CONTRAST_STEP[];
+    static const char KEY_QC_SATURATION[];
+    static const char KEY_QC_MIN_SATURATION[];
+    static const char KEY_QC_MAX_SATURATION[];
+    static const char KEY_QC_SATURATION_STEP[];
+    static const char KEY_QC_BRIGHTNESS[];
+    static const char KEY_QC_MIN_BRIGHTNESS[];
+    static const char KEY_QC_MAX_BRIGHTNESS[];
+    static const char KEY_QC_BRIGHTNESS_STEP[];
+    static const char KEY_QC_SCE_FACTOR[];
+    static const char KEY_QC_MIN_SCE_FACTOR[];
+    static const char KEY_QC_MAX_SCE_FACTOR[];
+    static const char KEY_QC_SCE_FACTOR_STEP[];
+
+    static const char KEY_QC_HISTOGRAM[] ;
+    static const char KEY_QC_SUPPORTED_HISTOGRAM_MODES[] ;
+    static const char KEY_QC_HDR_NEED_1X[];
+    static const char KEY_QC_VIDEO_HDR[];
+    static const char KEY_QC_VT_ENABLE[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HDR_MODES[];
+    static const char KEY_QC_SENSOR_HDR[];
+    static const char KEY_QC_SUPPORTED_SENSOR_HDR_MODES[];
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+    // Values for Denoise
+    static const char DENOISE_OFF[] ;
+    static const char DENOISE_ON[] ;
+
+    // Values for auto exposure settings.
+    static const char FOCUS_ALGO_AUTO[];
+    static const char FOCUS_ALGO_SPOT_METERING[];
+    static const char FOCUS_ALGO_CENTER_WEIGHTED[];
+    static const char FOCUS_ALGO_FRAME_AVERAGE[];
+
+    // Values for AE Bracketing settings.
+    static const char AE_BRACKET_OFF[];
+    static const char AE_BRACKET[];
+
+    // Values for AF Bracketing settings.
+    static const char AF_BRACKET_OFF[];
+    static const char AF_BRACKET_ON[];
+
+    // Values for Chroma Flash settings.
+    static const char CHROMA_FLASH_OFF[];
+    static const char CHROMA_FLASH_ON[];
+
+    // Values for Opti Zoom settings.
+    static const char OPTI_ZOOM_OFF[];
+    static const char OPTI_ZOOM_ON[];
+
+    // Values for FSSR settings.
+    static const char FSSR_OFF[];
+    static const char FSSR_ON[];
+
+    // Values for See More settings.
+    static const char SEE_MORE_OFF[];
+    static const char SEE_MORE_ON[];
+
+    // Values for True Portrait settings.
+    static const char TRUE_PORTRAIT_OFF[];
+    static const char TRUE_PORTRAIT_ON[];
+
+    // Values for Multi-touch Focus settings
+    static const char MULTI_TOUCH_FOCUS_OFF[];
+    static const char MULTI_TOUCH_FOCUS_ON[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+    static const char VIDEO_HFR_5X[];
+
+    // Values for feature on/off settings.
+    static const char VALUE_OFF[];
+    static const char VALUE_ON[];
+
+    // Values for feature enable/disable settings.
+    static const char VALUE_ENABLE[];
+    static const char VALUE_DISABLE[];
+
+    // Values for feature true/false settings.
+    static const char VALUE_FALSE[];
+    static const char VALUE_TRUE[];
+
+    //Values for flip settings
+    static const char FLIP_MODE_OFF[];
+    static const char FLIP_MODE_V[];
+    static const char FLIP_MODE_H[];
+    static const char FLIP_MODE_VH[];
+
+    //Values for CDS Mode
+    static const char CDS_MODE_OFF[];
+    static const char CDS_MODE_ON[];
+    static const char CDS_MODE_AUTO[];
+
+    static const char KEY_SELECTED_AUTO_SCENE[];
+
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+
+    template <typename valueType> struct QCameraMap {
+        const char *const desc;
+        valueType val;
+    };
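+    /*
+     * Minimal sketch of how these descriptor/value tables are meant to be
+     * used (the table and helper below are illustrative, not declarations
+     * from this header):
+     *
+     *   static const QCameraMap<int> EXAMPLE_ON_OFF_MAP[] = {
+     *       { VALUE_OFF, 0 },
+     *       { VALUE_ON,  1 },
+     *   };
+     *   // linear scan by desc string, returning the mapped value or -1
+     *   int lookupByName(const QCameraMap<int> *map, size_t len, const char *name);
+     */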
+
+    friend class QCameraReprocScaleParam;
+    QCameraReprocScaleParam m_reprocScaleParam;
+
+    void getSupportedHfrSizes(Vector<Size> &sizes);
+    void setPreviewFrameRateMode(const char *mode);
+    const char *getPreviewFrameRateMode() const;
+    void setTouchIndexAec(int x, int y);
+    void getTouchIndexAec(int *x, int *y);
+    void setTouchIndexAf(int x, int y);
+    void getTouchIndexAf(int *x, int *y);
+
+    int32_t init(cam_capability_t *,
+                 mm_camera_vtbl_t *,
+                 QCameraAdjustFPS *,
+                 QCameraTorchInterface *);
+    void deinit();
+    int32_t assign(QCameraParameters& params);
+    int32_t initDefaultParameters();
+    int32_t updateParameters(QCameraParameters&, bool &needRestart);
+    int32_t commitParameters();
+    int getPreviewHalPixelFormat() const;
+    int32_t getStreamRotation(cam_stream_type_t streamType,
+                               cam_pp_feature_config_t &featureConfig,
+                               cam_dimension_t &dim);
+    int32_t getStreamFormat(cam_stream_type_t streamType,
+                             cam_format_t &format);
+    int32_t getStreamDimension(cam_stream_type_t streamType,
+                                cam_dimension_t &dim);
+    void getThumbnailSize(int *width, int *height) const;
+
+    uint8_t getZSLBurstInterval();
+    uint8_t getZSLQueueDepth();
+    uint8_t getZSLBackLookCount();
+    uint8_t getMaxUnmatchedFramesInQueue();
+    int getMinPPBufs();
+    bool isZSLMode() {return m_bZslMode;};
+    bool isNoDisplayMode() {return m_bNoDisplayMode;};
+    bool isWNREnabled() {return m_bWNROn;};
+    bool isHfrMode() {return m_bHfrMode;};
+    void getHfrFps(cam_fps_range_t &pFpsRange) { pFpsRange = m_hfrFpsRange;};
+    uint8_t getNumOfSnapshots();
+    uint8_t getNumOfExtraHDRInBufsIfNeeded();
+    uint8_t getNumOfExtraHDROutBufsIfNeeded();
+    uint8_t getBurstNum();
+    bool getRecordingHintValue() {return m_bRecordingHint;}; // return local copy of video hint
+    int setRecordingHintValue(int32_t value); // set local copy of video hint and send to server
+                                              // without changing the parameter value itself
+    uint32_t getJpegQuality();
+    uint32_t getJpegRotation();
+    int32_t getEffectValue();
+    int32_t getFlashValue();
+    int32_t getSupportedFlashModes();
+    int32_t getRedEyeValue();
+    int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
+    int32_t getExifFocalLength(rat_t *focalLenght);
+    uint16_t getExifIsoSpeed();
+    int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count);
+    int32_t getExifLatitude(rat_t *latitude, char *latRef);
+    int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+    int32_t getExifAltitude(rat_t *altitude, char *altRef);
+    int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp);
+    int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+    bool isAEBracketEnabled();
+    int32_t setAEBracketing();
+    bool isFpsDebugEnabled() {return m_bDebugFps;};
+    bool isHistogramEnabled() {return m_bHistogramEnabled;};
+    bool isFaceDetectionEnabled() {return ((m_nFaceProcMask & CAM_FACE_PROCESS_MASK_DETECTION) != 0);};
+    int32_t setHistogram(bool enabled);
+    int32_t setFaceDetection(bool enabled);
+    int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+    qcamera_thermal_mode getThermalMode() {return m_ThermalMode;};
+    int32_t updateRecordingHintValue(int32_t value);
+    int32_t setHDRAEBracket(cam_exp_bracketing_t hdrBracket);
+    bool isHDREnabled();
+    bool isAutoHDREnabled();
+    int32_t stopAEBracket();
+    int32_t updateFlash(bool commitSettings);
+    int32_t updateRAW(cam_dimension_t max_dim);
+    bool isAVTimerEnabled();
+    uint8_t getMobicatMask();
+    bool isDISEnabled();
+
+    cam_focus_mode_type getFocusMode() const {return mFocusMode;};
+    int32_t setNumOfSnapshot();
+    int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+    bool isJpegPictureFormat() {return (mPictureFormat == CAM_FORMAT_JPEG);};
+    bool isNV16PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_422_NV16);};
+    bool isNV21PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_420_NV21);};
+    cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
+    int32_t getLiveSnapshotSize(cam_dimension_t &dim) {dim = m_LiveSnapshotSize; return NO_ERROR;};
+    int32_t getRawSize(cam_dimension_t &dim) {dim = m_rawSize; return NO_ERROR;};
+    int32_t setRawSize(cam_dimension_t &dim);
+    int getFlipMode(cam_stream_type_t streamType);
+    bool isSnapshotFDNeeded();
+
+    bool isHDR1xFrameEnabled() {return m_bHDR1xFrameEnabled;}
+    bool isYUVFrameInfoNeeded();
+    const char *getFrameFmtString(cam_format_t fmt);
+    bool isHDR1xExtraBufferNeeded() {return m_bHDR1xExtraBufferNeeded;}
+    bool isHDROutputCropEnabled() {return m_bHDROutputCropEnabled;}
+
+    bool isPreviewFlipChanged() { return m_bPreviewFlipChanged; };
+    bool isVideoFlipChanged() { return m_bVideoFlipChanged; };
+    bool isSnapshotFlipChanged() { return m_bSnapshotFlipChanged; };
+    void setHDRSceneEnable(bool bflag);
+    int32_t updateCCTValue(int32_t cct);
+
+    const char *getASDStateString(cam_auto_scene_t scene);
+    bool isHDRThumbnailProcessNeeded() { return m_bHDRThumbnailProcessNeeded; };
+    int getAutoFlickerMode();
+
+    bool setStreamConfigure(bool isCapture, bool previewAsPostview);
+    uint8_t getNumOfExtraBuffersForImageProc();
+    uint8_t getNumOfExtraBuffersForVideo();
+    uint8_t getNumOfExtraBuffersForPreview();
+    bool needThumbnailReprocess(uint32_t *pFeatureMask);
+    inline bool isUbiFocusEnabled() {return m_bAFBracketingOn;};
+    inline bool isMultiTouchFocusSelected() {return m_bMultiTouchFocusOn;};
+    bool isMultiTouchFocusEnabled();
+    void resetMultiTouchFocusParam();
+    inline bool isTouchFocusing() {return m_bTouchFocusOn;};
+    inline bool isChromaFlashEnabled() {return m_bChromaFlashOn;};
+    inline bool isSeeMoreEnabled() {return m_bSeeMoreOn;};
+    inline bool isTruePortraitEnabled() {return m_bTruePortraitOn;};
+    inline uint32_t TpMaxMetaSize() {
+        return m_pCapability->true_portrait_settings_need.meta_max_size;};
+    inline uint32_t TpHeaderSize() {
+        return m_pCapability->true_portrait_settings_need.meta_header_size;};
+    inline uint32_t TPBodyMaskWidth() {
+        return m_pCapability->true_portrait_settings_need.body_mask_width;};
+    bool isOptiZoomEnabled();
+    bool isfssrEnabled() {return m_bFssrOn;};
+    bool isDifferentFlipZSL();
+    int32_t commitAFBracket(cam_af_bracketing_t afBracket);
+    int32_t commitMTFBracket(cam_af_bracketing_t mtfBracket);
+    int32_t commitFlashBracket(cam_flash_bracketing_t flashBracket);
+    int32_t set3ALock(const char *lockStr);
+    int32_t setAndCommitZoom(int zoom_level);
+    uint8_t getBurstCountForAdvancedCapture();
+    int32_t setLongshotEnable(bool enable);
+    inline bool isUbiRefocus() {return isUbiFocusEnabled() &&
+        (m_pCapability->ubifocus_af_bracketing_need.output_count > 1);};
+    inline uint8_t UfOutputCount() {
+        return m_pCapability->ubifocus_af_bracketing_need.output_count;};
+    inline bool isMTFRefocus() {return (isMultiTouchFocusEnabled() &&
+            (m_pCapability->mtf_af_bracketing_parm.output_count > 1));};
+    uint32_t MTFOutputCount();
+    inline bool generateThumbFromMain() {return isUbiFocusEnabled() ||
+            isChromaFlashEnabled() || isOptiZoomEnabled() || isDifferentFlipZSL() ||
+            isfssrEnabled() || isMultiTouchFocusEnabled();}
+    cam_af_bracketing_t m_MTFBracketInfo;
+    int32_t updateMTFInfo(const int32_t lenPos);
+    uint8_t m_currNumBufMTF;
+    int32_t updateCurrentFocusPosition(int32_t pos);
+    bool isDisplayFrameNeeded() { return m_bDisplayFrame; };
+    int32_t setDisplayFrame(bool enabled) {m_bDisplayFrame=enabled; return 0;};
+    bool isAdvCamFeaturesEnabled() {return isUbiFocusEnabled() ||
+            isChromaFlashEnabled() || isOptiZoomEnabled() || isHDREnabled() ||
+            isfssrEnabled() || isMultiTouchFocusEnabled();}
+    int32_t setIntEvent(cam_int_evt_params_t params);
+
+private:
+    int32_t setPreviewSize(const QCameraParameters& );
+    int32_t setVideoSize(const QCameraParameters& );
+    int32_t setPictureSize(const QCameraParameters& );
+    int32_t setLiveSnapshotSize(const QCameraParameters& );
+    int32_t setPreviewFormat(const QCameraParameters& );
+    int32_t setPictureFormat(const QCameraParameters& );
+    int32_t setOrientation(const QCameraParameters& );
+    int32_t setJpegThumbnailSize(const QCameraParameters& );
+    int32_t setJpegQuality(const QCameraParameters& );
+    int32_t setPreviewFpsRange(const QCameraParameters& );
+    int32_t setPreviewFrameRate(const QCameraParameters& );
+    int32_t setAutoExposure(const QCameraParameters& );
+    int32_t setEffect(const QCameraParameters& );
+    int32_t setBrightness(const QCameraParameters& );
+    int32_t setFocusMode(const QCameraParameters& );
+    int32_t setFocusPosition(const QCameraParameters& );
+    int32_t setSharpness(const QCameraParameters& );
+    int32_t setSaturation(const QCameraParameters& );
+    int32_t setContrast(const QCameraParameters& );
+    int32_t setSkinToneEnhancement(const QCameraParameters& );
+    int32_t setSceneDetect(const QCameraParameters& );
+    int32_t setVideoHDR(const QCameraParameters& );
+    int32_t setVtEnable(const QCameraParameters& );
+    int32_t setZoom(const QCameraParameters& );
+    int32_t setISOValue(const QCameraParameters& );
+    int32_t setExposureTime(const QCameraParameters& );
+    int32_t setRotation(const QCameraParameters& );
+    int32_t setVideoRotation(const QCameraParameters& );
+    int32_t setFlash(const QCameraParameters& );
+    int32_t setAecLock(const QCameraParameters& );
+    int32_t setAwbLock(const QCameraParameters& );
+    int32_t setMCEValue(const QCameraParameters& );
+    int32_t setDISValue(const QCameraParameters& params);
+    int32_t setLensShadeValue(const QCameraParameters& );
+    int32_t setExposureCompensation(const QCameraParameters& );
+    int32_t setWhiteBalance(const QCameraParameters& );
+    int32_t setWBManualCCT(const QCameraParameters& );
+    int32_t setAntibanding(const QCameraParameters& );
+    int32_t setFocusAreas(const QCameraParameters& );
+    int32_t setMeteringAreas(const QCameraParameters& );
+    int32_t setSceneMode(const QCameraParameters& );
+    int32_t setSelectableZoneAf(const QCameraParameters& );
+    int32_t setAEBracket(const QCameraParameters& );
+    int32_t setAFBracket(const QCameraParameters& );
+    int32_t setMultiTouchFocus(const QCameraParameters& );
+    int32_t setTouchAFAEC(const QCameraParameters& params);
+    int32_t setChromaFlash(const QCameraParameters& );
+    int32_t setOptiZoom(const QCameraParameters& );
+    int32_t setTruePortrait(const QCameraParameters& );
+    int32_t setFssr(const QCameraParameters& );
+    int32_t setSeeMore(const QCameraParameters& );
+    int32_t setRedeyeReduction(const QCameraParameters& );
+    int32_t setGpsLocation(const QCameraParameters& );
+    int32_t setRecordingHint(const QCameraParameters& );
+    int32_t setNoDisplayMode(const QCameraParameters& );
+    int32_t setWaveletDenoise(const QCameraParameters& );
+    int32_t setZslMode(const QCameraParameters& );
+    int32_t setZslAttributes(const QCameraParameters& );
+    int32_t setAutoHDR(const QCameraParameters& params);
+    int32_t setCameraMode(const QCameraParameters& );
+    int32_t setFaceRecognition(const QCameraParameters& );
+    int32_t setFlip(const QCameraParameters& );
+    int32_t setBurstNum(const QCameraParameters& params);
+    int32_t setSnapshotFDReq(const QCameraParameters& );
+    int32_t setStatsDebugMask();
+    int32_t setISPDebugMask();
+    int32_t setAlgoOptimizationsMask();
+    int32_t setSensorDebugMask();
+    int32_t setTintlessValue(const QCameraParameters& params);
+    int32_t setCDSMode(const QCameraParameters& params);
+    int32_t setMobicat(const QCameraParameters& params);
+    bool UpdateHFRFrameRate(const QCameraParameters& params);
+    int32_t setLongshotParam(const QCameraParameters& params);
+
+    int32_t setAutoExposure(const char *autoExp);
+    int32_t setPreviewFpsRange(int min_fps,int max_fps,
+            int vid_min_fps,int vid_max_fps);
+    int32_t setEffect(const char *effect);
+    int32_t setBrightness(int brightness);
+    int32_t setFocusMode(const char *focusMode);
+    int32_t setFocusPosition(const char *typeStr, const char *posStr);
+    int32_t setSharpness(int sharpness);
+    int32_t setSaturation(int saturation);
+    int32_t setContrast(int contrast);
+    int32_t setSkinToneEnhancement(int sceFactor);
+    int32_t setSceneDetect(const char *scendDetect);
+    int32_t setVideoHDR(const char *videoHDR);
+    int32_t setSensorSnapshotHDR(const char *snapshotHDR);
+    int32_t setVtEnable(const char *vtEnable);
+    int32_t setZoom(int zoom_level);
+    int32_t setISOValue(const char *isoValue);
+    int32_t setExposureTime(const char *expTimeStr);
+    int32_t setFlash(const char *flashStr);
+    int32_t setAecLock(const char *aecStr);
+    int32_t setAwbLock(const char *awbStr);
+    int32_t setMCEValue(const char *mceStr);
+    int32_t setDISValue(const char *disStr);
+    int32_t setHighFrameRate(const int32_t hfrMode);
+    int32_t setLensShadeValue(const char *lensShadeStr);
+    int32_t setExposureCompensation(int expComp);
+    int32_t setWhiteBalance(const char *wbStr);
+    int32_t setWBManualCCT(const char *cctStr);
+    int32_t setAntibanding(const char *antiBandingStr);
+    int32_t setFocusAreas(const char *focusAreasStr);
+    int32_t setMeteringAreas(const char *meteringAreasStr);
+    int32_t setSceneMode(const char *sceneModeStr);
+    int32_t setSelectableZoneAf(const char *selZoneAFStr);
+    int32_t setAEBracket(const char *aecBracketStr);
+    int32_t setAFBracket(const char *afBracketStr);
+    int32_t setMultiTouchFocus(const char *multiTouchFocusStr);
+    int32_t setTouchAFAEC(const char *touchAfAecStr);
+    int32_t setChromaFlash(const char *chromaFlashStr);
+    int32_t setOptiZoom(const char *optiZoomStr);
+    int32_t setTruePortrait(const char *truePortraitStr);
+    int32_t setFssr(const char *fssrStr);
+    int32_t setSeeMore(const char *optiZoomStr);
+    int32_t setRedeyeReduction(const char *redeyeStr);
+    int32_t setWaveletDenoise(const char *wnrStr);
+    int32_t setFaceRecognition(const char *faceRecog, uint32_t maxFaces);
+    int32_t setTintlessValue(const char *tintStr);
+
+
+    int32_t parse_pair(const char *str, int *first, int *second,
+                       char delim, char **endptr);
+    void parseSizesList(const char *sizesStr, Vector<Size> &sizes);
+    int32_t parseNDimVector(const char *str, int *num, int N, char delim);
+    int32_t parseCameraAreaString(const char *str, int max_num_areas,
+                                  cam_area_t *pAreas, int& num_areas_found);
+    bool validateCameraAreas(cam_area_t *areas, int num_areas);
+    int parseGPSCoordinate(const char *coord_str, rat_t *coord);
+    int32_t getRational(rat_t *rat, int num, int denom);
+    String8 createSizesString(const cam_dimension_t *sizes, size_t len);
+    String8 createHfrValuesString(const cam_hfr_info_t *values, size_t len,
+            const QCameraMap<cam_hfr_mode_t> *map, size_t map_len);
+    String8 createHfrSizesString(const cam_hfr_info_t *values, size_t len);
+    String8 createFpsRangeString(const cam_fps_range_t *fps,
+            size_t len, int &default_fps_index);
+    String8 createFpsString(cam_fps_range_t &fps);
+    String8 createZoomRatioValuesString(uint32_t *zoomRatios, size_t length);
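+    /*
+     * These helpers serialize capability tables into the comma-separated
+     * strings expected by CameraParameters, e.g. createSizesString() would
+     * produce something like "4160x3120,1920x1080,640x480" (values here are
+     * illustrative only).
+     */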
+
+    // ops for batch set/get params with server
+    int32_t initBatchUpdate(void *p_table);
+    int32_t AddSetParmEntryToBatch(void *p_table,
+            cam_intf_parm_type_t paramType,
+            size_t paramLength, void *paramValue);
+    int32_t commitSetBatch();
+    int32_t AddGetParmEntryToBatch(void *p_table,
+                                   cam_intf_parm_type_t paramType);
+    int32_t commitGetBatch();
+
+    // ops to temporarily update parameter entries and commit
+    int32_t updateParamEntry(const char *key, const char *value);
+    int32_t commitParamChanges();
+
+    // Map from strings to values
+    static const cam_dimension_t THUMBNAIL_SIZES_MAP[];
+    static const QCameraMap<cam_auto_exposure_mode_type> AUTO_EXPOSURE_MAP[];
+    static const QCameraMap<cam_format_t> PREVIEW_FORMATS_MAP[];
+    static const QCameraMap<cam_format_t> PICTURE_TYPES_MAP[];
+    static const QCameraMap<cam_focus_mode_type> FOCUS_MODES_MAP[];
+    static const QCameraMap<cam_effect_mode_type> EFFECT_MODES_MAP[];
+    static const QCameraMap<cam_scene_mode_type> SCENE_MODES_MAP[];
+    static const QCameraMap<cam_flash_mode_t> FLASH_MODES_MAP[];
+    static const QCameraMap<cam_focus_algorithm_type> FOCUS_ALGO_MAP[];
+    static const QCameraMap<cam_wb_mode_type> WHITE_BALANCE_MODES_MAP[];
+    static const QCameraMap<cam_antibanding_mode_type> ANTIBANDING_MODES_MAP[];
+    static const QCameraMap<cam_iso_mode_type> ISO_MODES_MAP[];
+    static const QCameraMap<cam_hfr_mode_t> HFR_MODES_MAP[];
+    static const QCameraMap<cam_bracket_mode> BRACKETING_MODES_MAP[];
+    static const QCameraMap<int> ON_OFF_MODES_MAP[];
+    static const QCameraMap<int> ENABLE_DISABLE_MODES_MAP[];
+    static const QCameraMap<int> DENOISE_ON_OFF_MODES_MAP[];
+    static const QCameraMap<int> TRUE_FALSE_MODES_MAP[];
+    static const QCameraMap<int> TOUCH_AF_AEC_MODES_MAP[];
+    static const QCameraMap<cam_flip_t> FLIP_MODES_MAP[];
+    static const QCameraMap<int> AF_BRACKETING_MODES_MAP[];
+    static const QCameraMap<int> CHROMA_FLASH_MODES_MAP[];
+    static const QCameraMap<int> OPTI_ZOOM_MODES_MAP[];
+    static const QCameraMap<int> TRUE_PORTRAIT_MODES_MAP[];
+    static const QCameraMap<int> FSSR_MODES_MAP[];
+    static const QCameraMap<int> MULTI_TOUCH_FOCUS_MODES_MAP[];
+    static const QCameraMap<cam_cds_mode_type_t> CDS_MODES_MAP[];
+
+    cam_capability_t *m_pCapability;
+    mm_camera_vtbl_t *m_pCamOpsTbl;
+    QCameraHeapMemory *m_pParamHeap;
+    parm_buffer_new_t *m_pParamBuf; // ptr to param buf in m_pParamHeap
+
+    bool m_bZslMode;                // if ZSL is enabled
+    bool m_bZslMode_new;
+    bool m_bRecordingHint;          // local copy of recording hint
+    bool m_bRecordingHint_new;
+    bool m_bHistogramEnabled;       // if histogram is enabled
+    bool m_bLongshotEnabled;        // if longshot is enabled
+    uint32_t m_nFaceProcMask;       // face process mask
+    bool m_bDebugFps;               // if FPS need to be logged
+    cam_focus_mode_type mFocusMode;
+    cam_format_t mPreviewFormat;
+    int32_t mPictureFormat;         // could be CAMERA_PICTURE_TYPE_JPEG or cam_format_t
+    bool m_bNeedRestart;            // if preview needs restart after parameters updated
+    bool m_bNoDisplayMode;
+    bool m_bWNROn;
+    bool m_bInited;
+    uint8_t m_nBurstNum;
+    cam_exp_bracketing_t m_AEBracketingClient;
+    bool m_bUpdateEffects;          // Cause reapplying of effects
+    bool m_bSceneTransitionAuto;    // Indicate that scene has changed to Auto
+    bool m_bPreviewFlipChanged;        // if flip setting for preview changed
+    bool m_bVideoFlipChanged;          // if flip setting for video changed
+    bool m_bSnapshotFlipChanged;       // if flip setting for snapshot changed
+    bool m_bFixedFrameRateSet;      // Indicates that a fixed frame rate is set
+    qcamera_thermal_mode m_ThermalMode; // adjust fps vs adjust frameskip
+    cam_dimension_t m_LiveSnapshotSize; // live snapshot size
+    cam_dimension_t m_rawSize; // raw capture size
+    bool m_bHDREnabled;             // if HDR is enabled
+    bool m_bAVTimerEnabled;    //if AVTimer is enabled
+    uint8_t m_bMobiMask;
+    bool m_bDISEnabled;
+    QCameraAdjustFPS *m_AdjustFPS;
+    bool m_bHDR1xFrameEnabled;          // if frame with exposure compensation 0 during HDR is enabled
+    bool m_HDRSceneEnabled; // Auto HDR indication
+    bool m_bHDRThumbnailProcessNeeded;        // if thumbnail need to be processed for HDR
+    bool m_bHDR1xExtraBufferNeeded;     // if extra frame with exposure compensation 0 during HDR is needed
+    bool m_bHDROutputCropEnabled;     // if HDR output frame need to be scaled to user resolution
+    QCameraTorchInterface *m_pTorch; // Interface for enabling torch
+    bool m_bReleaseTorchCamera; // Release camera resources after torch gets disabled
+    int32_t m_curCCT;
+    int32_t m_curFocusPos;
+
+    DefaultKeyedVector<String8,String8> m_tempMap; // map to temporarily store parameters to be set
+    cam_fps_range_t m_default_fps_range;
+
+    bool m_bAFBracketingOn;
+    bool m_bMultiTouchFocusOn;
+    bool m_bTouchFocusOn;
+    bool m_bChromaFlashOn;
+    bool m_bOptiZoomOn;
+    bool m_bFssrOn;
+    bool m_bSeeMoreOn;
+    bool m_bUbiRefocus;
+    cam_fps_range_t m_hfrFpsRange;
+    bool m_bHfrMode;
+    int32_t mHfrMode;
+    bool m_bDisplayFrame;
+    bool m_bAeBracketingEnabled;
+    int32_t mFlashValue;
+    int32_t mFlashDaemonValue;
+    bool m_bSensorHDREnabled;             // if sensor HDR is enabled
+    bool m_bTruePortraitOn;
+
+};
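+/*
+ * Rough usage sketch of the parameter life cycle as driven by the HAL layer
+ * (caller-side object names are placeholders):
+ *
+ *   QCameraParameters parm;
+ *   parm.init(pCapability, pCamOps, pFpsAdjuster, pTorch); // one-time setup
+ *   parm.initDefaultParameters();
+ *   parm.updateParameters(incoming, needRestart);          // validate app settings
+ *   parm.commitParameters();                               // push to the backend
+ *   ...
+ *   parm.deinit();
+ */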
+
+}; // namespace qcamera
+
+#endif
diff --git a/msm8974/QCamera2/HAL/QCameraPostProc.cpp b/msm8974/QCamera2/HAL/QCameraPostProc.cpp
new file mode 100644
index 0000000..1b5d847
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraPostProc.cpp
@@ -0,0 +1,2882 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define LOG_TAG "QCameraPostProc"
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <utils/Trace.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraPostProc.h"
+
+namespace qcamera {
+
+const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
+
+#define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt)     \
+    int jpeg_bufs; \
+    for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++)  { \
+      if (ptr[jpeg_bufs] != NULL) { \
+          free(ptr[jpeg_bufs]); \
+          ptr[jpeg_bufs] = NULL; \
+      } \
+    }
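+/*
+ * Note: the macro above declares its loop counter in the enclosing scope, so
+ * it can only be expanded once per block. A scoped variant along these lines
+ * is a common alternative (sketch only, not used by this file):
+ *
+ *   #define FREE_JPEG_OUTPUT_BUFFER(ptr, cnt)        \
+ *       do {                                         \
+ *           for (int j = 0; j < (int)(cnt); j++) {   \
+ *               free((ptr)[j]);                      \
+ *               (ptr)[j] = NULL;                     \
+ *           }                                        \
+ *       } while (0)
+ */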
+
+/*===========================================================================
+ * FUNCTION   : QCameraPostProcessor
+ *
+ * DESCRIPTION: constructor of QCameraPostProcessor.
+ *
+ * PARAMETERS :
+ *   @cam_ctrl : ptr to HWI object
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
+    : m_parent(cam_ctrl),
+      mJpegCB(NULL),
+      mJpegUserData(NULL),
+      mJpegClientHandle(0),
+      mJpegSessionId(0),
+      m_pJpegExifObj(NULL),
+      m_bThumbnailNeeded(TRUE),
+      m_pReprocChannel(NULL),
+      m_pDualReprocChannel(NULL),
+      m_bInited(FALSE),
+      m_inputPPQ(releasePPInputData, this),
+      m_ongoingPPQ(releaseOngoingPPData, this),
+      m_inputJpegQ(releaseJpegData, this),
+      m_ongoingJpegQ(releaseJpegData, this),
+      m_inputRawQ(releaseRawData, this),
+      mSaveFrmCnt(0),
+      mUseSaveProc(false),
+      mUseJpegBurst(false),
+      mJpegMemOpt(true),
+      mNewJpegSessionNeeded(true),
+      mMultipleStages(false),
+      m_JpegOutputMemCount(0),
+      m_reprocStream(NULL)
+{
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPostProcessor
+ *
+ * DESCRIPTION: destructor of QCameraPostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraPostProcessor::~QCameraPostProcessor()
+{
+    FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    if (m_pReprocChannel != NULL) {
+        m_pReprocChannel->stop();
+        delete m_pReprocChannel;
+        m_pReprocChannel = NULL;
+    }
+    if (m_pDualReprocChannel != NULL) {
+        m_pDualReprocChannel->stop();
+        delete m_pDualReprocChannel;
+        m_pDualReprocChannel = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ *   @jpeg_cb      : callback to handle jpeg event from mm-jpeg-interface
+ *   @user_data    : user data ptr for jpeg callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
+{
+    mJpegCB = jpeg_cb;
+    mJpegUserData = user_data;
+    mm_dimension max_size;
+
+    if ((0 > m_parent->m_max_pic_width) || (0 > m_parent->m_max_pic_height)) {
+        ALOGE("%s : Negative dimension %dx%d", __func__,
+                m_parent->m_max_pic_width, m_parent->m_max_pic_height);
+        return BAD_VALUE;
+    }
+
+    //set max pic size
+    memset(&max_size, 0, sizeof(mm_dimension));
+    max_size.w = (uint32_t)m_parent->m_max_pic_width;
+    max_size.h = (uint32_t)m_parent->m_max_pic_height;
+
+    mJpegClientHandle = jpeg_open(&mJpegHandle, max_size);
+    if(!mJpegClientHandle) {
+        ALOGE("%s : jpeg_open did not work", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    m_dataProcTh.launch(dataProcessRoutine, this);
+    m_saveProcTh.launch(dataSaveRoutine, this);
+
+    m_bInited = TRUE;
+    return NO_ERROR;
+}
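+/*
+ * Illustrative call sequence from the owning HWI object (channel and callback
+ * names below are placeholders, not symbols defined in this file):
+ *
+ *   QCameraPostProcessor postproc(pHwi);
+ *   postproc.init(jpegEvtCb, pHwi);      // open jpeg client, launch worker threads
+ *   postproc.start(pCaptureChannel);     // adds a reprocess channel if needed
+ *   postproc.processData(pSuperBuf);     // per captured super-buffer
+ *   postproc.stop();
+ *   postproc.deinit();
+ */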
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::deinit()
+{
+    if (m_bInited == TRUE) {
+        m_dataProcTh.exit();
+        m_saveProcTh.exit();
+
+        if(mJpegClientHandle > 0) {
+            int rc = mJpegHandle.close(mJpegClientHandle);
+            ALOGE("%s: Jpeg closed, rc = %d, mJpegClientHandle = %x",
+                  __func__, rc, mJpegClientHandle);
+            mJpegClientHandle = 0;
+            memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+        }
+        m_bInited = FALSE;
+        m_reprocStream = NULL;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start postprocessor. Data process thread and data notify thread
+ *              will be launched.
+ *
+ * PARAMETERS :
+ *   @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : if any reprocess is needed, a reprocess channel/stream
+ *              will be started.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    int32_t rc = NO_ERROR;
+    if (m_bInited == FALSE) {
+        ALOGE("%s: postproc not initialized yet", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    if (m_parent->needReprocess()) {
+        if (m_pReprocChannel != NULL) {
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+        }
+        // if reprocess is needed, start reprocess channel
+        m_pReprocChannel = m_parent->addReprocChannel(pSrcChannel);
+        if (m_pReprocChannel == NULL) {
+            ALOGE("%s: cannot add reprocess channel", __func__);
+            return UNKNOWN_ERROR;
+        }
+        QCameraStream *pStream = NULL;
+        for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
+            pStream = m_pReprocChannel->getStreamByIndex(i);
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC)) {
+                m_reprocStream = pStream;
+                break;
+            }
+        }
+
+        rc = m_pReprocChannel->start();
+        if (rc != 0) {
+            ALOGE("%s: cannot start reprocess channel", __func__);
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+            return rc;
+        }
+    }
+
+    if (m_pReprocChannel && m_parent->needDualReprocess()) {
+        if (m_pDualReprocChannel != NULL) {
+            delete m_pDualReprocChannel;
+            m_pDualReprocChannel = NULL;
+        }
+        // if dual reprocess is needed, start the second reprocess channel
+        m_pDualReprocChannel = m_parent->addDualReprocChannel(m_pReprocChannel);
+        if (m_pDualReprocChannel == NULL) {
+            ALOGE("%s: cannot add second reprocess channel", __func__);
+            return UNKNOWN_ERROR;
+        }
+
+        rc = m_pDualReprocChannel->start();
+        if (rc != 0) {
+            ALOGE("%s: cannot start second reprocess channel", __func__);
+            delete m_pDualReprocChannel;
+            m_pDualReprocChannel = NULL;
+            return rc;
+        }
+    }
+
+    property_get("persist.camera.longshot.save", prop, "0");
+    mUseSaveProc = atoi(prop) > 0;
+
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
+    m_parent->m_cbNotifier.startSnapshots();
+
+    mMultipleStages = false;
+
+    // Create Jpeg session
+    if ( !m_parent->mParameters.getRecordingHintValue() &&
+            !m_parent->isLongshotEnabled() &&
+            !m_parent->isZSLMode()) {
+
+        QCameraChannel *pChannel = NULL;
+        pChannel = m_parent->needReprocess() ? m_pReprocChannel : pSrcChannel;
+        QCameraStream *pSnapshotStream = NULL;
+        QCameraStream *pThumbStream = NULL;
+
+        for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
+            QCameraStream *pStream = pChannel->getStreamByIndex(i);
+
+            if ( NULL == pStream ) {
+                break;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                pSnapshotStream = pStream;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                pThumbStream = pStream;
+            }
+        }
+
+        // If thumbnail is not part of the reprocess channel, then
+        // try to get it from the source channel
+        if ((NULL == pThumbStream) && (pChannel == m_pReprocChannel)) {
+            for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
+                QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
+
+                if ( NULL == pStream ) {
+                    break;
+                }
+
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                        pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                    pThumbStream = pStream;
+                }
+            }
+        }
+
+        if (m_parent->mParameters.generateThumbFromMain()) {
+            pThumbStream = NULL;
+        }
+
+        if ( NULL != pSnapshotStream ) {
+            mm_jpeg_encode_params_t encodeParam;
+            memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+            rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: error getting encoding config", __func__);
+                return rc;
+            }
+            CDBG_HIGH("[KPI Perf] %s : call jpeg create_session", __func__);
+
+            rc = mJpegHandle.create_session(mJpegClientHandle,
+                    &encodeParam,
+                    &mJpegSessionId);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: error creating a new jpeg encoding session", __func__);
+                return rc;
+            }
+            mNewJpegSessionNeeded = false;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stop()
+{
+    if (m_bInited == TRUE) {
+        m_parent->m_cbNotifier.stopSnapshots();
+        // dataProc thread needs to process "stop" as a sync call because aborting a jpeg job must be synchronous
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegEncodingConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                                    QCameraStream *main_stream,
+                                                    QCameraStream *thumb_stream)
+{
+    CDBG("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+    size_t out_size;
+
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.jpeg_burst", prop, "0");
+    mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
+    encode_parm.burst_mode = mUseJpegBurst;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    main_stream->getCropInfo(crop);
+
+    cam_dimension_t src_dim, dst_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    memset(&dst_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+    if (hdr_output_crop && crop.height) {
+        dst_dim.height = crop.height;
+    } else {
+        dst_dim.height = src_dim.height;
+    }
+    if (hdr_output_crop && crop.width) {
+        dst_dim.width = crop.width;
+    } else {
+        dst_dim.width = src_dim.width;
+    }
+
+    // set rotation only when no online rotation or offline pp rotation is done before
+    if (!m_parent->needRotationReprocess()) {
+        encode_parm.rotation = m_parent->getJpegRotation();
+    }
+
+    encode_parm.main_dim.src_dim = src_dim;
+    encode_parm.main_dim.dst_dim = dst_dim;
+
+    m_dst_dim = dst_dim;
+    m_src_dim = src_dim;
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    m_bThumbnailNeeded = TRUE; // thumbnail encoding is needed by default
+    // System property to disable thumbnail encoding (e.g. to reduce power).
+    // Thumbnail encoding defaults to TRUE; set this property explicitly to
+    // turn it off.
+    property_get("persist.camera.thumbnail.disable", prop, "0");
+    if (atoi(prop) == 1) {
+        m_bThumbnailNeeded = FALSE;
+        CDBG_HIGH("%s : m_bThumbnailNeeded is %d", __func__, m_bThumbnailNeeded);
+    }
+    cam_dimension_t thumbnailSize;
+    memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
+    m_parent->getThumbnailSize(thumbnailSize);
+    if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
+        // (0,0) means no thumbnail
+        m_bThumbnailNeeded = FALSE;
+    }
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
+    main_stream->getFormat(img_fmt);
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality
+    uint32_t val = m_parent->getJpegQuality();
+    if (0U < val) {
+        encode_parm.quality = val;
+    } else {
+        ALOGI("%s: Using default JPEG quality", __func__);
+        encode_parm.quality = 85;
+    }
+    cam_frame_len_offset_t main_offset;
+    memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+    main_stream->getFrameOffset(main_offset);
+
+    // src buf config
+    QCameraMemory *pStreamMem = main_stream->getStreamBufs();
+    if (pStreamMem == NULL) {
+        ALOGE("%s: cannot get stream bufs from main stream", __func__);
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    encode_parm.num_src_bufs = pStreamMem->getCnt();
+    for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+        camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+        if (stream_mem != NULL) {
+            encode_parm.src_main_buf[i].index = i;
+            encode_parm.src_main_buf[i].buf_size = stream_mem->size;
+            encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+            encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+            encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+            encode_parm.src_main_buf[i].offset = main_offset;
+        }
+    }
+
+    if (m_bThumbnailNeeded == TRUE) {
+        m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
+        if (thumb_stream == NULL) {
+            thumb_stream = main_stream;
+
+            if ((90 == m_parent->getJpegRotation())
+                    || (270 == m_parent->getJpegRotation())) {
+                IMG_SWAP(encode_parm.thumb_dim.dst_dim.width,
+                        encode_parm.thumb_dim.dst_dim.height);
+            }
+        }
+        pStreamMem = thumb_stream->getStreamBufs();
+        if (pStreamMem == NULL) {
+            ALOGE("%s: cannot get stream bufs from thumb stream", __func__);
+            ret = BAD_VALUE;
+            goto on_error;
+        }
+        cam_frame_len_offset_t thumb_offset;
+        memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+        thumb_stream->getFrameOffset(thumb_offset);
+        encode_parm.num_tmb_bufs =  pStreamMem->getCnt();
+        for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
+            camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+            if (stream_mem != NULL) {
+                encode_parm.src_thumb_buf[i].index = i;
+                encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
+                encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+                encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+                encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+                encode_parm.src_thumb_buf[i].offset = thumb_offset;
+            }
+        }
+        cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
+        thumb_stream->getFormat(img_fmt_thumb);
+        encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
+
+        // crop info only needs to be re-queried when the thumbnail uses its own stream
+        if (thumb_stream != main_stream) {
+            memset(&crop, 0, sizeof(cam_rect_t));
+            thumb_stream->getCropInfo(crop);
+        }
+
+        memset(&src_dim, 0, sizeof(cam_dimension_t));
+        thumb_stream->getFrameDimension(src_dim);
+        encode_parm.thumb_dim.src_dim = src_dim;
+
+        if ((thumb_stream != main_stream) ||
+                (!m_parent->needRotationReprocess())) {
+            encode_parm.thumb_rotation = m_parent->getJpegRotation();
+        }
+
+        encode_parm.thumb_dim.crop = crop;
+    }
+
+    encode_parm.num_dst_bufs = 1;
+    if (mUseJpegBurst) {
+        encode_parm.num_dst_bufs = MAX_JPEG_BURST;
+    }
+    encode_parm.get_memory = NULL;
+    out_size = main_offset.frame_len;
+    if (mJpegMemOpt) {
+        encode_parm.get_memory = getJpegMemory;
+        out_size = sizeof(omx_jpeg_ouput_buf_t);
+        encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
+    }
+    m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
+    for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
+        if (m_pJpegOutputMem[i] != NULL)
+          free(m_pJpegOutputMem[i]);
+        omx_jpeg_ouput_buf_t omx_out_buf;
+        omx_out_buf.handle = this;
+        // allocate output buf for jpeg encoding
+        m_pJpegOutputMem[i] = malloc(out_size);
+
+        if (NULL == m_pJpegOutputMem[i]) {
+          ret = NO_MEMORY;
+          ALOGE("%s : initHeapMem for jpeg, ret = NO_MEMORY", __func__);
+          goto on_error;
+        }
+
+        if (mJpegMemOpt) {
+            memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
+        }
+
+
+        encode_parm.dest_buf[i].index = i;
+        encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
+        encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
+        encode_parm.dest_buf[i].fd = 0;
+        encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
+        encode_parm.dest_buf[i].offset = main_offset;
+    }
+
+
+    CDBG("%s : X", __func__);
+    return NO_ERROR;
+
+on_error:
+    FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
+
+    CDBG("%s : X with error %d", __func__, ret);
+    return ret;
+}
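+/*
+ * The jpeg configuration above can be tuned at runtime through the system
+ * properties read by this file; for instance (shell usage shown for
+ * illustration):
+ *
+ *   adb shell setprop persist.camera.jpeg_burst 1         # burst-mode dst buffers
+ *   adb shell setprop persist.camera.thumbnail.disable 1  # skip thumbnail encoding
+ *   adb shell setprop persist.camera.longshot.save 1      # route longshot frames to the save thread
+ */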
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify through notify callback registered by upper layer
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type of notify
+ *   @ext1    : extension
+ *   @ext2    : extension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
+                                            int32_t ext1,
+                                            int32_t ext2)
+{
+    return m_parent->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION   : sendDataNotify
+ *
+ * DESCRIPTION: enqueue data into dataNotify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: data callback msg type
+ *   @data    : ptr to data memory struct
+ *   @index   : index to data buffer
+ *   @metadata: ptr to meta data buffer if there is any
+ *   @release_data : ptr to struct indicating if data need to be released
+ *                   after notify
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
+                                             camera_memory_t *data,
+                                             uint8_t index,
+                                             camera_frame_metadata_t *metadata,
+                                             qcamera_release_data_t *release_data)
+{
+    qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
+    if (NULL == data_cb) {
+        ALOGE("%s: no mem for acamera_data_argm_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(data_cb, 0, sizeof(qcamera_data_argm_t));
+    data_cb->msg_type = msg_type;
+    data_cb->data = data;
+    data_cb->index = index;
+    data_cb->metadata = metadata;
+    if (release_data != NULL) {
+        data_cb->release_data = *release_data;
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.data = data;
+    cbArg.metadata = metadata;
+    cbArg.user_data = data_cb;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseNotifyData;
+    int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: Error enqueuing jpeg data into notify queue", __func__);
+        releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
+        return UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : depending on whether offline reprocess is needed, the received
+ *              frame is sent either to the postprocess input queue or to jpeg
+ *              encoding
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
+{
+    if (m_bInited == FALSE) {
+        ALOGE("%s: postproc not initialized yet", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    if (m_parent->needReprocess()) {
+        if ((!m_parent->isLongshotEnabled() &&
+             !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
+            (m_parent->isLongshotEnabled() &&
+             m_parent->isCaptureShutterEnabled())) {
+            //play shutter sound
+            m_parent->playShutter();
+        }
+
+        ATRACE_INT("Camera:Reprocess", 1);
+        CDBG_HIGH("%s: need reprocess", __func__);
+        // enqueue to post proc input queue
+        m_inputPPQ.enqueue((void *)frame);
+    } else if (m_parent->mParameters.isNV16PictureFormat() ||
+        m_parent->mParameters.isNV21PictureFormat()) {
+        //check if raw frame information is needed.
+        if(m_parent->mParameters.isYUVFrameInfoNeeded())
+            setYUVFrameInfo(frame);
+
+        processRawData(frame);
+    } else {
+        //play shutter sound
+        if(!m_parent->m_stateMachine.isNonZSLCaptureRunning())
+            m_parent->playShutter();
+
+        CDBG_HIGH("%s: no need offline reprocess, sending to jpeg encoding", __func__);
+        qcamera_jpeg_data_t *jpeg_job =
+            (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            ALOGE("%s: No memory for jpeg job", __func__);
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+        jpeg_job->src_frame = frame;
+
+        // find meta data frame
+        mm_camera_buf_def_t *meta_frame = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            // look through input superbuf
+            if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+                meta_frame = frame->bufs[i];
+                break;
+            }
+        }
+
+        if (meta_frame != NULL) {
+            // fill in meta data frame ptr
+            jpeg_job->metadata = (cam_metadata_info_t *)meta_frame->buffer;
+        }
+
+        // enqueue to jpeg input queue
+        m_inputJpegQ.enqueue((void *)jpeg_job);
+    }
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processRawData
+ *
+ * DESCRIPTION: enqueue raw data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
+{
+    if (m_bInited == FALSE) {
+        ALOGE("%s: postproc not initialized yet", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    // enqueue to raw input queue
+    m_inputRawQ.enqueue((void *)frame);
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegEvt
+ *
+ * DESCRIPTION: process jpeg event from mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : This event will also trigger DataProc thread to move to next job
+ *              processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
+ *              if there is any pending job in jpeg input queue)
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
+{
+    if (m_bInited == FALSE) {
+        ALOGE("%s: postproc not initialized yet", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    int32_t rc = NO_ERROR;
+    camera_memory_t *jpeg_mem = NULL;
+    omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+
+    if (mUseSaveProc && m_parent->isLongshotEnabled()) {
+        qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
+        if ( NULL == saveData ) {
+            ALOGE("%s: Can not allocate save data message!", __func__);
+            return NO_MEMORY;
+        }
+        *saveData = *evt;
+        m_inputSaveQ.enqueue((void *) saveData);
+        m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        // Release jpeg job data
+        m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
+
+        if (m_inputPPQ.getCurrentSize() > 0) {
+            m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+        }
+        CDBG_HIGH("[KPI Perf] %s : jpeg job %d", __func__, evt->jobId);
+
+        if ((false == m_parent->m_bIntEvtPending) &&
+            (m_parent->mDataCb == NULL ||
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
+            CDBG_HIGH("%s: No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled",
+                  __func__);
+            rc = NO_ERROR;
+            goto end;
+        }
+
+        if(evt->status == JPEG_JOB_STATUS_ERROR) {
+            ALOGE("%s: Error event handled from jpeg, status = %d",
+                  __func__, evt->status);
+            rc = FAILED_TRANSACTION;
+            goto end;
+        }
+        if (!mJpegMemOpt) {
+            m_parent->dumpJpegToFile(evt->out_data.buf_vaddr,
+                                      evt->out_data.buf_filled_len,
+                                      evt->jobId);
+        }
+        else {
+            jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+            if (jpeg_out != NULL) {
+                jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+                if (jpeg_mem != NULL) {
+                    m_parent->dumpJpegToFile(jpeg_mem->data,
+                                              evt->out_data.buf_filled_len,
+                                              evt->jobId);
+                    jpeg_mem = NULL;
+                }
+                jpeg_out = NULL;
+            }
+        }
+        if(true == m_parent->m_bIntEvtPending) {
+          //signal the eztune condition variable
+          pthread_mutex_lock(&m_parent->m_int_lock);
+          pthread_cond_signal(&m_parent->m_int_cond);
+          pthread_mutex_unlock(&m_parent->m_int_lock);
+          m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+          return rc;
+        }
+
+        /* check if all the captures are done */
+        if ((m_parent->mParameters.isUbiRefocus() &&
+            (m_parent->getOutputImageCount() <
+            m_parent->mParameters.UfOutputCount()))
+            || (m_parent->mParameters.isMTFRefocus()
+            && (m_parent->getOutputImageCount() <
+            m_parent->mParameters.MTFOutputCount()))) {
+            jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+            jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+            if (NULL != jpeg_mem) {
+                jpeg_mem->release(jpeg_mem);
+                jpeg_mem = NULL;
+            }
+            goto end;
+        }
+
+        if (!mJpegMemOpt) {
+            // alloc jpeg memory to pass to upper layer
+            jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
+                1, m_parent->mCallbackCookie);
+            if (NULL == jpeg_mem) {
+                rc = NO_MEMORY;
+                ALOGE("%s : getMemory for jpeg, ret = NO_MEMORY", __func__);
+                goto end;
+            }
+            memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
+        } else {
+            jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+            jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+        }
+
+        CDBG_HIGH("%s : Calling upperlayer callback to store JPEG image", __func__);
+        qcamera_release_data_t release_data;
+        memset(&release_data, 0, sizeof(qcamera_release_data_t));
+        release_data.data = jpeg_mem;
+        CDBG_HIGH("[KPI Perf] %s: PROFILE_JPEG_CB ",__func__);
+        rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                            jpeg_mem,
+                            0,
+                            NULL,
+                            &release_data);
+
+end:
+        if (rc != NO_ERROR) {
+            // send error msg to upper layer
+            sendEvtNotify(CAMERA_MSG_ERROR,
+                          UNKNOWN_ERROR,
+                          0);
+
+            if (NULL != jpeg_mem) {
+                jpeg_mem->release(jpeg_mem);
+                jpeg_mem = NULL;
+            }
+        }
+    }
+
+    // wake up data proc thread to do the next job,
+    // if previous request is blocked due to ongoing jpeg job
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ *   @frame   : received frame from reprocess channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : The frame after reprocess needs to be sent for jpeg encoding.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+    bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
+    if (m_bInited == FALSE) {
+        ALOGE("%s: postproc not initialized yet", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
+
+    if (!needSuperBufMatch && (job == NULL || job->src_frame == NULL) ) {
+        ALOGE("%s: Cannot find reprocess job", __func__);
+        return BAD_VALUE;
+    }
+
+    if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
+        m_parent->mParameters.isNV21PictureFormat())) {
+        releaseSuperBuf(job->src_frame);
+        free(job->src_frame);
+        free(job);
+
+        if(m_parent->mParameters.isYUVFrameInfoNeeded())
+            setYUVFrameInfo(frame);
+        return processRawData(frame);
+    }
+
+    if (m_parent->isLongshotEnabled() &&
+         !getMultipleStages() &&
+         !m_parent->isCaptureShutterEnabled()) {
+        m_parent->playShutter();
+    }
+
+    qcamera_jpeg_data_t *jpeg_job =
+        (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+    if (jpeg_job == NULL) {
+        ALOGE("%s: No memory for jpeg job", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+    jpeg_job->src_frame = frame;
+    jpeg_job->src_reproc_frame = job ? job->src_frame : NULL;
+    jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
+    jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
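+    // The jpeg job takes ownership of the reprocessed frame and keeps references
+    // to the original source frame/bufs so they can be returned to the kernel
+    // once encoding completes (see releaseJpegJobData).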
+
+    // find meta data frame
+    mm_camera_buf_def_t *meta_frame = NULL;
+    for (uint32_t i = 0; job && (i < job->src_frame->num_bufs); i++) {
+        // look through input superbuf
+        if (job->src_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+            meta_frame = job->src_frame->bufs[i];
+            break;
+        }
+    }
+
+    if (meta_frame == NULL) {
+        // look through reprocess superbuf
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+                meta_frame = frame->bufs[i];
+                break;
+            }
+        }
+    }
+
+    if (meta_frame != NULL) {
+        // fill in meta data frame ptr
+        jpeg_job->metadata = (cam_metadata_info_t *)meta_frame->buffer;
+    }
+
+    // free pp job buf
+    if (job) {
+        free(job);
+    }
+
+    // enqueue reprocessed frame to jpeg input queue
+    m_inputJpegQ.enqueue((void *)jpeg_job);
+
+    CDBG_HIGH("%s: %d] ", __func__, __LINE__);
+    // wake up data proc thread
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ *   @jobId   : job Id of the job
+ *
+ * RETURN     : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE       : Currently only one job at a time is sent to mm-jpeg-interface
+ *              for jpeg encoding. Therefore simply dequeuing the head of the
+ *              ongoing Jpeg Queue is enough to find the jpeg job.
+ *==========================================================================*/
+qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+    qcamera_jpeg_data_t * job = NULL;
+    if (jobId == 0) {
+        ALOGE("%s: not a valid jpeg jobId", __func__);
+        return NULL;
+    }
+
+    // currently only one jpeg job ongoing, so simply dequeue the head
+    job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+    return job;
+}
+
+/*===========================================================================
+ * FUNCTION   : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseSuperBuf((mm_camera_super_buf_t *)data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing jpeg job data
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
+        CDBG_HIGH("%s : Released job ID %u", __func__,
+            ((qcamera_jpeg_data_t *)data)->jobId);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing postprocess job
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
+        if (NULL != pp_job->src_frame) {
+            if (!pp_job->reproc_frame_release) {
+                pme->releaseSuperBuf(pp_job->src_frame);
+            }
+            free(pp_job->src_frame);
+            pp_job->src_frame = NULL;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifyData
+ *
+ * DESCRIPTION: function to release internal resources in notify data struct
+ *
+ * PARAMETERS :
+ *   @user_data  : ptr user data
+ *   @cookie     : callback cookie
+ *   @cb_status  : callback status
+ *
+ * RETURN     : None
+ *
+ * NOTE       : deallocate jpeg heap memory if it's not NULL
+ *==========================================================================*/
+void QCameraPostProcessor::releaseNotifyData(void *user_data,
+                                             void *cookie,
+                                             int32_t cb_status)
+{
+    qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
+    QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
+    if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
+
+        if ( postProc->mUseSaveProc &&
+             app_cb->release_data.unlinkFile &&
+             ( NO_ERROR != cb_status ) ) {
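+            // When the save-proc path is used, release_data.data holds the path
+            // of the stored JPEG file; remove that file if the callback failed.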
+
+            String8 unlinkPath((const char *) app_cb->release_data.data->data,
+                                app_cb->release_data.data->size);
+            int rc = 0;
+            rc = unlink(unlinkPath.string());
+            CDBG_HIGH("%s : Unlinking stored file rc = %d",
+                  __func__,
+                  rc);
+        }
+
+        if (app_cb && NULL != app_cb->release_data.data) {
+            app_cb->release_data.data->release(app_cb->release_data.data);
+            app_cb->release_data.data = NULL;
+        }
+        if (app_cb && NULL != app_cb->release_data.frame) {
+            postProc->releaseSuperBuf(app_cb->release_data.frame);
+            free(app_cb->release_data.frame);
+            app_cb->release_data.frame = NULL;
+        }
+        if (app_cb && NULL != app_cb->release_data.streamBufs) {
+            app_cb->release_data.streamBufs->deallocate();
+            delete app_cb->release_data.streamBufs;
+            app_cb->release_data.streamBufs = NULL;
+        }
+        free(app_cb);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning it back to the kernel
+ *
+ * PARAMETERS :
+ *   @super_buf : ptr to the superbuf frame
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+    QCameraChannel *pChannel = NULL;
+
+    if (NULL != super_buf) {
+        pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+
+        if ( NULL == pChannel ) {
+            if (m_pReprocChannel != NULL &&
+                m_pReprocChannel->getMyHandle() == super_buf->ch_id) {
+                pChannel = m_pReprocChannel;
+            } else if (m_pDualReprocChannel != NULL &&
+                m_pDualReprocChannel->getMyHandle() == super_buf->ch_id) {
+                pChannel = m_pDualReprocChannel;
+            }
+        }
+
+        if (pChannel != NULL) {
+            pChannel->bufDone(super_buf);
+        } else {
+            ALOGE(" %s : Channel id %d not found!!",
+                  __func__,
+                  super_buf->ch_id);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to jpeg job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : The original source frame needs to be queued back to the kernel
+ *              for future use. The output buf of the jpeg job needs to be
+ *              released since it's allocated per job. The Exif object needs to
+ *              be deleted.
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
+{
+    CDBG("%s: E", __func__);
+    if (NULL != job) {
+        if (NULL != job->src_reproc_frame) {
+            if (!job->reproc_frame_release) {
+                releaseSuperBuf(job->src_reproc_frame);
+            }
+            free(job->src_reproc_frame);
+            job->src_reproc_frame = NULL;
+        }
+
+        if (NULL != job->src_frame) {
+            releaseSuperBuf(job->src_frame);
+            free(job->src_frame);
+            job->src_frame = NULL;
+        }
+
+        if (NULL != job->pJpegExifObj) {
+            delete job->pJpegExifObj;
+            job->pJpegExifObj = NULL;
+        }
+
+        if (NULL != job->src_reproc_bufs) {
+            delete [] job->src_reproc_bufs;
+        }
+
+    }
+    CDBG("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSaveJobData
+ *
+ * DESCRIPTION: callback function to release internal resources of a save job
+ *
+ * PARAMETERS :
+ *   @data      : ptr to the jpeg event payload of the save job
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
+{
+    CDBG("%s: E", __func__);
+
+    QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
+    if (NULL == pme) {
+        ALOGE("%s: Invalid postproc handle", __func__);
+        return;
+    }
+
+    qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) data;
+    if (job_data == NULL) {
+        ALOGE("%s: Invalid jpeg event data", __func__);
+        return;
+    }
+
+    // find job by jobId
+    qcamera_jpeg_data_t *job = pme->findJpegJobByJobId(job_data->jobId);
+
+    if (NULL != job) {
+        pme->releaseJpegJobData(job);
+        free(job);
+    } else {
+        ALOGE("%s : Invalid jpeg job", __func__);
+    }
+
+    CDBG("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseRawData
+ *
+ * DESCRIPTION: callback function to release a raw frame super buffer
+ *
+ * PARAMETERS :
+ *   @data      : ptr to the raw super buffer
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
+{
+    CDBG("%s: E", __func__);
+
+    QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
+    if (NULL == pme) {
+        ALOGE("%s: Invalid postproc handle", __func__);
+        return;
+    }
+    mm_camera_super_buf_t *super_buf = (mm_camera_super_buf_t *) data;
+    pme->releaseSuperBuf(super_buf);
+
+    CDBG("%s: X", __func__);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg color format based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : jpeg color format that can be understood by the omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_420_YV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_422_NV61:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+    default:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_YV12:
+    case CAM_FORMAT_YUV_422_NV61:
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_FMT_YUV;
+    default:
+        return MM_JPEG_FMT_YUV;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : queryStreams
+ *
+ * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
+ *              streams and frame from bundled super buffer
+ *
+ * PARAMETERS :
+ *   @main    : ptr to main stream if present
+ *   @thumb   : ptr to thumbnail stream if present
+ *   @main_image : ptr to main image if present
+ *   @thumb_image: ptr to thumbnail image if present
+ *   @frame   : bundled super buffer
+ *   @reproc_frame : bundled source frame buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
+        QCameraStream **thumb,
+        mm_camera_buf_def_t **main_image,
+        mm_camera_buf_def_t **thumb_image,
+        mm_camera_super_buf_t *frame,
+        mm_camera_super_buf_t *reproc_frame)
+{
+    if (NULL == frame) {
+        return NO_INIT;
+    }
+
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        } else if (m_pDualReprocChannel != NULL &&
+            m_pDualReprocChannel->getMyHandle() == frame->ch_id) {
+            pChannel = m_pDualReprocChannel;
+        }
+    }
+    if (pChannel == NULL) {
+        CDBG_HIGH("%s: No corresponding channel (ch_id = %d) exists, return here",
+              __func__, frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    *main = *thumb = NULL;
+    *main_image = *thumb_image = NULL;
+    // find snapshot frame and thumbnail frame
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream =
+                pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                *main= pStream;
+                *main_image = frame->bufs[i];
+            } else if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                       pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                       pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                       pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                *thumb = pStream;
+                *thumb_image = frame->bufs[i];
+            }
+        }
+    }
+
+    if (*thumb_image == NULL && reproc_frame != NULL) {
+        QCameraChannel *pSrcReprocChannel = NULL;
+        pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
+        if (pSrcReprocChannel != NULL) {
+            // find thumbnail frame
+            for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
+                QCameraStream *pStream =
+                        pSrcReprocChannel->getStreamByHandle(
+                                reproc_frame->bufs[i]->stream_id);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                        pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                        *thumb = pStream;
+                        *thumb_image = reproc_frame->bufs[i];
+                    }
+                }
+            }
+        }
+    }
+
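+    // When the thumbnail is generated from the main image, drop any thumbnail
+    // stream found above so the thumbnail is encoded from the main frame instead.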
+    if (m_parent->mParameters.generateThumbFromMain()) {
+        *thumb = NULL;
+        *thumb_image = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION   : syncStreamParams
+*
+* DESCRIPTION: Query the runtime parameters of all streams included
+*              in the main and reprocessed frames
+*
+* PARAMETERS :
+*   @frame : Main image super buffer
+*
+* RETURN     : int32_t type of status
+*              NO_ERROR  -- success
+*              non-zero failure code
+*==========================================================================*/
+int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame)
+{
+    QCameraStream *main_stream = NULL;
+    QCameraStream *thumb_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+    int32_t ret = NO_ERROR;
+
+    ret = queryStreams(&main_stream,
+            &thumb_stream,
+            &main_frame,
+            &thumb_frame,
+            frame,
+            NULL);
+    if (NO_ERROR != ret) {
+        ALOGE("%s : Querying camera streams from input frames failed %d",
+                __func__,
+                ret);
+        return ret;
+    }
+
+    if (NULL != main_stream) {
+        ret = main_stream->syncRuntimeParams();
+        if (NO_ERROR != ret) {
+            ALOGE("%s : Syncing of main stream runtime parameters failed %d",
+                    __func__,
+                    ret);
+            return ret;
+        }
+    }
+
+    if (NULL != thumb_stream) {
+        ret = thumb_stream->syncRuntimeParams();
+        if (NO_ERROR != ret) {
+            ALOGE("%s : Syncing of thumb stream runtime parameters failed %d",
+                    __func__,
+                    ret);
+            return ret;
+        }
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session needs
+ *                    to be created. After creation, this flag will be cleared
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                                         uint8_t &needNewSess)
+{
+    CDBG("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    QCameraStream *main_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    QCameraStream *thumb_stream = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+    mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
+    cam_rect_t crop;
+    cam_stream_parm_buffer_t param;
+    cam_stream_img_prop_t imgProp;
+
+    // find channel
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        } else if (m_pDualReprocChannel != NULL &&
+            m_pDualReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pDualReprocChannel;
+        }
+    }
+
+    if (pChannel == NULL) {
+        ALOGE("%s:%d] No corresponding channel (ch_id = %d) exists, return here",
+              __func__, __LINE__, recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    ret = queryStreams(&main_stream,
+            &thumb_stream,
+            &main_frame,
+            &thumb_frame,
+            recvd_frame,
+            jpeg_job_data->src_reproc_frame);
+    if (NO_ERROR != ret) {
+        return ret;
+    }
+
+    if(NULL == main_frame){
+       ALOGE("%s : Main frame is NULL", __func__);
+       return BAD_VALUE;
+    }
+
+    if(NULL == thumb_frame){
+       CDBG("%s : Thumbnail frame does not exist", __func__);
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
+    if (NULL == memObj) {
+        ALOGE("%s : Memory Obj of main frame is NULL", __func__);
+        return NO_MEMORY;
+    }
+
+    // dump snapshot frame if enabled
+    m_parent->dumpFrameToFile(main_stream, main_frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+
+    // send upperlayer callback for raw image
+    camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
+    if (NULL != m_parent->mDataCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+        cbArg.data = mem;
+        cbArg.index = 1;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+    if (NULL != m_parent->mNotifyCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+        cbArg.ext1 = 0;
+        cbArg.ext2 = 0;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+
+    if (mJpegClientHandle <= 0) {
+        ALOGE("%s: Error: bug here, mJpegClientHandle is 0", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    if (needNewSess) {
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+        ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
+        if (ret != NO_ERROR) {
+            ALOGE("%s: error getting encoding config", __func__);
+            return ret;
+        }
+        CDBG_HIGH("[KPI Perf] %s : call jpeg create_session", __func__);
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            ALOGE("%s: error creating a new jpeg encoding session", __func__);
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
+    jpg_job.encode_job.dst_index = 0;
+
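+    // dst_index selects the JPEG output buffer: mirror the source index when
+    // memory optimization is enabled, or let the JPEG interface choose (-1)
+    // when burst encoding is used.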
+    if (mJpegMemOpt) {
+        jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
+    } else if (mUseJpegBurst) {
+        jpg_job.encode_job.dst_index = -1;
+    }
+
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+    bool img_feature_enabled =
+      m_parent->mParameters.isUbiFocusEnabled() ||
+      m_parent->mParameters.isMultiTouchFocusEnabled() ||
+      m_parent->mParameters.isChromaFlashEnabled() ||
+      m_parent->mParameters.isOptiZoomEnabled() ||
+      m_parent->mParameters.isfssrEnabled();
+
+    CDBG_HIGH("%s:%d] Crop needed %d", __func__, __LINE__, img_feature_enabled);
+    crop.left = 0;
+    crop.top = 0;
+    crop.height = src_dim.height;
+    crop.width = src_dim.width;
+
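+    // Override the default full-frame crop with the output crop reported for
+    // the main stream, if one is available.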
+    param = main_stream->getOutputCrop();
+    for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+        if (param.outputCrop.crop_info[i].stream_id
+                == main_stream->getMyServerID()) {
+            crop = param.outputCrop.crop_info[i].crop;
+            main_stream->setCropInfo(crop);
+        }
+    }
+
+    if (img_feature_enabled) {
+        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+        param = main_stream->getImgProp();
+        imgProp = param.imgProp;
+        main_stream->setCropInfo(imgProp.crop);
+        crop = imgProp.crop;
+        thumb_stream = NULL; /* use thumbnail from main image */
+        if (imgProp.is_raw_image) {
+           camera_memory_t *mem = memObj->getMemory(
+               main_frame->buf_idx, false);
+           ALOGE("%s:%d] Process raw image %p %d", __func__, __LINE__,
+               mem, imgProp.size);
+           /* dump image */
+           if (mem && mem->data) {
+               if (m_parent->mParameters.isUbiFocusEnabled()){
+                   CAM_DUMP_TO_FILE("/data/misc/camera/ubifocus", "DepthMapImage",
+                                    -1, "y",
+                                    (uint8_t *)mem->data,
+                                    imgProp.size);
+               }
+               if (m_parent->mParameters.isMultiTouchFocusEnabled()) {
+                   CAM_DUMP_TO_FILE("/data/misc/camera/multiTouchFocus", "DepthMapImage",
+                                    -1, "y",
+                                    (uint8_t *)mem->data,
+                                    imgProp.size);
+               }
+           }
+           return NO_ERROR;
+        }
+    } else if (m_parent->mParameters.isTruePortraitEnabled()) {
+        if (mem && mem->data) {
+            cam_frame_len_offset_t offset;
+            memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+            main_stream->getFrameOffset(offset);
+            uint32_t meta_offset = (uint32_t)(offset.mp[0].len +
+                    offset.mp[1].len);
+
+            uint8_t *tp_meta = (uint8_t *)mem->data + meta_offset;
+            double aspect_ratio;
+
+            if (src_dim.width < src_dim.height) {
+                aspect_ratio = (double)src_dim.width / (double)src_dim.height;
+            } else {
+                aspect_ratio = (double)src_dim.height / (double)src_dim.width;
+            }
+
+            uint32_t tp_bodymask_height = (uint32_t)
+                    ((double)m_parent->mParameters.TPBodyMaskWidth() * aspect_ratio);
+            uint32_t tp_meta_size = m_parent->mParameters.TpHeaderSize() +
+                    (m_parent->mParameters.TPBodyMaskWidth() * tp_bodymask_height);
+
+            CDBG_HIGH("%s:%d] %d x %d, %f, %d, %d", __func__, __LINE__,
+                    m_parent->mParameters.TPBodyMaskWidth(), tp_bodymask_height,
+                    aspect_ratio, meta_offset, tp_meta_size);
+
+            CAM_DUMP_TO_FILE("/data/misc/camera/tp", "bm",
+                    -1, "y",
+                    tp_meta,
+                    tp_meta_size);
+        }
+    }
+
+    cam_dimension_t dst_dim;
+
+    if (hdr_output_crop && crop.height) {
+        dst_dim.height = crop.height;
+    } else {
+        dst_dim.height = src_dim.height;
+    }
+    if (hdr_output_crop && crop.width) {
+        dst_dim.width = crop.width;
+    } else {
+        dst_dim.width = src_dim.width;
+    }
+
+    // main dim
+    jpg_job.encode_job.main_dim.src_dim = src_dim;
+    jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+    jpg_job.encode_job.main_dim.crop = crop;
+
+    // get exif data
+    QCameraExif *pJpegExifObj = m_parent->getExifData();
+    jpeg_job_data->pJpegExifObj = pJpegExifObj;
+    if (pJpegExifObj != NULL) {
+        jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
+        jpg_job.encode_job.exif_info.numOfEntries =
+            pJpegExifObj->getNumOfEntries();
+    }
+
+    // set rotation only when no online rotation or offline pp rotation is done before
+    if (!m_parent->needRotationReprocess()) {
+        jpg_job.encode_job.rotation = m_parent->getJpegRotation();
+    }
+    CDBG_HIGH("%s: jpeg rotation is set to %d", __func__, jpg_job.encode_job.rotation);
+
+    // thumbnail dim
+    if (m_bThumbnailNeeded == TRUE) {
+        m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
+
+        if (thumb_stream == NULL) {
+            // need jpeg thumbnail, but no postview/preview stream exists
+            // we use the main stream/frame to encode thumbnail
+            thumb_stream = main_stream;
+            thumb_frame = main_frame;
+
+            if ((90 == m_parent->getJpegRotation())
+                    || (270 == m_parent->getJpegRotation())) {
+                IMG_SWAP(jpg_job.encode_job.thumb_dim.dst_dim.width,
+                        jpg_job.encode_job.thumb_dim.dst_dim.height);
+            }
+        }
+
+        memset(&src_dim, 0, sizeof(cam_dimension_t));
+        thumb_stream->getFrameDimension(src_dim);
+        jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+
+        // crop is the same if frame is the same
+        if (thumb_frame != main_frame) {
+            crop.left = 0;
+            crop.top = 0;
+            crop.height = src_dim.height;
+            crop.width = src_dim.width;
+
+            param = thumb_stream->getOutputCrop();
+            for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+                if (param.outputCrop.crop_info[i].stream_id
+                        == thumb_stream->getMyServerID()) {
+                    crop = param.outputCrop.crop_info[i].crop;
+                    thumb_stream->setCropInfo(crop);
+               }
+            }
+        }
+
+        jpg_job.encode_job.thumb_dim.crop = crop;
+        if (thumb_frame)
+            jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
+        CDBG_HIGH("%s, thumbnail src w/h (%dx%d), dst w/h (%dx%d)", __func__,
+            jpg_job.encode_job.thumb_dim.src_dim.width,
+            jpg_job.encode_job.thumb_dim.src_dim.height,
+            jpg_job.encode_job.thumb_dim.dst_dim.width,
+            jpg_job.encode_job.thumb_dim.dst_dim.height);
+    }
+
+    if (thumb_frame != NULL) {
+        // dump thumbnail frame if enabled
+        m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    if (jpeg_job_data->metadata != NULL) {
+        // fill in meta data frame ptr
+        jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
+    }
+
+    m_parent->mExifParams.ui_flash_mode = (cam_flash_mode_t) m_parent->getFlash();
+    m_parent->mExifParams.red_eye = (exif_redeye_t) m_parent->getRedeye();
+    m_parent->mExifParams.flash_presence = (exif_flash_func_pre_t) m_parent->getFlashPresence();
+    m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
+
+    jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
+
+    jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
+
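+    // When mobicat is enabled, copy the 3A/AF debug data gathered in the exif
+    // params into the metadata handed to the JPEG encoder.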
+    if (NULL != jpg_job.encode_job.p_metadata &&
+        (jpg_job.encode_job.mobicat_mask > 0)) {
+        memcpy(jpg_job.encode_job.p_metadata->
+            chromatix_mobicat_af_data.private_mobicat_af_data,
+            jpg_job.encode_job.cam_exif_params.af_mobicat_params,
+            sizeof(jpg_job.encode_job.cam_exif_params.af_mobicat_params));
+
+        /* Save a copy of 3A debug params */
+        jpg_job.encode_job.p_metadata->is_mobicat_ae_params_valid =
+            jpg_job.encode_job.cam_exif_params.ae_debug_params_valid;
+        jpg_job.encode_job.p_metadata->is_mobicat_awb_params_valid =
+            jpg_job.encode_job.cam_exif_params.awb_debug_params_valid;
+        jpg_job.encode_job.p_metadata->is_mobicat_af_params_valid =
+            jpg_job.encode_job.cam_exif_params.af_debug_params_valid;
+        jpg_job.encode_job.p_metadata->is_mobicat_asd_params_valid =
+            jpg_job.encode_job.cam_exif_params.asd_debug_params_valid;
+        jpg_job.encode_job.p_metadata->is_mobicat_stats_params_valid =
+            jpg_job.encode_job.cam_exif_params.stats_debug_params_valid;
+
+        if (jpg_job.encode_job.cam_exif_params.ae_debug_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_ae_data =
+               jpg_job.encode_job.cam_exif_params.ae_debug_params;
+        }
+        if (jpg_job.encode_job.cam_exif_params.awb_debug_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_awb_data =
+               jpg_job.encode_job.cam_exif_params.awb_debug_params;
+        }
+        if (jpg_job.encode_job.cam_exif_params.af_debug_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_af_data =
+               jpg_job.encode_job.cam_exif_params.af_debug_params;
+        }
+        if (jpg_job.encode_job.cam_exif_params.asd_debug_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_asd_data =
+               jpg_job.encode_job.cam_exif_params.asd_debug_params;
+        }
+        if (jpg_job.encode_job.cam_exif_params.stats_debug_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_stats_buffer_data =
+               jpg_job.encode_job.cam_exif_params.stats_debug_params;
+        }
+    }
+
+    CDBG_HIGH("[KPI Perf] %s : PROFILE_JPEG_JOB_START", __func__);
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processRawImageImpl
+ *
+ * DESCRIPTION: function to send raw image to upper layer
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : frame to be encoded
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    QCameraStream *pStream = NULL;
+    mm_camera_buf_def_t *frame = NULL;
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        }
+    }
+    if (pChannel == NULL) {
+        ALOGE("%s:%d] No corresponding channel (ch_id = %d) exists, return here",
+              __func__, __LINE__, recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        QCameraStream *pCurStream =
+            pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (pCurStream != NULL) {
+            if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
+                pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
+                pStream = pCurStream;
+                frame = recvd_frame->bufs[i];
+                break;
+            }
+        }
+    }
+
+    if ( NULL == frame ) {
+        ALOGE("%s: No valid raw buffer", __func__);
+        return BAD_VALUE;
+    }
+
+    QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
+    bool zslChannelUsed = m_parent->isZSLMode() &&
+            ( pChannel != m_pReprocChannel );
+    camera_memory_t *raw_mem = NULL;
+
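+    // In ZSL mode reuse the camera_memory_t already backing this buffer;
+    // otherwise wrap the buffer fd in fresh callback memory for the upper layer.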
+    if (rawMemObj != NULL) {
+        if (zslChannelUsed) {
+            raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
+        } else {
+            raw_mem = m_parent->mGetMemory(frame->fd,
+                                           frame->frame_len,
+                                           1,
+                                           m_parent->mCallbackCookie);
+            if (NULL == raw_mem) {
+                ALOGE("%s : Not enough memory for RAW cb ", __func__);
+                return NO_MEMORY;
+            }
+        }
+    }
+
+    if (NULL != rawMemObj && NULL != raw_mem) {
+        // dump frame into file
+        if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
+            pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+            // for YUV422 NV16 case
+            m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+        } else {
+            m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
+        }
+
+        // send data callback / notify for RAW_IMAGE
+        if (NULL != m_parent->mDataCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+            cbArg.data = raw_mem;
+            cbArg.index = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+        if (NULL != m_parent->mNotifyCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+            cbArg.ext1 = 0;
+            cbArg.ext2 = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+
+        if ((m_parent->mDataCb != NULL) &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
+            qcamera_release_data_t release_data;
+            memset(&release_data, 0, sizeof(qcamera_release_data_t));
+            if ( zslChannelUsed ) {
+                release_data.frame = recvd_frame;
+            } else {
+                release_data.data = raw_mem;
+            }
+            rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                                raw_mem,
+                                0,
+                                NULL,
+                                &release_data);
+        } else {
+            raw_mem->release(raw_mem);
+        }
+    } else {
+        ALOGE("%s: Cannot get raw mem", __func__);
+        rc = UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataSaveRoutine
+ *
+ * DESCRIPTION: data saving routine
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataSaveRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
+    cmdThread->setName("CAM_JpegSave");
+    char saveName[PROPERTY_VALUE_MAX];
+
+    CDBG("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            CDBG("%s: start data proc", __func__);
+            is_active = TRUE;
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                CDBG("%s: stop data proc", __func__);
+                is_active = FALSE;
+
+                // flush input save Queue
+                pme->m_inputSaveQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                CDBG_HIGH("%s: Do next job, active is %d", __func__, is_active);
+
+                qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
+                if (job_data == NULL) {
+                    ALOGE("%s: Invalid jpeg event data", __func__);
+                    continue;
+                }
+
+                pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
+
+                CDBG_HIGH("[KPI Perf] %s : jpeg job %d", __func__, job_data->jobId);
+
+                if (is_active == TRUE) {
+                    memset(saveName, '\0', sizeof(saveName));
+                    snprintf(saveName,
+                             sizeof(saveName),
+                             QCameraPostProcessor::STORE_LOCATION,
+                             pme->mSaveFrmCnt);
+
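+                    // Write the encoded JPEG to the numbered file and pass the
+                    // file path (not the image data) to the upper-layer callback;
+                    // unlinkFile lets releaseNotifyData remove it on failure.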
+                    int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
+                    if (file_fd > 0) {
+                        ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
+                                job_data->out_data.buf_filled_len);
+                        if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
+                            ALOGE("%s: Failed to save complete data: %d bytes "
+                                  "written instead of %d bytes!",
+                                  __func__, written_len,
+                                  job_data->out_data.buf_filled_len);
+                        } else {
+                            CDBG_HIGH("%s: written number of bytes %d\n",
+                                __func__, written_len);
+                        }
+
+                        close(file_fd);
+                    } else {
+                        ALOGE("%s: failed to open file for saving", __func__);
+                    }
+                    pme->mSaveFrmCnt++;
+
+                    camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
+                                                         strlen(saveName),
+                                                         1,
+                                                         pme->m_parent->mCallbackCookie);
+                    if (NULL == jpeg_mem) {
+                        ret = NO_MEMORY;
+                        ALOGE("%s : getMemory for jpeg, ret = NO_MEMORY", __func__);
+                        goto end;
+                    }
+                    memcpy(jpeg_mem->data, saveName, strlen(saveName));
+
+                    ALOGE("%s : Calling upperlayer callback to store JPEG image", __func__);
+                    qcamera_release_data_t release_data;
+                    memset(&release_data, 0, sizeof(qcamera_release_data_t));
+                    release_data.data = jpeg_mem;
+                    release_data.unlinkFile = true;
+                    ALOGE("[KPI Perf] %s: PROFILE_JPEG_CB ",__func__);
+                    ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                                        jpeg_mem,
+                                        0,
+                                        NULL,
+                                        &release_data);
+                }
+
+end:
+                free(job_data);
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            CDBG("%s : save thread exit", __func__);
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    CDBG("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ *              Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ *              reprocess.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataProcessRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+    cmdThread->setName("CAM_JpegProc");
+
+    CDBG("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            CDBG("%s: start data proc", __func__);
+            is_active = TRUE;
+            pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
+                                      FALSE,
+                                      FALSE);
+
+            // signal cmd is completed
+            cam_sem_post(&cmdThread->sync_sem);
+
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                CDBG("%s: stop data proc", __func__);
+                is_active = FALSE;
+
+                pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
+                                           TRUE,
+                                           TRUE);
+                // cancel all ongoing jpeg jobs
+                qcamera_jpeg_data_t *jpeg_job =
+                    (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                while (jpeg_job != NULL) {
+                    pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+                    pme->releaseJpegJobData(jpeg_job);
+                    free(jpeg_job);
+
+                    jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                }
+
+                // destroy jpeg encoding session
+                if ( 0 < pme->mJpegSessionId ) {
+                    pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+                    pme->mJpegSessionId = 0;
+                }
+
+                // free jpeg out buf and exif obj
+                FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
+                    pme->m_JpegOutputMemCount);
+
+                if (pme->m_pJpegExifObj != NULL) {
+                    delete pme->m_pJpegExifObj;
+                    pme->m_pJpegExifObj = NULL;
+                }
+
+                // stop reproc channel if exists
+                if (pme->m_pReprocChannel != NULL) {
+                    pme->m_pReprocChannel->stop();
+                    delete pme->m_pReprocChannel;
+                    pme->m_pReprocChannel = NULL;
+                }
+
+                // stop dual reproc channel if exists
+                if (pme->m_pDualReprocChannel != NULL) {
+                    pme->m_pDualReprocChannel->stop();
+                    delete pme->m_pDualReprocChannel;
+                    pme->m_pDualReprocChannel = NULL;
+                }
+
+                // flush ongoing postproc Queue
+                pme->m_ongoingPPQ.flush();
+
+                // flush input jpeg Queue
+                pme->m_inputJpegQ.flush();
+
+                // flush input Postproc Queue
+                pme->m_inputPPQ.flush();
+
+                // flush input raw Queue
+                pme->m_inputRawQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+
+                pme->mNewJpegSessionNeeded = true;
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                CDBG("%s: Do next job, active is %d", __func__, is_active);
+                if (is_active == TRUE) {
+                    qcamera_jpeg_data_t *jpeg_job =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+                    if (NULL != jpeg_job) {
+                        pme->syncStreamParams(jpeg_job->src_frame);
+
+                        // add into ongoing jpeg job Q
+                        pme->m_ongoingJpegQ.enqueue((void *)jpeg_job);
+                        ret = pme->encodeData(jpeg_job,
+                                pme->mNewJpegSessionNeeded);
+                        if (NO_ERROR != ret) {
+                            // dequeue the last one
+                            pme->m_ongoingJpegQ.dequeue(false);
+                            pme->releaseJpegJobData(jpeg_job);
+                            free(jpeg_job);
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+
+
+                    // process raw data if any
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+
+                    if (NULL != super_buf) {
+                        //play shutter sound
+                        pme->m_parent->playShutter();
+                        ret = pme->processRawImageImpl(super_buf);
+                        if (NO_ERROR != ret) {
+                            pme->releaseSuperBuf(super_buf);
+                            free(super_buf);
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+
+                    mm_camera_super_buf_t *pp_frame = NULL;
+                    if (pme->m_inputPPQ.getCurrentSize() > 0) {
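+                        // In longshot mode, throttle reprocess submissions so the
+                        // number of in-flight reprocess jobs never exceeds the
+                        // buffers queued on the reprocess stream.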
+                        if (!pme->m_parent->isLongshotEnabled() ||
+                                (pme->m_ongoingPPQ.getCurrentSize() <
+                                pme->m_reprocStream->getNumQueuedBuf())) {
+                            pp_frame = (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                        }
+                        else {
+                            CDBG_HIGH("Postpone reprocess. Ongoing reproc=%d, Queued reproc buf=%d",
+                                    pme->m_ongoingPPQ.getCurrentSize(),
+                                    pme->m_reprocStream->getNumQueuedBuf());
+                        }
+                    }
+                    if (NULL != pp_frame) {
+                        pme->syncStreamParams(pp_frame);
+
+                        qcamera_pp_data_t *pp_job =
+                            (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
+                        if (pp_job != NULL) {
+                            memset(pp_job, 0, sizeof(qcamera_pp_data_t));
+                            if (pme->m_pReprocChannel != NULL) {
+                                // add into ongoing PP job Q
+                                pp_job->src_frame = pp_frame;
+                                ret = pme->reprocess(pp_job);
+                                if (NO_ERROR == ret) {
+                                    pme->stopCapture();
+                                }
+                            } else {
+                                ALOGE("%s: Reprocess channel is NULL", __func__);
+                                ret = -1;
+                            }
+                        } else {
+                            ALOGE("%s: no mem for qcamera_pp_data_t", __func__);
+                            ret = -1;
+                        }
+
+                        if (0 != ret) {
+                            // free pp_job
+                            if (pp_job != NULL) {
+                                free(pp_job);
+                            }
+                            // free frame
+                            if (pp_frame != NULL) {
+                                pme->releaseSuperBuf(pp_frame);
+                                free(pp_frame);
+                            }
+                            // send error notify
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+                } else {
+                    // not active, simply return buf and do no op
+                    qcamera_jpeg_data_t *jpeg_data =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+                    if (NULL != jpeg_data) {
+                        pme->releaseJpegJobData(jpeg_data);
+                        free(jpeg_data);
+                    }
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                    super_buf = (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    CDBG("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocess
+ *
+ * DESCRIPTION: Trigger reprocessing
+ *
+ * PARAMETERS :
+ *   @pp_job  : Postproc job
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::reprocess(qcamera_pp_data_t *pp_job)
+{
+    int rc = NO_ERROR;
+
+    if (NULL == pp_job) {
+        return BAD_VALUE;
+    }
+
+    // find appropriate reprocess channel
+    QCameraReprocessChannel *pChannel = NULL;
+    if (m_pReprocChannel != NULL &&
+        m_pReprocChannel->getSourceChannel()->getMyHandle() == pp_job->src_frame->ch_id) {
+        pChannel = m_pReprocChannel;
+    } else if (m_pDualReprocChannel != NULL &&
+        m_pDualReprocChannel->getSourceChannel()->getMyHandle() == pp_job->src_frame->ch_id) {
+        pChannel = m_pDualReprocChannel;
+    }
+
+    if (NULL == pChannel) {
+        CDBG_HIGH("Unable to find reproc channel from source super buf");
+        return BAD_VALUE;
+    }
+
+    if (m_parent->isRegularCapture()) {
+        if ((NULL != pp_job->src_frame) &&
+                (0 < pp_job->src_frame->num_bufs)) {
+            mm_camera_buf_def_t *bufs = NULL;
+            uint32_t num_bufs = pp_job->src_frame->num_bufs;
+            bufs = new mm_camera_buf_def_t[num_bufs];
+            if (NULL == bufs) {
+                ALOGE("%s:Unable to allocate cached buffers", __func__);
+                return NO_MEMORY;
+            }
+
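+            // keep local copies of the source buffer descriptors so they stay
+            // valid after the source channel is torn down (see the note below
+            // on reproc_frame_release)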
+            for (uint32_t i = 0; i < num_bufs; i++) {
+                bufs[i] = *pp_job->src_frame->bufs[i];
+                pp_job->src_frame->bufs[i] = &bufs[i];
+            }
+            pp_job->src_reproc_bufs = bufs;
+        }
+
+        // Don't release the source frame after encoding;
+        // by that point the source channel will no longer exist.
+        pp_job->reproc_frame_release = true;
+        m_ongoingPPQ.enqueue((void *)pp_job);
+        rc = pChannel->doReprocessOffline(pp_job->src_frame);
+    } else {
+        m_ongoingPPQ.enqueue((void *)pp_job);
+        rc = pChannel->doReprocess(pp_job->src_frame);
+    }
+
+    if (NO_ERROR != rc) {
+        // remove from ongoing PP job Q
+        m_ongoingPPQ.dequeue(false);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopCapture
+ *
+ * DESCRIPTION: Trigger image capture stop
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stopCapture()
+{
+     int rc = NO_ERROR;
+
+     if (m_parent->isRegularCapture()) {
+        rc = m_parent->processAPI(
+                        QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,
+                        NULL);
+     }
+
+     return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegPaddingReq
+ *
+ * DESCRIPTION: get padding requirements for JPEG encoding
+ *
+ * PARAMETERS :
+ *   @padding_info : jpeg specific padding requirement
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
+{
+    // TODO: hardcode for now, needs to query from mm-jpeg-interface
+    padding_info.width_padding  = CAM_PAD_NONE;
+    padding_info.height_padding  = CAM_PAD_TO_16;
+    padding_info.plane_padding  = CAM_PAD_TO_WORD;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setYUVFrameInfo
+ *
+ * DESCRIPTION: set raw YUV frame data info for the upper layer
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : currently we return frame len, y offset, cbcr offset and frame format
+ *==========================================================================*/
+int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
+{
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        }
+    }
+
+    if (pChannel == NULL) {
+        ALOGE("%s:%d] No corresponding channel (ch_id = %d) exist, return here",
+              __func__, __LINE__, recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        QCameraStream *pStream =
+            pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                //get the main frame, use stream info
+                cam_frame_len_offset_t frame_offset;
+                cam_dimension_t frame_dim;
+                cam_format_t frame_fmt;
+                const char *fmt_string;
+                pStream->getFrameDimension(frame_dim);
+                pStream->getFrameOffset(frame_offset);
+                pStream->getFormat(frame_fmt);
+                fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
+
+                int cbcr_offset = (int32_t)frame_offset.mp[0].len -
+                        frame_dim.width * frame_dim.height;
+                m_parent->mParameters.set("snapshot-framelen", (int)frame_offset.frame_len);
+                m_parent->mParameters.set("snapshot-yoff", (int)frame_offset.mp[0].offset);
+                m_parent->mParameters.set("snapshot-cbcroff", cbcr_offset);
+                if (fmt_string != NULL) {
+                    m_parent->mParameters.set("snapshot-format", fmt_string);
+                } else {
+                    m_parent->mParameters.set("snapshot-format", "");
+                }
+
+                CDBG_HIGH("%s: frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
+                            __func__, frame_dim.width, frame_dim.height, frame_offset.mp[0].offset,
+                            cbcr_offset, fmt_string);
+                return NO_ERROR;
+            }
+        }
+    }
+
+    return BAD_VALUE;
+}
+
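+/*===========================================================================
+ * FUNCTION   : matchJobId
+ *
+ * DESCRIPTION: matcher function used when searching a job queue for the
+ *              jpeg job with a given job ID
+ *
+ * PARAMETERS :
+ *   @data       : ptr to a queued qcamera_jpeg_data_t entry
+ *   @match_data : ptr to the job ID to match against
+ *
+ * RETURN     : true if the entry's jobId equals the requested job ID
+ *==========================================================================*/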
+bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
+{
+    qcamera_jpeg_data_t *job = (qcamera_jpeg_data_t *)data;
+    uint32_t job_id = *((uint32_t *)match_data);
+    return job->jobId == job_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegMemory
+ *
+ * DESCRIPTION: buffer allocation function
+ *   to pass to jpeg interface
+ *
+ * PARAMETERS :
+ *   @out_buf : buffer descriptor struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
+{
+    CDBG_HIGH("%s: Allocating jpeg out buffer of size: %d", __func__, out_buf->size);
+    QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
+    camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(-1, out_buf->size, 1U,
+            procInst->m_parent->mCallbackCookie);
+    if (NULL == cam_mem) {
+        ALOGE("%s: Failed to allocate jpeg output buffer", __func__);
+        return NO_MEMORY;
+    }
+    out_buf->mem_hdl = cam_mem;
+    out_buf->vaddr = cam_mem->data;
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraExif
+ *
+ * DESCRIPTION: constructor of QCameraExif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::QCameraExif()
+    : m_nNumEntries(0)
+{
+    memset(m_Entries, 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraExif
+ *
+ * DESCRIPTION: destructor of QCameraExif. Will release internally allocated memory.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::~QCameraExif()
+{
+    for (uint32_t i = 0; i < m_nNumEntries; i++) {
+        switch (m_Entries[i].tag_entry.type) {
+        case EXIF_BYTE:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._bytes != NULL) {
+                    free(m_Entries[i].tag_entry.data._bytes);
+                    m_Entries[i].tag_entry.data._bytes = NULL;
+                }
+            }
+            break;
+        case EXIF_ASCII:
+            {
+                if (m_Entries[i].tag_entry.data._ascii != NULL) {
+                    free(m_Entries[i].tag_entry.data._ascii);
+                    m_Entries[i].tag_entry.data._ascii = NULL;
+                }
+            }
+            break;
+        case EXIF_SHORT:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._shorts != NULL) {
+                    free(m_Entries[i].tag_entry.data._shorts);
+                    m_Entries[i].tag_entry.data._shorts = NULL;
+                }
+            }
+            break;
+        case EXIF_LONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._longs != NULL) {
+                    free(m_Entries[i].tag_entry.data._longs);
+                    m_Entries[i].tag_entry.data._longs = NULL;
+                }
+            }
+            break;
+        case EXIF_RATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._rats != NULL) {
+                    free(m_Entries[i].tag_entry.data._rats);
+                    m_Entries[i].tag_entry.data._rats = NULL;
+                }
+            }
+            break;
+        case EXIF_UNDEFINED:
+            {
+                if (m_Entries[i].tag_entry.data._undefined != NULL) {
+                    free(m_Entries[i].tag_entry.data._undefined);
+                    m_Entries[i].tag_entry.data._undefined = NULL;
+                }
+            }
+            break;
+        case EXIF_SLONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._slongs != NULL) {
+                    free(m_Entries[i].tag_entry.data._slongs);
+                    m_Entries[i].tag_entry.data._slongs = NULL;
+                }
+            }
+            break;
+        case EXIF_SRATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._srats != NULL) {
+                    free(m_Entries[i].tag_entry.data._srats);
+                    m_Entries[i].tag_entry.data._srats = NULL;
+                }
+            }
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data elements, in units of its type
+ *   @data    : input data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
+                              exif_tag_type_t type,
+                              uint32_t count,
+                              void *data)
+{
+    int32_t rc = NO_ERROR;
+    if (m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return NO_MEMORY;
+    }
+
+    m_Entries[m_nNumEntries].tag_id = tagid;
+    m_Entries[m_nNumEntries].tag_entry.type = type;
+    m_Entries[m_nNumEntries].tag_entry.count = count;
+    m_Entries[m_nNumEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE:
+        {
+            if (count > 1) {
+                uint8_t *values = (uint8_t *)malloc(count);
+                if (values == NULL) {
+                    ALOGE("%s: No memory for byte array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
+            }
+        }
+        break;
+    case EXIF_ASCII:
+        {
+            char *str = NULL;
+            str = (char *)malloc(count + 1);
+            if (str == NULL) {
+                ALOGE("%s: No memory for ascii string", __func__);
+                rc = NO_MEMORY;
+            } else {
+                memset(str, 0, count + 1);
+                memcpy(str, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+            }
+        }
+        break;
+    case EXIF_SHORT:
+        {
+            if (count > 1) {
+                uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for short array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(uint16_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
+            }
+        }
+        break;
+    case EXIF_LONG:
+        {
+            if (count > 1) {
+                uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for long array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(uint32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
+            }
+        }
+        break;
+    case EXIF_RATIONAL:
+        {
+            if (count > 1) {
+                rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for rational array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(rat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
+            }
+        }
+        break;
+    case EXIF_UNDEFINED:
+        {
+            uint8_t *values = (uint8_t *)malloc(count);
+            if (values == NULL) {
+                ALOGE("%s: No memory for undefined array", __func__);
+                rc = NO_MEMORY;
+            } else {
+                memcpy(values, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+            }
+        }
+        break;
+    case EXIF_SLONG:
+        {
+            if (count > 1) {
+                int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for signed long array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(int32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
+            }
+        }
+        break;
+    case EXIF_SRATIONAL:
+        {
+            if (count > 1) {
+                srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for signed rational array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(srat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
+            }
+        }
+        break;
+    }
+
+    // Increase number of entries only if the entry data was stored successfully
+    if (rc == NO_ERROR) {
+        m_nNumEntries++;
+    }
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraPostProc.h b/msm8974/QCamera2/HAL/QCameraPostProc.h
new file mode 100644
index 0000000..7b85921
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraPostProc.h
@@ -0,0 +1,216 @@
+/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_POSTPROC_H__
+#define __QCAMERA_POSTPROC_H__
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+#include "QCamera2HWI.h"
+
+#define MAX_JPEG_BURST 2
+
+namespace qcamera {
+
+class QCameraExif;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    uint32_t client_hdl;             // handle of jpeg client (obtained when open jpeg)
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel after done)
+    mm_camera_super_buf_t *src_reproc_frame; // original source frame for reproc if not NULL
+    cam_metadata_info_t * metadata;  // source frame metadata
+    bool reproc_frame_release;       // false: release original buffer,
+                                     // true: don't release it
+    mm_camera_buf_def_t *src_reproc_bufs;
+    QCameraExif *pJpegExifObj;
+} qcamera_jpeg_data_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    mm_camera_super_buf_t *src_frame;// source frame
+    bool reproc_frame_release;       // false: release original buffer,
+                                     // true: don't release it
+    mm_camera_buf_def_t *src_reproc_bufs;
+} qcamera_pp_data_t;
+
+typedef struct {
+    mm_camera_super_buf_t *frame;    // source frame that needs post process
+} qcamera_pp_request_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID (obtained when start_jpeg_job)
+    jpeg_job_status_t status;        // jpeg encoding status
+    mm_jpeg_output_t out_data;         // ptr to jpeg output buf
+} qcamera_jpeg_evt_payload_t;
+
+typedef struct {
+    camera_memory_t *        data;     // ptr to data memory struct
+    mm_camera_super_buf_t *  frame;    // ptr to frame
+    QCameraMemory *          streamBufs; //ptr to stream buffers
+    bool                     unlinkFile; // unlink any stored buffers on error
+} qcamera_release_data_t;
+
+typedef struct {
+    int32_t                  msg_type; // msg type of data notify
+    camera_memory_t *        data;     // ptr to data memory struct
+    unsigned int             index;    // index of the buf in the whole buffer
+    camera_frame_metadata_t *metadata; // ptr to meta data
+    qcamera_release_data_t   release_data; // any data that needs to be released after notify
+} qcamera_data_argm_t;
+
+#define MAX_EXIF_TABLE_ENTRIES 17
+class QCameraExif
+{
+public:
+    QCameraExif();
+    virtual ~QCameraExif();
+
+    int32_t addEntry(exif_tag_id_t tagid,
+                     exif_tag_type_t type,
+                     uint32_t count,
+                     void *data);
+    uint32_t getNumOfEntries() {return m_nNumEntries;};
+    QEXIF_INFO_DATA *getEntries() {return m_Entries;};
+
+private:
+    QEXIF_INFO_DATA m_Entries[MAX_EXIF_TABLE_ENTRIES];  // exif tags for JPEG encoder
+    uint32_t  m_nNumEntries;                            // number of valid entries
+};
+
+class QCameraPostProcessor
+{
+public:
+    QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl);
+    virtual ~QCameraPostProcessor();
+
+    int32_t init(jpeg_encode_callback_t jpeg_cb, void *user_data);
+    int32_t deinit();
+    int32_t start(QCameraChannel *pSrcChannel);
+    int32_t stop();
+    int32_t processData(mm_camera_super_buf_t *frame);
+    int32_t processRawData(mm_camera_super_buf_t *frame);
+    int32_t processPPData(mm_camera_super_buf_t *frame);
+    int32_t processJpegEvt(qcamera_jpeg_evt_payload_t *evt);
+    int32_t getJpegPaddingReq(cam_padding_info_t &padding_info);
+    QCameraReprocessChannel * getReprocChannel() {return m_pReprocChannel;};
+    bool getMultipleStages() { return mMultipleStages; };
+    void setMultipleStages(bool stages) { mMultipleStages = stages; };
+    inline bool getJpegMemOpt() {return mJpegMemOpt;}
+    inline void setJpegMemOpt(bool val) {mJpegMemOpt = val;}
+    QCameraStream* getReprocStream() {return m_reprocStream;}
+private:
+    int32_t sendDataNotify(int32_t msg_type,
+                           camera_memory_t *data,
+                           uint8_t index,
+                           camera_frame_metadata_t *metadata,
+                           qcamera_release_data_t *release_data);
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    qcamera_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+    mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+    mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+    int32_t getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                  QCameraStream *main_stream,
+                                  QCameraStream *thumb_stream);
+    int32_t encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                       uint8_t &needNewSess);
+    int32_t queryStreams(QCameraStream **main,
+            QCameraStream **thumb,
+            mm_camera_buf_def_t **main_image,
+            mm_camera_buf_def_t **thumb_image,
+            mm_camera_super_buf_t *main_frame,
+            mm_camera_super_buf_t *reproc_frame);
+    int32_t syncStreamParams(mm_camera_super_buf_t *frame);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    static void releaseNotifyData(void *user_data,
+                                  void *cookie,
+                                  int32_t cb_status);
+    void releaseJpegJobData(qcamera_jpeg_data_t *job);
+    static void releaseSaveJobData(void *data, void *user_data);
+    static void releaseRawData(void *data, void *user_data);
+    int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+    static void releaseJpegData(void *data, void *user_data);
+    static void releasePPInputData(void *data, void *user_data);
+    static void releaseOngoingPPData(void *data, void *user_data);
+
+    static void *dataProcessRoutine(void *data);
+    static void *dataSaveRoutine(void *data);
+
+    int32_t setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame);
+    static bool matchJobId(void *data, void *user_data, void *match_data);
+    static int getJpegMemory(omx_jpeg_ouput_buf_t *out_buf);
+
+    int32_t reprocess(qcamera_pp_data_t *pp_job);
+    int32_t stopCapture();
+
+private:
+    QCamera2HardwareInterface *m_parent;
+    jpeg_encode_callback_t     mJpegCB;
+    void *                     mJpegUserData;
+    mm_jpeg_ops_t              mJpegHandle;
+    uint32_t                   mJpegClientHandle;
+    uint32_t                   mJpegSessionId;
+
+    void *                     m_pJpegOutputMem[MM_JPEG_MAX_BUF];
+    QCameraExif *              m_pJpegExifObj;
+    uint32_t                   m_bThumbnailNeeded;
+    QCameraReprocessChannel *  m_pReprocChannel;
+    QCameraReprocessChannel *  m_pDualReprocChannel;
+
+    int8_t                     m_bInited; // if postproc is inited
+
+    QCameraQueue m_inputPPQ;            // input queue for postproc
+    QCameraQueue m_ongoingPPQ;          // ongoing postproc queue
+    QCameraQueue m_inputJpegQ;          // input jpeg job queue
+    QCameraQueue m_ongoingJpegQ;        // ongoing jpeg job queue
+    QCameraQueue m_inputRawQ;           // input raw job queue
+    QCameraQueue m_inputSaveQ;          // input save job queue
+    QCameraCmdThread m_dataProcTh;      // thread for data processing
+    QCameraCmdThread m_saveProcTh;      // thread for storing buffers
+    uint32_t mSaveFrmCnt;               // save frame counter
+    static const char *STORE_LOCATION;  // path for storing buffers
+    bool mUseSaveProc;                  // use store thread
+    bool mUseJpegBurst;                 // use jpeg burst encoding mode
+    bool mJpegMemOpt;
+    uint8_t mNewJpegSessionNeeded;
+    bool mMultipleStages;               // multiple stages are present
+    uint32_t   m_JpegOutputMemCount;
+    QCameraStream *m_reprocStream;
+
+public:
+    cam_dimension_t m_dst_dim;
+    cam_dimension_t m_src_dim;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_POSTPROC_H__ */
diff --git a/msm8974/QCamera2/HAL/QCameraStateMachine.cpp b/msm8974/QCamera2/HAL/QCameraStateMachine.cpp
new file mode 100644
index 0000000..2a6915e
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraStateMachine.cpp
@@ -0,0 +1,2987 @@
+/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStateMachine"
+
+#include <utils/Errors.h>
+#include "QCamera2HWI.h"
+#include "QCameraStateMachine.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : smEvtProcRoutine
+ *
+ * DESCRIPTION: Statemachine process thread routine to handle events
+ *              in different state.
+ *
+ * PARAMETERS :
+ *   @data    : ptr to QCameraStateMachine object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStateMachine::smEvtProcRoutine(void *data)
+{
+    int running = 1, ret;
+    QCameraStateMachine *pme = (QCameraStateMachine *)data;
+
+    CDBG_HIGH("%s: E", __func__);
+    do {
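+        // block until a new command is posted on the semaphore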
+        do {
+            ret = cam_sem_wait(&pme->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        // first check API cmd queue
+        qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
+        if (node == NULL) {
+            // no API cmd, then check evt cmd queue
+            node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
+        }
+        if (node != NULL) {
+            switch (node->cmd) {
+            case QCAMERA_SM_CMD_TYPE_API:
+                pme->stateMachine(node->evt, node->evt_payload);
+                // API calls are handled synchronously; evt_payload is managed
+                // by HWI, so there is no need to free the payload for API
+                break;
+            case QCAMERA_SM_CMD_TYPE_EVT:
+                pme->stateMachine(node->evt, node->evt_payload);
+
+                // EVT is an async call, so the payload needs to be freed after use
+                free(node->evt_payload);
+                node->evt_payload = NULL;
+                break;
+            case QCAMERA_SM_CMD_TYPE_EXIT:
+                running = 0;
+                break;
+            default:
+                break;
+            }
+            free(node);
+            node = NULL;
+        }
+    } while (running);
+    CDBG_HIGH("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStateMachine
+ *
+ * DESCRIPTION: constructor of QCameraStateMachine. Will start process thread
+ *
+ * PARAMETERS :
+ *   @ctrl    : ptr to HWI object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
+    api_queue(),
+    evt_queue()
+{
+    m_parent = ctrl;
+    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+    cmd_pid = 0;
+    cam_sem_init(&cmd_sem, 0);
+    pthread_create(&cmd_pid,
+                   NULL,
+                   smEvtProcRoutine,
+                   this);
+    pthread_setname_np(cmd_pid, "CAM_stMachine");
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStateMachine
+ *
+ * DESCRIPTION: destructor of QCameraStateMachine. Releases the command
+ *              semaphore; the process thread itself is stopped via releaseThread().
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::~QCameraStateMachine()
+{
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseThread
+ *
+ * DESCRIPTION: Sends an exit command and terminates the state machine thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStateMachine::releaseThread()
+{
+    if (cmd_pid != 0) {
+        qcamera_sm_cmd_t *node =
+            (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(qcamera_sm_cmd_t));
+            node->cmd = QCAMERA_SM_CMD_TYPE_EXIT;
+
+            api_queue.enqueue((void *)node);
+            cam_sem_post(&cmd_sem);
+
+            /* wait until cmd thread exits */
+            if (pthread_join(cmd_pid, NULL) != 0) {
+                CDBG_HIGH("%s: pthread dead already\n", __func__);
+            }
+        }
+        cmd_pid = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : procAPI
+ *
+ * DESCRIPTION: process incoming API request from framework layer.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @api_payload  : API payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
+                                     void *api_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for qcamera_sm_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_API;
+    node->evt = evt;
+    node->evt_payload = api_payload;
+    if (api_queue.enqueue((void *)node)) {
+        cam_sem_post(&cmd_sem);
+        return NO_ERROR;
+    } else {
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvt
+ *
+ * DESCRIPTION: process incoming event from mm-camera-interface and
+ *              mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @evt_payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvt(qcamera_sm_evt_enum_t evt,
+                                     void *evt_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for qcamera_sm_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_EVT;
+    node->evt = evt;
+    node->evt_payload = evt_payload;
+    if (evt_queue.enqueue((void *)node)) {
+        cam_sem_post(&cmd_sem);
+        return NO_ERROR;
+    } else {
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : stateMachine
+ *
+ * DESCRIPTION: finite state machine entry function. Depends on state,
+ *              incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
+{
+    int32_t rc = NO_ERROR;
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+        rc = procEvtPreviewStoppedState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        rc = procEvtPreviewReadyState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEWING:
+        rc = procEvtPreviewingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+        rc = procEvtPrepareSnapshotState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PIC_TAKING:
+        rc = procEvtPicTakingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_RECORDING:
+        rc = procEvtRecordingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+        rc = procEvtVideoPicTakingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        rc = procEvtPreviewPicTakingState(evt, payload);
+        break;
+    default:
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewStoppedState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_STOPPED.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            rc = m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (needRestart) {
+                // Clear memory pools
+                m_parent->m_memoryPool.clear();
+            }
+            if (rc == NO_ERROR) {
+                rc = m_parent->commitParameterChanges();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            if (m_parent->mPreviewWindow == NULL) {
+                rc = m_parent->preparePreview();
+                if(rc == NO_ERROR) {
+                    // preview window is not set yet, move to previewReady state
+                    m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+                } else {
+                    ALOGE("%s: preparePreview failed",__func__);
+                }
+            } else {
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    } else {
+                        // start preview success, move to previewing state
+                        m_state = QCAMERA_SM_STATE_PREVIEWING;
+                    }
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // no op needed here
+            CDBG_HIGH("%s: already in preview stopped state, do nothing", __func__);
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            rc = m_parent->release();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            ALOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
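+        // falls through: after freeing the video handle, the event is still
+        // reported as unsupported in this state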
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            // no op needed here
+            CDBG_HIGH("%s: No ops for evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewReadyState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_READY.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt,
+                                                      void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            if (m_parent->mPreviewWindow != NULL) {
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need restart preview for parameters to take effect
+                    m_parent->unpreparePreview();
+                    // Clear memory pools
+                    m_parent->m_memoryPool.clear();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // prepare preview again
+                    rc = m_parent->preparePreview();
+                    if (rc != NO_ERROR) {
+                        m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            // no ops here
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            m_parent->unpreparePreview();
+            rc = 0;
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = 0;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            ALOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
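+        // no break here: control falls through to the INVALID_OPERATION
+        // handling below, which appears intentional so the caller is still signaled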
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid server event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEWING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewingState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window during previewing
+            ALOGE("Cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need to restart preview for parameters to take effect
+                    // stop preview
+                    m_parent->stopPreview();
+                    // Clear memory pools
+                    m_parent->m_memoryPool.clear();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                    if (rc != NO_ERROR) {
+                        m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            // no ops here
+            CDBG_HIGH("%s: Already in previewing, no ops here to start preview", __func__);
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            rc = m_parent->startRecording();
+            if (rc == NO_ERROR) {
+                // move state to recording state
+                m_state = QCAMERA_SM_STATE_RECORDING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            rc = m_parent->prepareHardwareForSnapshot(FALSE);
+            if (rc == NO_ERROR) {
+                // Do not signal API result in this case.
+                // Need to wait for snapshot done in metadata.
+                m_state = QCAMERA_SM_STATE_PREPARE_SNAPSHOT;
+            } else {
+                // Do not change state in this case.
+                ALOGE("%s: prepareHardwareForSnapshot failed %d",
+                    __func__, rc);
+
+                result.status = rc;
+                result.request_api = evt;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            if (m_parent->mParameters.getRecordingHintValue() == true) {
+                m_parent->stopPreview();
+                m_parent->mParameters.updateRecordingHintValue(FALSE);
+                // start preview again
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    }
+                }
+            }
+            if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+                m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+                rc = m_parent->takePicture();
+                if (rc != NO_ERROR) {
+                    // move state to previewing state
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            } else {
+                m_state = QCAMERA_SM_STATE_PIC_TAKING;
+                rc = m_parent->takePicture();
+                if (rc != NO_ERROR) {
+                    // move state to preview stopped state
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+#ifndef VANILLA_HAL
+            if (CAMERA_CMD_LONGSHOT_ON == cmd_payload->cmd) {
+                if (QCAMERA_SM_EVT_RESTART_PERVIEW == cmd_payload->arg1) {
+                    m_parent->stopPreview();
+                    // Clear memory pools
+                    m_parent->m_memoryPool.clear();
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                }
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            ALOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
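+        // no break here: control falls through to the INVALID_OPERATION
+        // handling below, which appears intentional so the caller is still signaled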
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                CDBG_HIGH("%s: no handling for server evt (%d) at this state",
+                      __func__, cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPrepareSnapshotState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREPARE_SNAPSHOT.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+    case QCAMERA_SM_EVT_SET_PARAMS:
+    case QCAMERA_SM_EVT_GET_PARAMS:
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+    case QCAMERA_SM_EVT_DUMP:
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                CDBG("%s: Received QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event",
+                    __func__);
+                m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+                result.status = NO_ERROR;
+                result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid server event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window in this state
+            ALOGE("Cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                rc = m_parent->commitParameterChanges();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // cancel picture first
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+#ifndef VANILLA_HAL
+            if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+                // move state to previewing state
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            if (m_parent->isLongshotEnabled()) {
+                rc = m_parent->longShot();
+            } else {
+                ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_REPROCESS_STAGE_DONE:
+                {
+                    if ( m_parent->isLongshotEnabled() ) {
+                        if(!m_parent->m_postprocessor.getMultipleStages()) {
+                            m_parent->m_postprocessor.setMultipleStages(true);
+                        }
+                        m_parent->playShutter();
+                    }
+                }
+                break;
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                CDBG_HIGH("%s: no handling for server evt (%d) at this state",
+                      __func__, cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL:
+        {
+            bool restartPreview = m_parent->isPreviewRestartEnabled();
+            rc = m_parent->stopCaptureChannel(restartPreview);
+
+            if (restartPreview && (NO_ERROR == rc)) {
+                rc = m_parent->preparePreview();
+                if (NO_ERROR == rc) {
+                    m_parent->m_bPreviewStarted = true;
+                    rc = m_parent->startPreview();
+                }
+            }
+
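+            // note: unlike the other API events handled in this state,
+            // this case does not signal an API result back to the caller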
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelPicture();
+
+            bool restartPreview = m_parent->isPreviewRestartEnabled();
+            if (restartPreview) {
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            } else {
+                m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtRecordingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_RECORDING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtRecordingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Workaround: the CTS VideoSnapshot test tries to
+            //     start preview during video recording.
+            CDBG_HIGH("CTS video restart op");
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    ALOGE("%s: Cannot set parameters that require a restart during recording",
+                          __func__);
+                    rc = BAD_VALUE;
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
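+            // move to video-pic-taking up front; reverted below if the live snapshot fails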
+            m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+            rc = m_parent->takeLiveSnapshot();
+            if (rc != NO_ERROR) {
+                m_state = QCAMERA_SM_STATE_RECORDING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            // no ops here
+            CDBG_HIGH("%s: already in recording state, no ops for start_recording", __func__);
+            rc = 0;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            // In video snapshot, prepare hardware is a no-op.
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid server event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtVideoPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_VIDEO_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window in this state
+            ALOGE("Cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    ALOGE("%s: Cannot set parameters that require a restart during recording",
+                          __func__);
+                    rc = BAD_VALUE;
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                          void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need restart preview for parameters to take effect
+                    // stop preview
+                    m_parent->stopPreview();
+                    // Clear memory pools
+                    m_parent->m_memoryPool.clear();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                    if (rc != NO_ERROR) {
+                        m_state = QCAMERA_SM_STATE_PIC_TAKING;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+#ifndef VANILLA_HAL
+            if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+                // move state to previewing state
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            if (m_parent->isZSLMode()) {
+                // cancel picture first
+                rc = m_parent->cancelPicture();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_ZSL);
+            } else if (m_parent->isLongshotEnabled()) {
+                // just cancel picture
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+            }
+            // unprepare preview
+            m_parent->unpreparePreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            if (m_parent->isZSLMode()) {
+                ALOGE("%s: cannot handle evt(%d) in state(%d) in ZSL mode",
+                      __func__, evt, m_state);
+                rc = INVALID_OPERATION;
+            } else if (m_parent->isLongshotEnabled()) {
+                ALOGE("%s: cannot handle evt(%d) in state(%d) in Longshot mode",
+                      __func__, evt, m_state);
+                rc = INVALID_OPERATION;
+            } else {
+                rc = m_parent->startRecording();
+                if (rc == NO_ERROR) {
+                    m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            if ( m_parent->isLongshotEnabled() ) {
+               rc = m_parent->longShot();
+            } else {
+                ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->processAWBUpdate(internal_evt->awb_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_REPROCESS_STAGE_DONE:
+                {
+                    if ( m_parent->isLongshotEnabled() ) {
+                        if(!m_parent->m_postprocessor.getMultipleStages()) {
+                            m_parent->m_postprocessor.setMultipleStages(true);
+                        }
+                        m_parent->playShutter();
+                    }
+                }
+                break;
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *((qcamera_thermal_level_enum_t *)payload));
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
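+/*===========================================================================
+ * NOTE       : (editorial sketch, not part of the original change)
+ *              The API cases in the handlers above are driven by
+ *              QCameraStateMachine::procAPI(). For QCAMERA_SM_EVT_SET_PARAMS
+ *              the payload is simply the parameter string that
+ *              updateParameters() consumes, so the caller side looks roughly
+ *              like this (m_stateMachine and paramString are placeholders
+ *              owned by the caller):
+ *
+ *                  m_stateMachine.procAPI(QCAMERA_SM_EVT_SET_PARAMS,
+ *                                         (void *)paramString);
+ *                  // the calling thread then typically waits for the
+ *                  // matching signalAPIResult() to deliver the result
+ *==========================================================================*/
+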
+/*===========================================================================
+ * FUNCTION   : isPreviewRunning
+ *
+ * DESCRIPTION: check if preview is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- preview running
+ *              false -- preview stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEWING:
+    case QCAMERA_SM_STATE_RECORDING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewReady
+ *
+ * DESCRIPTION: check if preview is in ready state.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- preview is in ready state
+ *              false -- preview is not in ready state
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewReady()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- capture running
+ *              false -- capture stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isCaptureRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PIC_TAKING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isNonZSLCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in progress in non-ZSL mode.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- capture running in non-ZSL mode
+ *              false -- either not capturing or capture is not running in non-ZSL mode
+ *==========================================================================*/
+bool QCameraStateMachine::isNonZSLCaptureRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PIC_TAKING:
+        return true;
+    default:
+        return false;
+    }
+}
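+
+/*===========================================================================
+ * NOTE       : (editorial sketch, not part of the original change)
+ *              The predicates above are intended as guards for the owning
+ *              hardware interface, e.g.
+ *
+ *                  if (m_stateMachine.isPreviewRunning()) {
+ *                      // preview (or recording/ZSL) is up, so preview
+ *                      // related resources may be touched here
+ *                  }
+ *==========================================================================*/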
+
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraStateMachine.h b/msm8974/QCamera2/HAL/QCameraStateMachine.h
new file mode 100644
index 0000000..bc31be4
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraStateMachine.h
@@ -0,0 +1,228 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STATEMACHINE_H__
+#define __QCAMERA_STATEMACHINE_H__
+
+#include <pthread.h>
+
+#include <cam_semaphore.h>
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+#include "QCameraQueue.h"
+#include "QCameraChannel.h"
+
+namespace qcamera {
+
+class QCamera2HardwareInterface;
+
+typedef enum {
+    /*******BEGIN OF: API EVT*********/
+    QCAMERA_SM_EVT_SET_PREVIEW_WINDOW = 1,   // set preview window
+    QCAMERA_SM_EVT_SET_CALLBACKS,            // set callbacks
+    QCAMERA_SM_EVT_ENABLE_MSG_TYPE,          // enable msg type
+    QCAMERA_SM_EVT_DISABLE_MSG_TYPE,         // disable msg type
+    QCAMERA_SM_EVT_MSG_TYPE_ENABLED,         // query whether a certain msg type is enabled
+
+    QCAMERA_SM_EVT_SET_PARAMS,               // set parameters
+    QCAMERA_SM_EVT_GET_PARAMS,               // get parameters
+    QCAMERA_SM_EVT_PUT_PARAMS,               // put parameters, release param buf
+
+    QCAMERA_SM_EVT_START_PREVIEW,            // start preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW,  // start no display preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_STOP_PREVIEW,             // stop preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_PREVIEW_ENABLED,          // query if preview is running
+
+    QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS,   // request to store meta data in video buffers
+    QCAMERA_SM_EVT_START_RECORDING,          // start recording
+    QCAMERA_SM_EVT_STOP_RECORDING,           // stop recording
+    QCAMERA_SM_EVT_RECORDING_ENABLED,        // query if recording is running
+    QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME,  // release recording frame
+
+    QCAMERA_SM_EVT_PREPARE_SNAPSHOT,         // prepare snapshot in case LED needs to be flashed
+    QCAMERA_SM_EVT_TAKE_PICTURE,             // take picture (zsl, regular capture, live snapshot)
+    QCAMERA_SM_EVT_CANCEL_PICTURE,           // cancel picture
+
+    QCAMERA_SM_EVT_START_AUTO_FOCUS,         // start auto focus
+    QCAMERA_SM_EVT_STOP_AUTO_FOCUS,          // stop auto focus
+    QCAMERA_SM_EVT_SEND_COMMAND,             // send command
+
+    QCAMERA_SM_EVT_RELEASE,                  // release camera resource
+    QCAMERA_SM_EVT_DUMP,                     // dump
+    QCAMERA_SM_EVT_REG_FACE_IMAGE,           // register a face image in imaging lib
+    /*******END OF: API EVT*********/
+
+    QCAMERA_SM_EVT_EVT_INTERNAL,             // internal evt notify
+    QCAMERA_SM_EVT_EVT_NOTIFY,               // evt notify from server
+    QCAMERA_SM_EVT_JPEG_EVT_NOTIFY,          // evt notify from jpeg
+    QCAMERA_SM_EVT_SNAPSHOT_DONE,            // internal evt that snapshot is done
+    QCAMERA_SM_EVT_THERMAL_NOTIFY,           // evt notify from thermal daemon
+    QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,     // stop capture channel
+    QCAMERA_SM_EVT_RESTART_PERVIEW,          // internal preview restart
+    QCAMERA_SM_EVT_MAX
+} qcamera_sm_evt_enum_t;
+
+typedef enum {
+    QCAMERA_API_RESULT_TYPE_DEF,             // default type, no additional info
+    QCAMERA_API_RESULT_TYPE_ENABLE_FLAG,     // msg_enabled, preview_enabled, recording_enabled
+    QCAMERA_API_RESULT_TYPE_PARAMS,          // returned parameters in string
+    QCAMERA_API_RESULT_TYPE_HANDLE,          // returned handle in int
+    QCAMERA_API_RESULT_TYPE_MAX
+} qcamera_api_result_type_t;
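+
+// Editorial sketch (not part of the original change): a small helper mapping an
+// API result type to a short label for debug logging; it depends only on the
+// enum defined above.
+static inline const char *qcamera_api_result_type_name(qcamera_api_result_type_t type)
+{
+    switch (type) {
+    case QCAMERA_API_RESULT_TYPE_DEF:         return "DEF";
+    case QCAMERA_API_RESULT_TYPE_ENABLE_FLAG: return "ENABLE_FLAG";
+    case QCAMERA_API_RESULT_TYPE_PARAMS:      return "PARAMS";
+    case QCAMERA_API_RESULT_TYPE_HANDLE:      return "HANDLE";
+    default:                                  return "UNKNOWN";
+    }
+}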
+
+typedef struct {
+    int32_t status;                          // api call status
+    qcamera_sm_evt_enum_t request_api;       // api evt requested
+    qcamera_api_result_type_t result_type;   // result type
+    union {
+        int enabled;                          // result_type == QCAMERA_API_RESULT_TYPE_ENABLE_FLAG
+        char *params;                         // result_type == QCAMERA_API_RESULT_TYPE_PARAMS
+        int handle;                           // result_type == QCAMERA_API_RESULT_TYPE_HANDLE
+    };
+} qcamera_api_result_t;
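+
+// Editorial sketch (not part of the original change): nearly every state handler
+// in QCameraStateMachine.cpp fills the same default fields before calling
+// signalAPIResult(); a helper like this captures that recurring pattern.
+static inline void qcamera_sm_fill_default_result(qcamera_api_result_t *result,
+                                                  int32_t status,
+                                                  qcamera_sm_evt_enum_t request_api)
+{
+    result->status = status;
+    result->request_api = request_api;
+    result->result_type = QCAMERA_API_RESULT_TYPE_DEF;
+    result->enabled = 0;   // clear the union payload used by other result types
+}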
+
+typedef struct api_result_list {
+    qcamera_api_result_t result;
+    struct api_result_list *next;
+} api_result_list;
+
+// definition for payload type of setting callback
+typedef struct {
+    camera_notify_callback notify_cb;
+    camera_data_callback data_cb;
+    camera_data_timestamp_callback data_cb_timestamp;
+    camera_request_memory get_memory;
+    void *user;
+} qcamera_sm_evt_setcb_payload_t;
+
+// definition for payload type of sending command
+typedef struct {
+    int32_t cmd;
+    int32_t arg1;
+    int32_t arg2;
+} qcamera_sm_evt_command_payload_t;
+
+// definition for payload type of registering a face image
+typedef struct {
+    void *img_ptr;
+    cam_pp_offline_src_config_t *config;
+} qcamera_sm_evt_reg_face_payload_t;
+
+typedef enum {
+    QCAMERA_INTERNAL_EVT_FOCUS_UPDATE,       // focus updating result
+    QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE, // prepare snapshot done
+    QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT, // face detection result
+    QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS,    // histogram
+    QCAMERA_INTERNAL_EVT_CROP_INFO,          // crop info
+    QCAMERA_INTERNAL_EVT_ASD_UPDATE,         // asd update result
+    QCAMERA_INTERNAL_EVT_AWB_UPDATE,         // awb update result
+    QCAMERA_INTERNAL_EVT_MAX
+} qcamera_internal_evt_type_t;
+
+typedef struct {
+    qcamera_internal_evt_type_t evt_type;
+    union {
+        cam_auto_focus_data_t focus_data;
+        cam_prep_snapshot_state_t prep_snapshot_state;
+        cam_face_detection_data_t faces_data;
+        cam_hist_stats_t stats_data;
+        cam_crop_data_t crop_data;
+        cam_auto_scene_t asd_data;
+        cam_awb_params_t awb_data;
+    };
+} qcamera_sm_internal_evt_payload_t;
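+
+// Editorial note (illustrative sketch, not part of the original change): internal
+// events are heap-allocated by the caller, filled in, and handed to
+// QCameraStateMachine::procEvt() declared below; payload ownership follows the
+// implementation in QCameraStateMachine.cpp and is not shown here. Roughly:
+//
+//     qcamera_sm_internal_evt_payload_t *evt = (qcamera_sm_internal_evt_payload_t *)
+//         malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+//     if (evt != NULL) {
+//         memset(evt, 0, sizeof(*evt));
+//         evt->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
+//         evt->focus_data = focus_data;        // from the camera backend event
+//         m_stateMachine.procEvt(QCAMERA_SM_EVT_EVT_INTERNAL, evt);
+//     }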
+
+class QCameraStateMachine
+{
+public:
+    QCameraStateMachine(QCamera2HardwareInterface *ctrl);
+    virtual ~QCameraStateMachine();
+    int32_t procAPI(qcamera_sm_evt_enum_t evt, void *api_payload);
+    int32_t procEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+
+    bool isPreviewRunning(); // check if preview is running
+    bool isPreviewReady(); // check if preview is ready
+    bool isCaptureRunning(); // check if image capture is running
+    bool isNonZSLCaptureRunning(); // check if image capture is running in non ZSL mode
+    void releaseThread();
+
+private:
+    typedef enum {
+        QCAMERA_SM_STATE_PREVIEW_STOPPED,          // preview is stopped
+        QCAMERA_SM_STATE_PREVIEW_READY,            // preview started but preview window is not set yet
+        QCAMERA_SM_STATE_PREVIEWING,               // previewing
+        QCAMERA_SM_STATE_PREPARE_SNAPSHOT,         // prepare snapshot in case aec estimation is
+                                                   // needed for LED flash
+        QCAMERA_SM_STATE_PIC_TAKING,               // taking picture (preview stopped)
+        QCAMERA_SM_STATE_RECORDING,                // recording (preview running)
+        QCAMERA_SM_STATE_VIDEO_PIC_TAKING,         // taking live snapshot during recording (preview running)
+        QCAMERA_SM_STATE_PREVIEW_PIC_TAKING        // taking ZSL/live snapshot (recording stopped but preview running)
+    } qcamera_state_enum_t;
+
+    typedef enum
+    {
+        QCAMERA_SM_CMD_TYPE_API,                   // cmd from API
+        QCAMERA_SM_CMD_TYPE_EVT,                   // cmd from mm-camera-interface/mm-jpeg-interface event
+        QCAMERA_SM_CMD_TYPE_EXIT,                  // cmd for exiting statemachine cmdThread
+        QCAMERA_SM_CMD_TYPE_MAX
+    } qcamera_sm_cmd_type_t;
+
+    typedef struct {
+        qcamera_sm_cmd_type_t cmd;                  // cmd type (where it comes from)
+        qcamera_sm_evt_enum_t evt;                  // event type
+        void *evt_payload;                          // ptr to payload
+    } qcamera_sm_cmd_t;
+
+    int32_t stateMachine(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtRecordingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+
+    // main statemachine process routine
+    static void *smEvtProcRoutine(void *data);
+
+    QCamera2HardwareInterface *m_parent;  // ptr to HWI
+    qcamera_state_enum_t m_state;         // statemachine state
+    QCameraQueue api_queue;               // cmd queue for APIs
+    QCameraQueue evt_queue;               // cmd queue for evt from mm-camera-intf/mm-jpeg-intf
+    pthread_t cmd_pid;                    // cmd thread ID
+    cam_semaphore_t cmd_sem;              // semaphore for cmd thread
+
+};
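+
+/*===========================================================================
+ * NOTE       : (editorial sketch, not part of the original change)
+ *              Typical lifecycle, with the hardware interface as the owner:
+ *
+ *                  QCameraStateMachine sm(this);
+ *                  sm.procAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
+ *                  ...                          // camera session runs
+ *                  sm.releaseThread();          // tear down the cmd thread
+ *
+ *              procAPI() feeds api_queue, procEvt() feeds evt_queue, and the
+ *              cmd thread drains both into stateMachine().
+ *==========================================================================*/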
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STATEMACHINE_H__ */
diff --git a/msm8974/QCamera2/HAL/QCameraStream.cpp b/msm8974/QCamera2/HAL/QCameraStream.cpp
new file mode 100644
index 0000000..71d04e2
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraStream.cpp
@@ -0,0 +1,1730 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStream"
+
+#include <utils/Errors.h>
+#include "QCamera2HWI.h"
+#include "QCameraStream.h"
+
+// Media dependencies
+#ifdef USE_MEDIA_EXTENSIONS
+#include <media/hardware/HardwareAPI.h>
+typedef struct VideoNativeHandleMetadata media_metadata_buffer;
+#else
+#include "QComOMXMetadata.h"
+typedef struct encoder_media_buffer_type media_metadata_buffer;
+#endif
+
+
+#define CAMERA_MIN_ALLOCATED_BUFFERS     3
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : get_bufs
+ *
+ * DESCRIPTION: static function entry to allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->getBufs(offset, num_bufs, initial_reg_flag, bufs, ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : get_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to allocate deferred stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs_deffered(
+        cam_frame_len_offset_t * /* offset */,
+        uint8_t *num_bufs,
+        uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        mm_camera_map_unmap_ops_tbl_t * /* ops_tbl */,
+        void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream || !stream->mRegFlags || !stream->mBufDefs) {
+        ALOGE("getBufs invalid stream pointer or mRegFlags/mBufDefs==NULL");
+        return NO_MEMORY;
+    }
+
+    *initial_reg_flag   = stream->mRegFlags;
+    *num_bufs           = stream->mNumBufs;
+    *bufs               = stream->mBufDefs;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs
+ *
+ * DESCRIPTION: static function entry to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs(
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+        void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("putBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->putBufs(ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to deallocate deferred stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs_deffered(
+                     mm_camera_map_unmap_ops_tbl_t * /*ops_tbl */,
+                     void * /*user_data*/ )
+{
+    // No op
+    // Used for handling buffers with deferred allocation. They are freed separately.
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidate_buf
+ *
+ * DESCRIPTION: static function entry to invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidate_buf(uint32_t index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->invalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to clean invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::clean_invalidate_buf(uint32_t index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->cleanInvalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStream
+ *
+ * DESCRIPTION: constructor of QCameraStream
+ *
+ * PARAMETERS :
+ *   @allocator  : memory allocator obj
+ *   @camHandle  : camera handle
+ *   @chId       : channel handle
+ *   @camOps     : ptr to camera ops table
+ *   @paddingInfo: ptr to padding info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::QCameraStream(QCameraAllocator &allocator,
+                             uint32_t camHandle,
+                             uint32_t chId,
+                             mm_camera_ops_t *camOps,
+                             cam_padding_info_t *paddingInfo,
+                             bool deffered) :
+        mDumpFrame(0),
+        mDumpMetaFrame(0),
+        mDumpSkipCnt(0),
+        mCamHandle(camHandle),
+        mChannelHandle(chId),
+        mHandle(0),
+        mCamOps(camOps),
+        mStreamInfo(NULL),
+        mNumBufs(0),
+        mNumBufsNeedAlloc(0),
+        mRegFlags(NULL),
+        mDataCB(NULL),
+        mUserData(NULL),
+        mDataQ(releaseFrameData, this),
+        mStreamInfoBuf(NULL),
+        mStreamBufs(NULL),
+        mAllocator(allocator),
+        mBufDefs(NULL),
+        mStreamBufsAcquired(false),
+        m_bActive(false),
+        mDynBufAlloc(false),
+        mBufAllocPid(0),
+        mDefferedAllocation(deffered),
+        wait_for_cond(false)
+{
+    mMemVtbl.user_data = this;
+    if ( !deffered ) {
+        mMemVtbl.get_bufs = get_bufs;
+        mMemVtbl.put_bufs = put_bufs;
+    } else {
+        mMemVtbl.get_bufs = get_bufs_deffered;
+        mMemVtbl.put_bufs = put_bufs_deffered;
+    }
+    mMemVtbl.invalidate_buf = invalidate_buf;
+    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+    memset(&mCropInfo, 0, sizeof(cam_rect_t));
+    memset(&m_MemOpsTbl, 0, sizeof(mm_camera_map_unmap_ops_tbl_t));
+    memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+    memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+    pthread_mutex_init(&mCropLock, NULL);
+    pthread_mutex_init(&mParameterLock, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStream
+ *
+ * DESCRIPTION: destructor of QCameraStream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::~QCameraStream()
+{
+    pthread_mutex_destroy(&mCropLock);
+    pthread_mutex_destroy(&mParameterLock);
+
+    if (mDefferedAllocation) {
+        mStreamBufsAcquired = false;
+        releaseBuffs();
+    }
+
+    unmapStreamInfoBuf();
+    releaseStreamInfoBuf();
+
+    // delete stream
+    if (mHandle > 0) {
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+        mHandle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapStreamInfoBuf
+ *
+ * DESCRIPTION: Unmap stream info buffer
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapStreamInfoBuf()
+{
+    int rc = NO_ERROR;
+
+    if (mStreamInfoBuf != NULL) {
+        rc = mCamOps->unmap_stream_buf(mCamHandle,
+            mChannelHandle,
+            mHandle,
+            CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+            0,
+            -1);
+
+        if (rc < 0) {
+            ALOGE("Failed to unmap stream info buffer");
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseStreamInfoBuf
+ *
+ * DESCRIPTION: Release stream info buffer
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseStreamInfoBuf()
+{
+    int rc = NO_ERROR;
+
+    if (mStreamInfoBuf != NULL) {
+        mStreamInfoBuf->deallocate();
+        delete mStreamInfoBuf;
+        mStreamInfoBuf = NULL;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deleteStream
+ *
+ * DESCRIPTION: Deletes a camera stream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraStream::deleteStream()
+{
+    if (mHandle > 0) {
+        acquireStreamBufs();
+        releaseBuffs();
+        unmapStreamInfoBuf();
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize stream obj
+ *
+ * PARAMETERS :
+ *   @streamInfoBuf: ptr to buf that contains stream info
+ *   @stream_cb    : stream data notify callback. Can be NULL if not needed
+ *   @userdata     : user data ptr
+ *   @bDynallocBuf : flag to indicate if buffer allocation can be in 2 steps
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::init(QCameraHeapMemory *streamInfoBuf,
+                            uint8_t minNumBuffers,
+                            stream_cb_routine stream_cb,
+                            void *userdata,
+                            bool bDynallocBuf)
+{
+    int32_t rc = OK;
+    ssize_t bufSize = BAD_INDEX;
+
+    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+    if (!mHandle) {
+        ALOGE("add_stream failed");
+        rc = UNKNOWN_ERROR;
+        goto done;
+    }
+
+    // assign and map stream info memory
+    mStreamInfoBuf = streamInfoBuf;
+    mStreamInfo = reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+    mNumBufs = minNumBuffers;
+
+    bufSize = mStreamInfoBuf->getSize(0);
+    if (BAD_INDEX != bufSize) {
+        rc = mCamOps->map_stream_buf(mCamHandle,
+                mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                0, -1, mStreamInfoBuf->getFd(0), (uint32_t)bufSize);
+        if (rc < 0) {
+            ALOGE("Failed to map stream info buffer");
+            goto err1;
+        }
+    } else {
+        ALOGE("Failed to retrieve buffer size (bad index)");
+        rc = UNKNOWN_ERROR;
+        goto err1;
+    }
+
+    // Calculate buffer size for deferred allocation
+    if (mDefferedAllocation) {
+        rc = calcOffset(mStreamInfo);
+        if (rc < 0) {
+            ALOGE("%s : Failed to calculate stream offset", __func__);
+            goto err1;
+        }
+    } else {
+        rc = configStream();
+        if (rc < 0) {
+            ALOGE("%s : Failed to config stream ", __func__);
+            goto err1;
+        }
+    }
+
+    mDataCB = stream_cb;
+    mUserData = userdata;
+    mDynBufAlloc = bDynallocBuf;
+    return 0;
+
+err1:
+    mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    mHandle = 0;
+    mStreamInfoBuf = NULL;
+    mStreamInfo = NULL;
+    mNumBufs = 0;
+done:
+    return rc;
+}
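+
+/*===========================================================================
+ * NOTE       : (editorial sketch, not part of the original change)
+ *              A channel typically drives a stream through roughly this
+ *              sequence; allocator, streamInfoBuf and the callback arguments
+ *              are placeholders for objects owned by the caller.
+ *
+ *                  QCameraStream *stream = new QCameraStream(allocator,
+ *                          camHandle, chId, camOps, &paddingInfo, deferred);
+ *                  rc = stream->init(streamInfoBuf, minNumBuffers,
+ *                                    stream_cb, userdata, bDynallocBuf);
+ *                  if (rc == NO_ERROR) {
+ *                      rc = stream->start();    // launch processing thread
+ *                  }
+ *                  ...
+ *                  stream->stop();
+ *                  delete stream;
+ *==========================================================================*/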
+
+/*===========================================================================
+ * FUNCTION   : calcOffset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @streamInfo  : stream information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t QCameraStream::calcOffset(cam_stream_info_t *streamInfo)
+{
+    int32_t rc = 0;
+
+    cam_dimension_t dim = streamInfo->dim;
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_CPP &&
+            streamInfo->stream_type != CAM_STREAM_TYPE_VIDEO) {
+        if (streamInfo->pp_config.rotation == ROTATE_90 ||
+                streamInfo->pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            dim.width = streamInfo->dim.height;
+            dim.height = streamInfo->dim.width;
+        }
+    }
+
+    switch (streamInfo->stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        rc = mm_stream_calc_offset_preview(streamInfo->fmt,
+                &dim,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_post_view(streamInfo->fmt,
+                &dim,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(streamInfo,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_postproc(streamInfo,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(&dim,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(streamInfo->fmt,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    default:
+        ALOGE("%s: not supported for stream type %d",
+                __func__, streamInfo->stream_type);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start stream. Will start main stream thread to handle stream
+ *              related ops.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::start()
+{
+    int32_t rc = 0;
+    rc = mProcTh.launch(dataProcRoutine, this);
+    if (rc == NO_ERROR) {
+        m_bActive = true;
+    }
+    pthread_mutex_init(&m_lock, NULL);
+    pthread_cond_init(&m_cond, NULL);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop stream. Will stop main stream thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::stop()
+{
+    int32_t rc = 0;
+    m_bActive = false;
+    rc = mProcTh.exit();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : syncRuntimeParams
+ *
+ * DESCRIPTION: query and sync runtime parameters such as the output crop and
+ *              image properties of the stream.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::syncRuntimeParams()
+{
+    int32_t ret = NO_ERROR;
+
+    memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+    m_OutputCrop.type = CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP;
+
+    ret = getParameter(m_OutputCrop);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: stream getParameter for output crop failed", __func__);
+        return ret;
+    }
+
+    memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+    m_ImgProp.type = CAM_STREAM_PARAM_TYPE_GET_IMG_PROP;
+
+    ret = getParameter(m_ImgProp);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: stream getParameter for image prop failed", __func__);
+        return ret;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : preview window ops table to set preview crop window
+ *   @crop_info     : crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processZoomDone(preview_stream_ops_t *previewWindow,
+                                       cam_crop_data_t &crop_info)
+{
+    int32_t rc = 0;
+
+    if (!m_bActive) {
+        CDBG("%s : Stream not active", __func__);
+        return NO_ERROR;
+    }
+
+    // get stream param for crop info
+    for (int i = 0; i < crop_info.num_of_streams; i++) {
+        if (crop_info.crop_info[i].stream_id == mStreamInfo->stream_svr_id) {
+            pthread_mutex_lock(&mCropLock);
+            mCropInfo = crop_info.crop_info[i].crop;
+            pthread_mutex_unlock(&mCropLock);
+
+            // update preview window crop if it's preview/postview stream
+            if ( (previewWindow != NULL) &&
+                 (mStreamInfo->stream_type == CAM_STREAM_TYPE_PREVIEW ||
+                  mStreamInfo->stream_type == CAM_STREAM_TYPE_POSTVIEW) ) {
+                rc = previewWindow->set_crop(previewWindow,
+                                             mCropInfo.left,
+                                             mCropInfo.top,
+                                             mCropInfo.width,
+                                             mCropInfo.height);
+            }
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processDataNotify
+ *
+ * DESCRIPTION: process stream data notify
+ *
+ * PARAMETERS :
+ *   @frame   : stream frame received
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+    CDBG("%s:\n", __func__);
+    if (m_bActive) {
+        mDataQ.enqueue((void *)frame);
+        return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        CDBG("%s: Stream thread is not active, no ops here", __func__);
+        bufDone(frame->bufs[0]->buf_idx);
+        free(frame);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ *              mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    CDBG("%s:\n", __func__);
+    QCameraStream* stream = (QCameraStream *)userdata;
+    if (stream == NULL ||
+        recvd_frame == NULL ||
+        recvd_frame->bufs[0] == NULL ||
+        recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+        ALOGE("%s: Not a valid stream to handle buf", __func__);
+        return;
+    }
+
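+    // The received super buffer is owned by mm-camera-interface, so take a
+    // shallow heap copy of the descriptor that the stream thread can consume
+    // asynchronously; processDataNotify()/dataProcRoutine() free it later.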
+    mm_camera_super_buf_t *frame =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: No mem for mm_camera_super_buf_t", __func__);
+        stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+        return;
+    }
+    *frame = *recvd_frame;
+    stream->processDataNotify(frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStream::dataProcRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    QCameraStream *pme = (QCameraStream *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    cmdThread->setName("CAM_strmDatProc");
+
+    CDBG("%s: E", __func__);
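+    // Main stream thread loop: block on the command semaphore, then either
+    // hand the next queued frame to the registered data callback
+    // (CAMERA_CMD_TYPE_DO_NEXT_JOB) or flush the queue and exit on
+    // CAMERA_CMD_TYPE_EXIT.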
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                      __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                CDBG("%s: Do next job", __func__);
+                mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+                if (NULL != frame) {
+                    if (pme->mDataCB != NULL) {
+                        pme->mDataCB(frame, pme, pme->mUserData);
+                    } else {
+                        // no data cb routine, return buf here
+                        pme->bufDone(frame->bufs[0]->buf_idx);
+                        free(frame);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            CDBG("%s: Exit", __func__);
+            /* flush data buf queue */
+            pme->mDataQ.flush();
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    CDBG("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(uint32_t index)
+{
+    int32_t rc = NO_ERROR;
+
+    if (index >= mNumBufs || mBufDefs == NULL)
+        return BAD_INDEX;
+
+    rc = mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+    if (rc < 0)
+        return rc;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @opaque    : stream frame/metadata buf to be returned
+ *   @isMetaData: flag if returned opaque is a metadatabuf or the real frame ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(const void *opaque, bool isMetaData)
+{
+    int32_t rc = NO_ERROR;
+    int index = -1;
+
+    QCameraVideoMemory *lVideoMem = NULL;
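+    // Batch (video) streams hand out media_metadata_buffer packets whose
+    // duplicated native handle must be deleted before the buffer is requeued;
+    // regular streams are matched directly against mStreamBufs.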
+    if (mStreamInfo != NULL &&
+        mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        index = mStreamBatchBufs->getMatchBufIndex(opaque, TRUE);
+        lVideoMem = (QCameraVideoMemory *)mStreamBatchBufs;
+        if (index == -1 || index >= mNumBufs || mBufDefs == NULL) {
+            ALOGE("%s: Cannot find buf for opaque data = %p", __func__, opaque);
+            return BAD_INDEX;
+        }
+        camera_memory_t *video_mem = mStreamBatchBufs->getMemory(index, true);
+        if (video_mem != NULL) {
+            media_metadata_buffer * packet =
+                (media_metadata_buffer *)video_mem->data;
+            native_handle_t *nh = const_cast<native_handle_t *>(packet->pHandle);
+            if (NULL != nh) {
+                if (native_handle_delete(nh)) {
+                    ALOGE("%s: Unable to delete native handle", __func__);
+                }
+            } else {
+                ALOGE("%s : native handle not available", __func__);
+            }
+        }
+    } else {
+        index = mStreamBufs->getMatchBufIndex(opaque, isMetaData);
+        lVideoMem = (QCameraVideoMemory *)mStreamBufs;
+        if (index == -1 || index >= mNumBufs || mBufDefs == NULL) {
+            ALOGE("%s: Cannot find buf for opaque data = %p", __func__, opaque);
+            return BAD_INDEX;
+        }
+        CDBG("%s: Buffer Index = %d, Frame Idx = %d", __func__, index,
+                mBufDefs[index].frame_idx);
+    }
+    //Close and delete duplicated native handle and FD's.
+    if (lVideoMem != NULL) {
+        rc = lVideoMem->closeNativeHandle(opaque, isMetaData);
+        if (rc != NO_ERROR) {
+            CDBG_HIGH("Invalid video metadata");
+            return rc;
+        }
+    } else {
+        CDBG_HIGH("Possible FD leak. Release recording called after stop");
+    }
+    rc = bufDone((uint32_t)index);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumQueuedBuf
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t QCameraStream::getNumQueuedBuf()
+{
+    int32_t rc = -1;
+    if (mHandle > 0) {
+        rc = mCamOps->get_queued_buf_count(mCamHandle, mChannelHandle, mHandle);
+    }
+    if (rc == -1) {
+        ALOGE("%s: stream is not in active state. Invalid operation", __func__);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufs(cam_frame_len_offset_t *offset,
+        uint8_t *num_bufs,
+        uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+
+    if (!ops_tbl) {
+        ALOGE("%s: ops_tbl is NULL", __func__);
+        return INVALID_OPERATION;
+    }
+
+    mFrameLenOffset = *offset;
+
+    uint8_t numBufAlloc = mNumBufs;
+    mNumBufsNeedAlloc = 0;
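+    // Two-step allocation: with dynamic buffer allocation only
+    // CAMERA_MIN_ALLOCATED_BUFFERS are allocated up front; the remaining
+    // mNumBufsNeedAlloc buffers are allocated later by BufAllocRoutine.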
+    if (mDynBufAlloc) {
+        numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+        if (numBufAlloc > mNumBufs) {
+            mDynBufAlloc = false;
+            numBufAlloc = mNumBufs;
+        } else {
+            mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);
+        }
+    }
+
+    //Allocate and map stream info buffer
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+                                               mFrameLenOffset.frame_len,
+                                               mFrameLenOffset.mp[0].stride,
+                                               mFrameLenOffset.mp[0].scanline,
+                                               numBufAlloc);
+    mNumBufs = (uint8_t)(numBufAlloc + mNumBufsNeedAlloc);
+
+    if (!mStreamBufs) {
+        ALOGE("%s: Failed to allocate stream buffers", __func__);
+        return NO_MEMORY;
+    }
+
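+    // Map each allocated buffer fd to the backend through the interface's
+    // map_ops; on failure unmap whatever was already mapped and release the
+    // stream buffers.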
+    for (uint32_t i = 0; i < numBufAlloc; i++) {
+        ssize_t bufSize = mStreamBufs->getSize(i);
+        if (BAD_INDEX != bufSize) {
+            rc = ops_tbl->map_ops(i, -1, mStreamBufs->getFd(i),
+                    (uint32_t)bufSize, ops_tbl->userdata);
+            if (rc < 0) {
+                ALOGE("%s: map_stream_buf failed: %d", __func__, rc);
+                for (uint32_t j = 0; j < i; j++) {
+                    ops_tbl->unmap_ops(j, -1, ops_tbl->userdata);
+                }
+                mStreamBufs->deallocate();
+                delete mStreamBufs;
+                mStreamBufs = NULL;
+                return INVALID_OPERATION;
+            }
+        } else {
+            ALOGE("Failed to retrieve buffer size (bad index)");
+            return INVALID_OPERATION;
+        }
+    }
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        ALOGE("%s: Out of memory", __func__);
+        for (uint32_t i = 0; i < numBufAlloc; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return NO_MEMORY;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        ALOGE("%s: No memory for mBufDefs", __func__);
+        for (uint32_t i = 0; i < numBufAlloc; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+    for (uint32_t i = 0; i < numBufAlloc; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(regFlags);
+    if (rc < 0) {
+        ALOGE("%s: getRegFlags failed %d", __func__, rc);
+        for (uint32_t i = 0; i < numBufAlloc; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+
+    if (mNumBufsNeedAlloc > 0) {
+        pthread_mutex_lock(&m_lock);
+        wait_for_cond = TRUE;
+        pthread_mutex_unlock(&m_lock);
+        CDBG_HIGH("%s: Still need to allocate %d buffers",
+              __func__, mNumBufsNeedAlloc);
+        // remember memops table
+        m_MemOpsTbl = *ops_tbl;
+        // start another thread to allocate the rest of buffers
+        pthread_create(&mBufAllocPid,
+                       NULL,
+                       BufAllocRoutine,
+                       this);
+        pthread_setname_np(mBufAllocPid, "CAM_strmBufAlloc");
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateBuffers
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::allocateBuffers()
+{
+    int rc = NO_ERROR;
+
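+    // Deferred-allocation path: the HAL allocates the buffers itself and maps
+    // each one to the backend via mapBuf(), instead of going through the
+    // mm-camera-interface ops table used in getBufs().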
+    mFrameLenOffset = mStreamInfo->buf_planes.plane_info;
+
+    //Allocate and map stream info buffer
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+            mFrameLenOffset.frame_len,
+            mFrameLenOffset.mp[0].stride,
+            mFrameLenOffset.mp[0].scanline,
+            mNumBufs);
+
+    if (!mStreamBufs) {
+        ALOGE("%s: Failed to allocate stream buffers", __func__);
+        return NO_MEMORY;
+    }
+
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        ssize_t bufSize = mStreamBufs->getSize(i);
+        if (BAD_INDEX != bufSize) {
+            rc = mapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1,
+                    mStreamBufs->getFd(i), (size_t)bufSize);
+            ALOGE_IF((rc < 0), "%s: map_stream_buf failed: %d", __func__, rc);
+        } else {
+            ALOGE("%s: Bad index %u", __func__, i);
+            rc = BAD_INDEX;
+        }
+        if (rc < 0) {
+            ALOGE("%s: Cleanup after error: %d", __func__, rc);
+            for (uint32_t j = 0; j < i; j++) {
+                unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, j, -1);
+            }
+            mStreamBufs->deallocate();
+            delete mStreamBufs;
+            mStreamBufs = NULL;
+            return INVALID_OPERATION;
+        }
+    }
+
+    //regFlags array is allocated by us,
+    // but consumed and freed by mm-camera-interface
+    mRegFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!mRegFlags) {
+        ALOGE("%s: Out of memory", __func__);
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return NO_MEMORY;
+    }
+    memset(mRegFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    size_t bufDefsSize = mNumBufs * sizeof(mm_camera_buf_def_t);
+    mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
+    if (mBufDefs == NULL) {
+        ALOGE("%s: No memory for mBufDefs", __func__);
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mRegFlags);
+        mRegFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, bufDefsSize);
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(mRegFlags);
+    if (rc < 0) {
+        ALOGE("%s: getRegFlags failed %d", __func__, rc);
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(mRegFlags);
+        mRegFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : releaseBuffs
+ *
+ * DESCRIPTION: method to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseBuffs()
+{
+    int rc = NO_ERROR;
+
+    if (NULL != mBufDefs) {
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            rc = unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1);
+            if (rc < 0) {
+                ALOGE("%s: unmap_stream_buf failed: %d", __func__, rc);
+            }
+        }
+
+        // mBufDefs just keeps a ptr to the buffers;
+        // mm-camera-interface owns the buffers, so no need to free here
+        mBufDefs = NULL;
+        memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    }
+    if ( !mStreamBufsAcquired && mStreamBufs != NULL) {
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+    }
+
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : BufAllocRoutine
+ *
+ * DESCRIPTION: function to allocate additional stream buffers
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStream::BufAllocRoutine(void *data)
+{
+    QCameraStream *pme = (QCameraStream *)data;
+    int32_t rc = NO_ERROR;
+
+    CDBG_HIGH("%s: E", __func__);
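+    // Block until cond_signal() clears wait_for_cond (set by getBufs() when
+    // additional buffers are still pending), then allocate, map and queue the
+    // remaining buffers using the saved mem ops table.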
+    pme->cond_wait();
+    if (pme->mNumBufsNeedAlloc > 0) {
+        uint8_t numBufAlloc = (uint8_t)(pme->mNumBufs - pme->mNumBufsNeedAlloc);
+        rc = pme->mAllocator.allocateMoreStreamBuf(pme->mStreamBufs,
+                                                   pme->mFrameLenOffset.frame_len,
+                                                   pme->mNumBufsNeedAlloc);
+        if (rc == NO_ERROR){
+            for (uint32_t i = numBufAlloc; i < pme->mNumBufs; i++) {
+                ssize_t bufSize = pme->mStreamBufs->getSize(i);
+                if (BAD_INDEX != bufSize) {
+                    rc = pme->m_MemOpsTbl.map_ops(i, -1, pme->mStreamBufs->getFd(i),
+                            (uint32_t)bufSize, pme->m_MemOpsTbl.userdata);
+                    if (rc == 0) {
+                        pme->mStreamBufs->getBufDef(pme->mFrameLenOffset, pme->mBufDefs[i], i);
+                        pme->mCamOps->qbuf(pme->mCamHandle, pme->mChannelHandle,
+                                &pme->mBufDefs[i]);
+                    } else {
+                        ALOGE("%s: map_stream_buf %d failed: %d", __func__, i, rc);
+                    }
+                } else {
+                    ALOGE("Failed to retrieve buffer size (bad index)");
+                }
+            }
+
+            pme->mNumBufsNeedAlloc = 0;
+        }
+    }
+    CDBG_HIGH("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : cond_signal
+ *
+ * DESCRIPTION: signal if flag "wait_for_cond" is set
+ *
+ *==========================================================================*/
+void QCameraStream::cond_signal()
+{
+    pthread_mutex_lock(&m_lock);
+    if(wait_for_cond == TRUE){
+        wait_for_cond = FALSE;
+        pthread_cond_signal(&m_cond);
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : cond_wait
+ *
+ * DESCRIPTION: wait on if flag "wait_for_cond" is set
+ *
+ *==========================================================================*/
+void QCameraStream::cond_wait()
+{
+    pthread_mutex_lock(&m_lock);
+    while (wait_for_cond == TRUE) {
+        pthread_cond_wait(&m_cond, &m_lock);
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+
+    if (mBufAllocPid != 0) {
+        CDBG_HIGH("%s: wait for buf allocation thread to exit", __func__);
+        pthread_join(mBufAllocPid, NULL);
+        mBufAllocPid = 0;
+        CDBG_HIGH("%s: return from buf allocation thread", __func__);
+    }
+
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        rc = ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        if (rc < 0) {
+            ALOGE("%s: unmap_ops failed: %d", __func__, rc);
+        }
+    }
+    mBufDefs = NULL; // mBufDefs just keeps a ptr to the buffers
+                     // mm-camera-interface owns the buffers, so no need to free
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    if ( !mStreamBufsAcquired ) {
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateBuf
+ *
+ * DESCRIPTION: invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidateBuf(uint32_t index)
+{
+    return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to clean invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::cleanInvalidateBuf(uint32_t index)
+{
+    return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : isTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the stream is of the queried type
+ *
+ * PARAMETERS :
+ *   @type    : stream type being queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isTypeOf(cam_stream_type_t type)
+{
+    if (mStreamInfo != NULL && (mStreamInfo->stream_type == type)) {
+        return true;
+    } else {
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isOrignalTypeOf
+ *
+ * DESCRIPTION: helper function to determine, for a reprocess stream, whether
+ *              its original input stream is of the queried type
+ *
+ * PARAMETERS :
+ *   @type    : stream type being queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isOrignalTypeOf(cam_stream_type_t type)
+{
+    if (mStreamInfo != NULL &&
+        mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+        mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE &&
+        mStreamInfo->reprocess_config.online.input_stream_type == type) {
+        return true;
+    } else if (
+        mStreamInfo != NULL &&
+        mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+        mStreamInfo->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE &&
+        mStreamInfo->reprocess_config.offline.input_type == type) {
+        return true;
+    } else {
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyType
+ *
+ * DESCRIPTION: return stream type
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : stream type
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyType()
+{
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_type;
+    } else {
+        return CAM_STREAM_TYPE_DEFAULT;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyOriginalType
+ *
+ * DESCRIPTION: return original stream type
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : stream type
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyOriginalType()
+{
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->reprocess_config.online.input_stream_type;
+    } else {
+        return CAM_STREAM_TYPE_DEFAULT;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ *   @offset  : reference to struct to store the queried frame offset info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+    offset = mFrameLenOffset;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCropInfo
+ *
+ * DESCRIPTION: query crop info of the stream
+ *
+ * PARAMETERS :
+ *   @crop    : reference to struct to store the queried crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getCropInfo(cam_rect_t &crop)
+{
+    pthread_mutex_lock(&mCropLock);
+    crop = mCropInfo;
+    pthread_mutex_unlock(&mCropLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCropInfo
+ *
+ * DESCRIPTION: set crop info of the stream
+ *
+ * PARAMETERS :
+ *   @crop    : struct to store new crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setCropInfo(cam_rect_t crop)
+{
+    pthread_mutex_lock(&mCropLock);
+    mCropInfo = crop;
+    pthread_mutex_unlock(&mCropLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ *   @dim     : reference to struct to store the queried frame dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameDimension(cam_dimension_t &dim)
+{
+    if (mStreamInfo != NULL) {
+        dim = mStreamInfo->dim;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ *   @fmt     : reference to stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFormat(cam_format_t &fmt)
+{
+    if (mStreamInfo != NULL) {
+        fmt = mStreamInfo->fmt;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : stream ID from server
+ *==========================================================================*/
+uint32_t QCameraStream::getMyServerID() {
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_svr_id;
+    } else {
+        return 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : acquireStreamBufs
+ *
+ * DESCRIPTION: acquire stream buffers and postpone their release.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::acquireStreamBufs()
+{
+    mStreamBufsAcquired = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @fd       : fd of the buffer
+ *   @size     : length of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuf(uint8_t buf_type, uint32_t buf_idx,
+        int32_t plane_idx, int fd, size_t size)
+{
+    return mCamOps->map_stream_buf(mCamHandle, mChannelHandle,
+                                   mHandle, buf_type,
+                                   buf_idx, plane_idx,
+                                   fd, size);
+
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer from backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx)
+{
+    return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle,
+                                     mHandle, buf_type,
+                                     buf_idx, plane_idx);
+
+}
+
+/*===========================================================================
+ * FUNCTION   : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : ptr to parameters to be set
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
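+    // mStreamInfo->parm_buf acts as an in/out exchange area with the backend:
+    // the request is copied in, set_stream_parms() fills in the result, and
+    // the updated values are copied back to the caller on success.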
+    pthread_mutex_lock(&mParameterLock);
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->set_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        param = mStreamInfo->parm_buf;
+    }
+    pthread_mutex_unlock(&mParameterLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getParameter
+ *
+ * DESCRIPTION: get stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : ptr to parameters to be read
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
+
+    if (!m_bActive) {
+        ALOGE("%s : Stream not active", __func__);
+        return NO_INIT;
+    }
+
+    pthread_mutex_lock(&mParameterLock);
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->get_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        param = mStreamInfo->parm_buf;
+    }
+    pthread_mutex_unlock(&mParameterLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrameData
+ *
+ * DESCRIPTION: callback function to release frame data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCameraStream)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraStream::releaseFrameData(void *data, void *user_data)
+{
+    QCameraStream *pme = (QCameraStream *)user_data;
+    mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)data;
+    if (NULL != pme) {
+        pme->bufDone(frame->bufs[0]->buf_idx);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : configStream
+ *
+ * DESCRIPTION: send stream configuration to back end
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::configStream()
+{
+    int rc = NO_ERROR;
+
+    // Configure the stream
+    mm_camera_stream_config_t stream_config;
+    stream_config.stream_info = mStreamInfo;
+    stream_config.mem_vtbl = mMemVtbl;
+    stream_config.stream_cb = dataNotifyCB;
+    stream_config.padding_info = mPaddingInfo;
+    stream_config.userdata = this;
+    rc = mCamOps->config_stream(mCamHandle,
+                mChannelHandle, mHandle, &stream_config);
+    if (rc < 0) {
+        ALOGE("Failed to config stream, rc = %d", rc);
+        mCamOps->unmap_stream_buf(mCamHandle,
+                mChannelHandle,
+                mHandle,
+                CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                0,
+                -1);
+        return UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraStream.h b/msm8974/QCamera2/HAL/QCameraStream.h
new file mode 100644
index 0000000..dbe0c71
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraStream.h
@@ -0,0 +1,201 @@
+/* Copyright (c) 2012-2016, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STREAM_H__
+#define __QCAMERA_STREAM_H__
+
+#include <hardware/camera.h>
+#include "QCameraCmdThread.h"
+#include "QCameraMem.h"
+#include "QCameraAllocator.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraStream;
+typedef void (*stream_cb_routine)(mm_camera_super_buf_t *frame,
+                                  QCameraStream *stream,
+                                  void *userdata);
+
+class QCameraStream
+{
+public:
+    QCameraStream(QCameraAllocator &allocator,
+                  uint32_t camHandle,
+                  uint32_t chId,
+                  mm_camera_ops_t *camOps,
+                  cam_padding_info_t *paddingInfo,
+                  bool deffered = false);
+    virtual ~QCameraStream();
+    virtual int32_t init(QCameraHeapMemory *streamInfoBuf,
+                         uint8_t minStreamBufNum,
+                         stream_cb_routine stream_cb,
+                         void *userdata,
+                         bool bDynallocBuf);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    virtual int32_t bufDone(uint32_t index);
+    virtual int32_t bufDone(const void *opaque, bool isMetaData);
+    virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+    virtual int32_t start();
+    virtual int32_t stop();
+
+    /* Used for deferred allocation of buffers */
+    virtual int32_t allocateBuffers();
+    virtual int32_t releaseBuffs();
+
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void *dataProcRoutine(void *data);
+    static void *BufAllocRoutine(void *data);
+    uint32_t getMyHandle() const {return mHandle;}
+    bool isTypeOf(cam_stream_type_t type);
+    bool isOrignalTypeOf(cam_stream_type_t type);
+    int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+    int32_t getCropInfo(cam_rect_t &crop);
+    int32_t setCropInfo(cam_rect_t crop);
+    int32_t getFrameDimension(cam_dimension_t &dim);
+    int32_t getFormat(cam_format_t &fmt);
+    QCameraMemory *getStreamBufs() {return mStreamBufs;};
+    QCameraHeapMemory *getStreamInfoBuf() {return mStreamInfoBuf;};
+    uint32_t getMyServerID();
+    cam_stream_type_t getMyType();
+    cam_stream_type_t getMyOriginalType();
+    int32_t acquireStreamBufs();
+
+    int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+            int32_t plane_idx, int fd, size_t size);
+    int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx);
+    int32_t setParameter(cam_stream_parm_buffer_t &param);
+    int32_t getParameter(cam_stream_parm_buffer_t &param);
+    int32_t syncRuntimeParams();
+    cam_stream_parm_buffer_t getOutputCrop() { return m_OutputCrop;};
+    cam_stream_parm_buffer_t getImgProp() { return m_ImgProp;};
+
+    static void releaseFrameData(void *data, void *user_data);
+    int32_t configStream();
+    bool isDeffered() const { return mDefferedAllocation; }
+    void deleteStream();
+
+    uint8_t getBufferCount() { return mNumBufs; }
+    uint32_t getChannelHandle() { return mChannelHandle; }
+    int32_t getNumQueuedBuf();
+
+    uint32_t mDumpFrame;
+    uint32_t mDumpMetaFrame;
+    uint32_t mDumpSkipCnt;
+
+    void cond_wait();
+    void cond_signal();
+
+private:
+    uint32_t mCamHandle;
+    uint32_t mChannelHandle;
+    uint32_t mHandle; // stream handle from mm-camera-interface
+    mm_camera_ops_t *mCamOps;
+    cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+    mm_camera_stream_mem_vtbl_t mMemVtbl;
+    uint8_t mNumBufs;
+    uint8_t mNumBufsNeedAlloc;
+    uint8_t *mRegFlags;
+    stream_cb_routine mDataCB;
+    void *mUserData;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh; // thread for dataCB
+
+    QCameraHeapMemory *mStreamInfoBuf;
+    QCameraMemory *mStreamBufs;
+    QCameraMemory *mStreamBatchBufs;
+    QCameraAllocator &mAllocator;
+    mm_camera_buf_def_t *mBufDefs;
+    cam_frame_len_offset_t mFrameLenOffset;
+    cam_padding_info_t mPaddingInfo;
+    cam_rect_t mCropInfo;
+    pthread_mutex_t mCropLock; // lock to protect crop info
+    pthread_mutex_t mParameterLock; // lock to sync access to parameters
+    bool mStreamBufsAcquired;
+    bool m_bActive; // if stream mProcTh is active
+    bool mDynBufAlloc; // allow buf allocation in 2 steps
+    pthread_t mBufAllocPid;
+    mm_camera_map_unmap_ops_tbl_t m_MemOpsTbl;
+    cam_stream_parm_buffer_t m_OutputCrop;
+    cam_stream_parm_buffer_t m_ImgProp;
+
+    static int32_t get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+
+    static int32_t get_bufs_deffered(
+            cam_frame_len_offset_t *offset,
+            uint8_t *num_bufs,
+            uint8_t **initial_reg_flag,
+            mm_camera_buf_def_t **bufs,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+            void *user_data);
+
+    static int32_t put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+
+    static int32_t put_bufs_deffered(
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+            void *user_data);
+
+    static int32_t invalidate_buf(uint32_t index, void *user_data);
+    static int32_t clean_invalidate_buf(uint32_t index, void *user_data);
+
+    int32_t getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t invalidateBuf(uint32_t index);
+    int32_t cleanInvalidateBuf(uint32_t index);
+    int32_t calcOffset(cam_stream_info_t *streamInfo);
+    int32_t unmapStreamInfoBuf();
+    int32_t releaseStreamInfoBuf();
+    bool mDefferedAllocation;
+
+    bool wait_for_cond;
+    pthread_mutex_t m_lock;
+    pthread_cond_t m_cond;
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STREAM_H__ */
diff --git a/msm8974/QCamera2/HAL/QCameraThermalAdapter.cpp b/msm8974/QCamera2/HAL/QCameraThermalAdapter.cpp
new file mode 100644
index 0000000..7965ee8
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraThermalAdapter.cpp
@@ -0,0 +1,162 @@
+/* Copyright (c) 2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraThermalAdapter"
+
+#include <dlfcn.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraThermalAdapter.h"
+
+using namespace android;
+
+namespace qcamera {
+
+
+QCameraThermalAdapter& QCameraThermalAdapter::getInstance()
+{
+    static QCameraThermalAdapter instance;
+    return instance;
+}
+
+QCameraThermalAdapter::QCameraThermalAdapter() :
+                                        mCallback(NULL),
+                                        mHandle(NULL),
+                                        mRegister(NULL),
+                                        mUnregister(NULL),
+                                        mCameraHandle(0),
+                                        mCamcorderHandle(0)
+{
+}
+
+int QCameraThermalAdapter::init(QCameraThermalCallback *thermalCb)
+{
+    const char *error = NULL;
+    int rc = NO_ERROR;
+
+    CDBG("%s E", __func__);
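+    // The thermal client library is loaded at runtime; resolve its register/
+    // unregister entry points via dlsym, then register callbacks for both the
+    // "camera" and "camcorder" thermal clients.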
+    mHandle = dlopen("/vendor/lib/libthermalclient.so", RTLD_NOW);
+    if (!mHandle) {
+        error = dlerror();
+        ALOGE("%s: dlopen failed with error %s",
+                    __func__, error ? error : "");
+        rc = UNKNOWN_ERROR;
+        goto error;
+    }
+    *(void **)&mRegister = dlsym(mHandle, "thermal_client_register_callback");
+    if (!mRegister) {
+        error = dlerror();
+        ALOGE("%s: dlsym failed with error code %s",
+                    __func__, error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    *(void **)&mUnregister = dlsym(mHandle, "thermal_client_unregister_callback");
+    if (!mUnregister) {
+        error = dlerror();
+        ALOGE("%s: dlsym failed with error code %s",
+                    __func__, error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+
+    // Register camera and camcorder callbacks
+    mCameraHandle = mRegister(mStrCamera, thermalCallback, NULL);
+    if (mCameraHandle < 0) {
+        ALOGE("%s: thermal_client_register_callback failed %d",
+                        __func__, mCameraHandle);
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    mCamcorderHandle = mRegister(mStrCamcorder, thermalCallback, NULL);
+    if (mCamcorderHandle < 0) {
+        ALOGE("%s: thermal_client_register_callback failed %d",
+                        __func__, mCamcorderHandle);
+        rc = UNKNOWN_ERROR;
+        goto error3;
+    }
+
+    mCallback = thermalCb;
+    CDBG("%s X", __func__);
+    return rc;
+
+error3:
+    mCamcorderHandle = 0;
+    mUnregister(mCameraHandle);
+error2:
+    mCameraHandle = 0;
+    dlclose(mHandle);
+    mHandle = NULL;
+error:
+    CDBG("%s X", __func__);
+    return rc;
+}
+
+void QCameraThermalAdapter::deinit()
+{
+    CDBG("%s E", __func__);
+    if (mUnregister) {
+        if (mCameraHandle) {
+            mUnregister(mCameraHandle);
+            mCameraHandle = 0;
+        }
+        if (mCamcorderHandle) {
+            mUnregister(mCamcorderHandle);
+            mCamcorderHandle = 0;
+        }
+    }
+    if (mHandle)
+        dlclose(mHandle);
+
+    mHandle = NULL;
+    mRegister = NULL;
+    mUnregister = NULL;
+    mCallback = NULL;
+    CDBG("%s X", __func__);
+}
+
+char QCameraThermalAdapter::mStrCamera[] = "camera";
+char QCameraThermalAdapter::mStrCamcorder[] = "camcorder";
+
+int QCameraThermalAdapter::thermalCallback(int level,
+                void *userdata, void *data)
+{
+    int rc = 0;
+    CDBG("%s E", __func__);
+    QCameraThermalAdapter& instance = getInstance();
+    qcamera_thermal_level_enum_t lvl = (qcamera_thermal_level_enum_t) level;
+    if (instance.mCallback)
+        rc = instance.mCallback->thermalEvtHandle(lvl, userdata, data);
+    CDBG("%s X", __func__);
+    return rc;
+}
+
+}; //namespace qcamera
diff --git a/msm8974/QCamera2/HAL/QCameraThermalAdapter.h b/msm8974/QCamera2/HAL/QCameraThermalAdapter.h
new file mode 100644
index 0000000..80711a0
--- /dev/null
+++ b/msm8974/QCamera2/HAL/QCameraThermalAdapter.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_THERMAL_ADAPTER__
+#define __QCAMERA_THERMAL_ADAPTER__
+
+namespace qcamera {
+
+typedef enum {
+    QCAMERA_THERMAL_NO_ADJUSTMENT = 0,
+    QCAMERA_THERMAL_SLIGHT_ADJUSTMENT,
+    QCAMERA_THERMAL_BIG_ADJUSTMENT,
+    QCAMERA_THERMAL_SHUTDOWN
+} qcamera_thermal_level_enum_t;
+
+typedef enum {
+    QCAMERA_THERMAL_ADJUST_FPS,
+    QCAMERA_THERMAL_ADJUST_FRAMESKIP,
+} qcamera_thermal_mode;
+
+class QCameraThermalCallback
+{
+public:
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t level,
+            void *userdata, void *data) = 0;
+    virtual ~QCameraThermalCallback() {}
+};
+
+class QCameraThermalAdapter
+{
+public:
+    static QCameraThermalAdapter& getInstance();
+
+    int init(QCameraThermalCallback *thermalCb);
+    void deinit();
+
+private:
+    static char mStrCamera[];
+    static char mStrCamcorder[];
+
+    static int thermalCallback(int level, void *userdata, void *data);
+
+    QCameraThermalCallback *mCallback;
+    void *mHandle;
+    int (*mRegister)(char *name,
+            int (*callback)(int, void *userdata, void *data), void *data);
+    int (*mUnregister)(int handle);
+    int mCameraHandle;
+    int mCamcorderHandle;
+
+    QCameraThermalAdapter();
+    QCameraThermalAdapter(QCameraThermalAdapter const& copy); // not implemented
+    QCameraThermalAdapter& operator=(QCameraThermalAdapter const& copy); // not implemented
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_THERMAL_ADAPTER__ */
diff --git a/msm8974/QCamera2/HAL/wrapper/QualcommCamera.cpp b/msm8974/QCamera2/HAL/wrapper/QualcommCamera.cpp
new file mode 100644
index 0000000..f491c10
--- /dev/null
+++ b/msm8974/QCamera2/HAL/wrapper/QualcommCamera.cpp
@@ -0,0 +1,450 @@
+/* Copyright (c) 2011-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/RefBase.h>
+
+#include "QualcommCamera.h"
+#include "QCamera2Factory.h"
+#include "QCamera2HWI.h"
+
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementations go here */
+
+/**
+ * These functions must be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid camera IDs for getCameraInfo()
+ * and openCameraHardware() are 0 to N-1.
+ */
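+
+/*
+ * Sketch of the enumeration contract (illustrative only, not part of the
+ * original sources): a caller first asks how many cameras exist and then
+ * queries each ID in [0, N-1].
+ *
+ *   int n = get_number_of_cameras();
+ *   for (int id = 0; id < n; id++) {
+ *       struct camera_info info;
+ *       if (get_camera_info(id, &info) == 0) {
+ *           // info.facing / info.orientation are now valid for camera `id`
+ *       }
+ *   }
+ */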
+
+
+static hw_module_methods_t camera_module_methods = {
+    .open = camera_device_open,
+};
+
+static hw_module_t camera_common = {
+    .tag = HARDWARE_MODULE_TAG,
+    .module_api_version = CAMERA_MODULE_API_VERSION_1_0,
+    .hal_api_version = HARDWARE_HAL_API_VERSION,
+    .id = CAMERA_HARDWARE_MODULE_ID,
+    .name = "QCamera Module",
+    .author = "Quic on behalf of CAF",
+    .methods = &camera_module_methods,
+    .dso = NULL,
+    .reserved = {0},
+};
+
+using namespace qcamera;
+namespace android {
+
+typedef struct {
+    camera_device hw_dev;
+    QCamera2HardwareInterface *hardware;
+    int camera_released;
+    int cameraId;
+} camera_hardware_t;
+
+typedef struct {
+  camera_memory_t mem;
+  int32_t msgType;
+  sp<IMemory> dataPtr;
+  void* user;
+  unsigned int index;
+} q_cam_memory_t;
+
+QCamera2HardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+    QCamera2HardwareInterface *hardware = NULL;
+    if(device && device->priv){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        hardware = camHal->hardware;
+    }
+    return hardware;
+}
+
+extern "C" int get_number_of_cameras()
+{
+    /* try to query every time we get the call!*/
+
+    CDBG_HIGH("Q%s: E", __func__);
+    return QCamera2Factory::get_number_of_cameras();
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rc = -1;
+    CDBG_HIGH("Q%s: E", __func__);
+
+    if(info) {
+        rc = QCamera2Factory::get_camera_info(camera_id, info);
+    }
+    CDBG("Q%s: X", __func__);
+    return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int  camera_device_open(
+  const struct hw_module_t* module, const char* id,
+          struct hw_device_t** hw_device)
+{
+    int rc = -1;
+    camera_device *device = NULL;
+
+    if(module && id && hw_device) {
+        if (!strcmp(module->name, camera_common.name)) {
+            int cameraId = atoi(id);
+
+            camera_hardware_t *camHal =
+                (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+            if(!camHal) {
+                *hw_device = NULL;
+                ALOGE("%s:  end in no mem", __func__);
+                return rc;
+            }
+            /* we have the camera_hardware obj malloced */
+            memset(camHal, 0, sizeof (camera_hardware_t));
+            camHal->hardware = new QCamera2HardwareInterface((uint32_t)cameraId);
+            if (camHal->hardware) {
+                camHal->cameraId = cameraId;
+                device = &camHal->hw_dev;
+                device->common.close = close_camera_device;
+                device->ops = &QCamera2HardwareInterface::mCameraOps;
+                device->priv = (void *)camHal;
+                rc =  0;
+            } else {
+                if (camHal->hardware) {
+                    delete camHal->hardware;
+                    camHal->hardware = NULL;
+                }
+                free(camHal);
+                device = NULL;
+                goto EXIT;
+            }
+        }
+        /* pass actual hw_device ptr to framework. This makes it possible to use the member_of() macro */
+        *hw_device = (device != NULL) ? (hw_device_t *)&device->common : NULL;
+    }
+
+EXIT:
+
+    ALOGE("%s:  end rc %d", __func__, rc);
+    return rc;
+}
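+
+/*
+ * Illustrative call sequence (an assumption, not part of the original sources):
+ * how a caller might open camera 0 through this entry point and close it again.
+ *
+ *   hw_device_t *dev = NULL;
+ *   if (camera_device_open(&camera_common, "0", &dev) == 0 && dev != NULL) {
+ *       // use ((camera_device_t *)dev)->ops ...
+ *       close_camera_device(dev);
+ *   }
+ */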
+
+extern "C"  int close_camera_device( hw_device_t *hw_dev)
+{
+    CDBG_HIGH("Q%s: device =%p E", __func__, hw_dev);
+    int rc =  -1;
+    camera_device_t *device = (camera_device_t *)hw_dev;
+
+    if(device) {
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        if(camHal ) {
+            QCamera2HardwareInterface *hardware = util_get_Hal_obj( device);
+            if(!camHal->camera_released) {
+                if(hardware != NULL) {
+                    hardware->release(device);
+                }
+            }
+            if(hardware != NULL)
+                delete hardware;
+            free(camHal);
+        }
+        rc = 0;
+    }
+    return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+
+    if(hardware != NULL) {
+        rc = hardware->set_preview_window(device, window);
+    }
+    return rc;
+}
+
+void set_CallBacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->set_CallBacks(device, notify_cb,data_cb, data_cb_timestamp, get_memory, user);
+    }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->enable_msg_type(device, msg_type);
+    }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    CDBG_HIGH("Q%s: E", __func__);
+    if(hardware != NULL){
+        hardware->disable_msg_type(device, msg_type);
+    }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->msg_type_enabled(device, msg_type);
+    }
+    return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->start_preview(device);
+    }
+    CDBG_HIGH("Q%s: X", __func__);
+    return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stop_preview(device);
+    }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->preview_enabled(device);
+    }
+    return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->store_meta_data_in_buffers(device, enable);
+    }
+    return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->start_recording(device);
+    }
+    return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stop_recording(device);
+    }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->recording_enabled(device);
+    }
+    return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    CDBG("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->release_recording_frame(device, opaque);
+    }
+}
+
+int auto_focus(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->auto_focus(device);
+    }
+    return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancel_auto_focus(device);
+    }
+    return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->take_picture(device);
+    }
+    return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancel_picture(device);
+    }
+    return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL && parms){
+        rc = hardware->set_parameters(device, parms);
+    }
+    return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        char *parms = NULL;
+        parms = hardware->get_parameters(device);
+        return parms;
+    }
+    return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->put_parameters(device, parm);
+    }
+}
+
+int send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->send_command(device, cmd, arg1, arg2);
+    }
+    return rc;
+}
+
+void release(struct camera_device * device)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        hardware->release(device);
+        camHal->camera_released = true;
+    }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+    CDBG_HIGH("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->dump(device, fd);
+    }
+    return rc;
+}
+
+}; // namespace android
diff --git a/msm8974/QCamera2/HAL/wrapper/QualcommCamera.h b/msm8974/QCamera2/HAL/wrapper/QualcommCamera.h
new file mode 100644
index 0000000..f3def21
--- /dev/null
+++ b/msm8974/QCamera2/HAL/wrapper/QualcommCamera.h
@@ -0,0 +1,107 @@
+/* Copyright (c) 2011-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+#include "QCamera2HWI.h"
+
+extern "C" {
+
+  int get_number_of_cameras();
+  int get_camera_info(int camera_id, struct camera_info *info);
+
+  int camera_device_open(const struct hw_module_t* module, const char* id,
+          struct hw_device_t** device);
+
+  hw_device_t * open_camera_device(int cameraId);
+
+  int close_camera_device( hw_device_t *);
+
+namespace android {
+  int set_preview_window(struct camera_device *,
+          struct preview_stream_ops *window);
+  void set_CallBacks(struct camera_device *,
+          camera_notify_callback notify_cb,
+          camera_data_callback data_cb,
+          camera_data_timestamp_callback data_cb_timestamp,
+          camera_request_memory get_memory,
+          void *user);
+
+  void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+  void disable_msg_type(struct camera_device *, int32_t msg_type);
+  int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+  int start_preview(struct camera_device *);
+
+  void stop_preview(struct camera_device *);
+
+  int preview_enabled(struct camera_device *);
+  int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+  int start_recording(struct camera_device *);
+
+  void stop_recording(struct camera_device *);
+
+  int recording_enabled(struct camera_device *);
+
+  void release_recording_frame(struct camera_device *,
+                  const void *opaque);
+
+  int auto_focus(struct camera_device *);
+
+  int cancel_auto_focus(struct camera_device *);
+
+  int take_picture(struct camera_device *);
+
+  int cancel_picture(struct camera_device *);
+
+  int set_parameters(struct camera_device *, const char *parms);
+
+  char* get_parameters(struct camera_device *);
+
+  void put_parameters(struct camera_device *, char *);
+
+  int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+
+  void release(struct camera_device *);
+
+  int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/msm8974/QCamera2/stack/Android.mk b/msm8974/QCamera2/stack/Android.mk
new file mode 100644
index 0000000..5053e7d
--- /dev/null
+++ b/msm8974/QCamera2/stack/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/msm8974/QCamera2/stack/common/cam_intf.h b/msm8974/QCamera2/stack/common/cam_intf.h
new file mode 100644
index 0000000..a804312
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/cam_intf.h
@@ -0,0 +1,662 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+#include <media/msmb_isp.h>
+#include <semaphore.h>
+#include "cam_types.h"
+
+/* (1024 * 1024) */
+#define ONE_MB_OF_PARAMS 1048576U
+
+#define CAM_PRIV_IOCTL_BASE (V4L2_CID_PRIVATE_BASE + 14)
+typedef enum {
+    /* session based parameters */
+    CAM_PRIV_PARM = CAM_PRIV_IOCTL_BASE,
+    /* session based action: do auto focus.*/
+    CAM_PRIV_DO_AUTO_FOCUS,
+    /* session based action: cancel auto focus.*/
+    CAM_PRIV_CANCEL_AUTO_FOCUS,
+    /* session based action: prepare for snapshot.*/
+    CAM_PRIV_PREPARE_SNAPSHOT,
+    /* sync stream info.*/
+    CAM_PRIV_STREAM_INFO_SYNC,
+    /* stream based parameters*/
+    CAM_PRIV_STREAM_PARM,
+    /* start ZSL snapshot.*/
+    CAM_PRIV_START_ZSL_SNAPSHOT,
+    /* stop ZSL snapshot.*/
+    CAM_PRIV_STOP_ZSL_SNAPSHOT,
+} cam_private_ioctl_enum_t;
+
+/* capability struct definition for HAL 1*/
+typedef struct{
+    cam_hal_version_t version;
+
+    cam_position_t position;                                /* sensor position: front, back */
+
+    uint8_t auto_hdr_supported;
+
+    /* supported iso modes */
+    size_t supported_iso_modes_cnt;
+    cam_iso_mode_type supported_iso_modes[CAM_ISO_MODE_MAX];
+
+    /* supported exposure time */
+    int32_t min_exposure_time;
+    int32_t max_exposure_time;
+
+    /* supported flash modes */
+    size_t supported_flash_modes_cnt;
+    cam_flash_mode_t supported_flash_modes[CAM_FLASH_MODE_MAX];
+
+    size_t zoom_ratio_tbl_cnt;                              /* table size for zoom ratios */
+    uint32_t zoom_ratio_tbl[MAX_ZOOMS_CNT];                 /* zoom ratios table */
+
+    /* supported effect modes */
+    size_t supported_effects_cnt;
+    cam_effect_mode_type supported_effects[CAM_EFFECT_MODE_MAX];
+
+    /* supported scene modes */
+    size_t supported_scene_modes_cnt;
+    cam_scene_mode_type supported_scene_modes[CAM_SCENE_MODE_MAX];
+
+    /* supported auto exposure modes */
+    size_t supported_aec_modes_cnt;
+    cam_auto_exposure_mode_type supported_aec_modes[CAM_AEC_MODE_MAX];
+
+    size_t fps_ranges_tbl_cnt;                              /* fps ranges table size */
+    cam_fps_range_t fps_ranges_tbl[MAX_SIZES_CNT];          /* fps ranges table */
+
+    /* supported antibanding modes */
+    size_t supported_antibandings_cnt;
+    cam_antibanding_mode_type supported_antibandings[CAM_ANTIBANDING_MODE_MAX];
+
+    /* supported white balance modes */
+    size_t supported_white_balances_cnt;
+    cam_wb_mode_type supported_white_balances[CAM_WB_MODE_MAX];
+
+    /* supported manual wb cct */
+    int32_t min_wb_cct;
+    int32_t max_wb_cct;
+
+    /* supported focus modes */
+    size_t supported_focus_modes_cnt;
+    cam_focus_mode_type supported_focus_modes[CAM_FOCUS_MODE_MAX];
+
+    /* supported manual focus position */
+    int32_t min_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+    int32_t max_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+
+    int32_t exposure_compensation_min;       /* min value of exposure compensation index */
+    int32_t exposure_compensation_max;       /* max value of exposure compensation index */
+    int32_t exposure_compensation_default;   /* default value of exposure compensation index */
+    float exposure_compensation_step;
+    cam_rational_type_t exp_compensation_step;    /* exposure compensation step value */
+
+    uint8_t video_stablization_supported; /* flag if video stabilization is supported */
+
+    size_t picture_sizes_tbl_cnt;                           /* picture sizes table size */
+    cam_dimension_t picture_sizes_tbl[MAX_SIZES_CNT];       /* picture sizes table */
+
+    /* capabilities specific to HAL 1 */
+
+    int32_t modes_supported;                                /* mask of modes supported: 2D, 3D */
+    uint32_t sensor_mount_angle;                            /* sensor mount angle */
+
+    float focal_length;                                     /* focal length */
+    float hor_view_angle;                                   /* horizontal view angle */
+    float ver_view_angle;                                   /* vertical view angle */
+
+    size_t preview_sizes_tbl_cnt;                           /* preview sizes table size */
+    cam_dimension_t preview_sizes_tbl[MAX_SIZES_CNT];       /* preview sizes table */
+
+    size_t video_sizes_tbl_cnt;                             /* video sizes table size */
+    cam_dimension_t video_sizes_tbl[MAX_SIZES_CNT];         /* video sizes table */
+
+    size_t livesnapshot_sizes_tbl_cnt;                      /* livesnapshot sizes table size */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+
+    size_t hfr_tbl_cnt;                                     /* table size for HFR */
+    cam_hfr_info_t hfr_tbl[CAM_HFR_MODE_MAX];               /* HFR table */
+
+    /* supported preview formats */
+    size_t supported_preview_fmt_cnt;
+    cam_format_t supported_preview_fmts[CAM_FORMAT_MAX];
+
+    /* supported picture formats */
+    size_t supported_picture_fmt_cnt;
+    cam_format_t supported_picture_fmts[CAM_FORMAT_MAX];
+
+    /* dimension and supported output format of raw dump from camif */
+    cam_dimension_t raw_dim;
+    size_t supported_raw_fmt_cnt;
+    cam_format_t supported_raw_fmts[CAM_FORMAT_MAX];
+
+    /* supported focus algorithms */
+    size_t supported_focus_algos_cnt;
+    cam_focus_algorithm_type supported_focus_algos[CAM_FOCUS_ALGO_MAX];
+
+
+    uint8_t auto_wb_lock_supported;       /* flag if auto white balance lock is supported */
+    uint8_t zoom_supported;               /* flag if zoom is supported */
+    uint8_t smooth_zoom_supported;        /* flag if smooth zoom is supported */
+    uint8_t auto_exposure_lock_supported; /* flag if auto exposure lock is supported */
+    uint8_t video_snapshot_supported;     /* flag if video snapshot is supported */
+
+    uint8_t max_num_roi;                  /* max number of ROIs that can be detected */
+    uint8_t max_num_focus_areas;          /* max num of focus areas */
+    uint8_t max_num_metering_areas;       /* max num of metering areas */
+    uint8_t max_zoom_step;                /* max zoom step value */
+
+    /* QCOM specific control */
+    cam_control_range_t brightness_ctrl;  /* brightness */
+    cam_control_range_t sharpness_ctrl;   /* sharpness */
+    cam_control_range_t contrast_ctrl;    /* contrast */
+    cam_control_range_t saturation_ctrl;  /* saturation */
+    cam_control_range_t sce_ctrl;         /* skintone enhancement factor */
+
+    /* QCOM HDR specific control. Indicates number of frames and exposure needs for the frames */
+    cam_hdr_bracketing_info_t hdr_bracketing_setting;
+
+    uint32_t qcom_supported_feature_mask; /* mask of qcom specific features supported:
+                                           * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
+    cam_padding_info_t padding_info;      /* padding information from PP */
+    uint32_t min_num_pp_bufs;             /* minimum number of buffers needed by postproc module */
+    uint32_t min_required_pp_mask;        /* min required pp feature masks for ZSL.
+                                           * depends on hardware limitation, i.e. for 8974,
+                                           * sharpness is required for all ZSL snapshot frames */
+
+    /* capabilities specific to HAL 3 */
+
+    float min_focus_distance;
+    float hyper_focal_distance;
+
+    float focal_lengths[CAM_FOCAL_LENGTHS_MAX];
+    uint8_t focal_lengths_count;
+
+    float apertures[CAM_APERTURES_MAX];
+    uint8_t apertures_count;
+
+    float filter_densities[CAM_FILTER_DENSITIES_MAX];
+    uint8_t filter_densities_count;
+
+    uint8_t optical_stab_modes[CAM_OPT_STAB_MAX];
+    uint8_t optical_stab_modes_count;
+
+    cam_dimension_t lens_shading_map_size;
+    float lens_shading_map[3 * CAM_MAX_MAP_WIDTH *
+              CAM_MAX_MAP_HEIGHT];
+
+    cam_dimension_t geo_correction_map_size;
+    float geo_correction_map[2 * 3 * CAM_MAX_MAP_WIDTH *
+              CAM_MAX_MAP_HEIGHT];
+
+    float lens_position[3];
+
+    /* nano seconds */
+    int64_t exposure_time_range[2];
+
+    /* nano seconds */
+    int64_t max_frame_duration;
+
+    cam_color_filter_arrangement_t color_arrangement;
+
+    float sensor_physical_size[2];
+
+    /* Dimensions of full pixel array, possibly including
+       black calibration pixels */
+    cam_dimension_t pixel_array_size;
+    /* Area of raw data which corresponds to only active
+       pixels; smaller or equal to pixelArraySize. */
+    cam_rect_t active_array_size;
+
+    /* Maximum raw value output by sensor */
+    int32_t white_level;
+
+    /* A fixed black level offset for each of the Bayer
+       mosaic channels */
+    int32_t black_level_pattern[4];
+
+    /* Time taken before flash can fire again in nano secs */
+    int64_t flash_charge_duration;
+
+    /* Maximum number of supported points in the tonemap
+       curve */
+    int32_t max_tone_map_curve_points;
+
+    /* supported formats */
+    size_t supported_scalar_format_cnt;
+    cam_format_t supported_scalar_fmts[CAM_FORMAT_MAX];
+
+    /* The minimum frame duration that is supported for above
+       raw resolution */
+    int64_t raw_min_duration;
+
+    size_t supported_sizes_tbl_cnt;
+    cam_dimension_t supported_sizes_tbl[MAX_SIZES_CNT];
+
+    /* The minimum frame duration that is supported for each
+     * resolution in availableProcessedSizes. Should correspond
+     * to the frame duration when only that processed stream
+     * is active, with all processing set to FAST */
+    int64_t min_duration[MAX_SIZES_CNT];
+
+    uint32_t max_face_detection_count;
+
+    uint8_t histogram_supported;
+    /* Number of histogram buckets supported */
+    int32_t histogram_size;
+    /* Maximum value possible for a histogram bucket */
+    int32_t max_histogram_count;
+
+    cam_dimension_t sharpness_map_size;
+
+    /* Maximum value possible for a sharpness map region */
+    int32_t max_sharpness_map_value;
+
+    cam_scene_mode_overrides_t scene_mode_overrides[CAM_SCENE_MODE_MAX];
+
+    /*Autoexposure modes for camera 3 api*/
+    size_t supported_ae_modes_cnt;
+    cam_ae_mode_type supported_ae_modes[CAM_AE_MODE_MAX];
+
+    /* picture sizes need scale*/
+    size_t scale_picture_sizes_cnt;
+    cam_dimension_t scale_picture_sizes[MAX_SCALE_SIZES_CNT];
+
+    uint8_t flash_available;
+
+    cam_rational_type_t base_gain_factor;    /* sensor base gain factor */
+    /* AF Bracketing info */
+    cam_af_bracketing_t  ubifocus_af_bracketing_need;
+    /* opti Zoom info */
+    cam_opti_zoom_t      opti_zoom_settings_need;
+    /* true Portrait info */
+    cam_true_portrait_t  true_portrait_settings_need;
+    /* FSSR info */
+    cam_fssr_t      fssr_settings_need;
+    /* AF bracketing info for multi-touch focus*/
+    cam_af_bracketing_t  mtf_af_bracketing_parm;
+    /* Sensor type information */
+    cam_sensor_type_t sensor_type;
+} cam_capability_t;
+
+typedef enum {
+    CAM_STREAM_CONSUMER_DISPLAY,    /* buf to be displayed */
+    CAM_STREAM_CONSUMER_VIDEO_ENC,  /* buf to be encoded by video */
+    CAM_STREAM_CONSUMER_JPEG_ENC,   /* ZSL YUV buf to be fed back to JPEG */
+} cam_stream_consumer_t;
+
+typedef enum {
+    CAM_STREAM_PARAM_TYPE_DO_REPROCESS = CAM_INTF_PARM_DO_REPROCESS,
+    CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO = CAM_INTF_PARM_SET_BUNDLE,
+    CAM_STREAM_PARAM_TYPE_SET_FLIP = CAM_INTF_PARM_STREAM_FLIP,
+    CAM_STREAM_PARAM_SET_STREAM_CONSUMER,
+    CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP = CAM_INTF_PARM_GET_OUTPUT_CROP,
+    CAM_STREAM_PARAM_TYPE_GET_IMG_PROP = CAM_INTF_PARM_GET_IMG_PROP,
+    CAM_STREAM_PARAM_TYPE_MAX
+} cam_stream_param_type_e;
+
+typedef struct {
+    uint32_t buf_index;           /* buf index to the source frame buffer that needs reprocess,
+                                    (assume buffer is already mapped)*/
+    uint32_t frame_idx;           /* frame id of source frame to be reprocessed */
+    int32_t ret_val;              /* return value from reprocess. Could have different meanings,
+                                     e.g., faceID in the case of face registration. */
+    uint8_t meta_present;         /* if there is meta data associated with this reprocess frame */
+    uint32_t meta_stream_handle;  /* meta data stream ID. only valid if meta_present != 0 */
+    uint32_t meta_buf_index;      /* buf index to meta data buffer. only valid if meta_present != 0 */
+
+    cam_per_frame_pp_config_t frame_pp_config; /* per frame post-proc configuration */
+} cam_reprocess_param;
+
+typedef struct {
+    uint32_t flip_mask;
+} cam_flip_mode_t;
+
+#define IMG_NAME_SIZE 32
+typedef struct {
+    cam_rect_t crop;  /* crop info for the image */
+    cam_dimension_t input; /* input dimension of the image */
+    cam_dimension_t output; /* output dimension of the image */
+    char name[IMG_NAME_SIZE]; /* optional name of the ext*/
+    uint32_t is_raw_image; /* image is raw */
+    cam_format_t format; /* image format */
+    uint32_t analysis_image; /* image is used for analysis. hence skip thumbnail */
+    uint32_t size; /* size of the image */
+} cam_stream_img_prop_t;
+
+typedef struct {
+    cam_stream_param_type_e type;
+    union {
+        cam_reprocess_param reprocess;  /* do reprocess */
+        cam_bundle_config_t bundleInfo; /* set bundle info*/
+        cam_flip_mode_t flipInfo;       /* flip mode */
+        cam_stream_consumer_t consumer; /* stream consumer */
+        cam_crop_data_t outputCrop;     /* output crop for current frame */
+        cam_stream_img_prop_t imgProp;  /* image properties of current frame */
+    };
+} cam_stream_parm_buffer_t;
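+
+/*
+ * Usage sketch (illustrative, not part of the original sources): the union is
+ * discriminated by `type`, so callers set `type` first and fill only the
+ * matching member. The flip value below is a placeholder, not a named constant
+ * from this header.
+ *
+ *   cam_stream_parm_buffer_t parm;
+ *   memset(&parm, 0, sizeof(parm));
+ *   parm.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+ *   parm.flipInfo.flip_mask = 1;   // e.g. a horizontal-flip bit (placeholder)
+ */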
+
+/* stream info */
+typedef struct {
+    /* stream ID from server */
+    uint32_t stream_svr_id;
+
+    /* stream type */
+    cam_stream_type_t stream_type;
+
+    /* image format */
+    cam_format_t fmt;
+
+    /* image dimension */
+    cam_dimension_t dim;
+
+    /* buffer plane information, calculated based on stream_type, fmt,
+       dim, and padding_info (from stream config). Info includes:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t buf_planes;
+
+    /* number of stream bufs will be allocated */
+    uint32_t num_bufs;
+
+    /* streaming type */
+    cam_streaming_mode_t streaming_mode;
+    /* number of frames that need to be generated;
+     * only valid when streaming_mode = CAM_STREAMING_MODE_BURST */
+    uint8_t num_of_burst;
+
+    /* stream specific pp config */
+    cam_pp_feature_config_t pp_config;
+
+    /* this section is valid if offline reprocess type stream */
+    cam_stream_reproc_config_t reprocess_config;
+
+    cam_stream_parm_buffer_t parm_buf;    /* stream based parameters */
+
+    uint8_t dis_enable;
+
+    /* Image Stabilization type */
+    cam_is_type_t is_type;
+
+    cam_stream_secure_t is_secure;
+
+} cam_stream_info_t;
+
+/*****************************************************************************
+ *                 Code for Domain Socket Based Parameters                   *
+ ****************************************************************************/
+
+#define POINTER_OF(PARAM_ID,TABLE_PTR)    \
+        (&(TABLE_PTR->entry[PARAM_ID].data))
+
+#define GET_FIRST_PARAM_ID(TABLE_PTR)     \
+        (TABLE_PTR->first_flagged_entry)
+
+#define SET_FIRST_PARAM_ID(TABLE_PTR,PARAM_ID)     \
+        TABLE_PTR->first_flagged_entry=PARAM_ID
+
+#define GET_NEXT_PARAM_ID(CURRENT_PARAM_ID,TABLE_PTR)    \
+        (TABLE_PTR->entry[CURRENT_PARAM_ID].next_flagged_entry)
+
+#define SET_NEXT_PARAM_ID(CURRENT_PARAM_ID,TABLE_PTR,NEXT_PARAM_ID)    \
+        TABLE_PTR->entry[CURRENT_PARAM_ID].next_flagged_entry=NEXT_PARAM_ID;
+
+#define INCLUDE(PARAM_ID,DATATYPE,COUNT)  \
+        DATATYPE member_variable_##PARAM_ID[ COUNT ]
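+
+/*
+ * Expansion sketch (illustrative, not part of the original sources): each
+ * INCLUDE() line below declares one array member sized for its parameter, e.g.
+ *
+ *   INCLUDE(CAM_INTF_PARM_HAL_VERSION, int32_t, 1);
+ *     // expands to: int32_t member_variable_CAM_INTF_PARM_HAL_VERSION[ 1 ];
+ *
+ * so the enclosing unions are only as large as their largest single parameter.
+ */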
+
+#define GET_NEXT_PARAM(TABLE_PTR, TYPE)    \
+        (TYPE *)((char *)TABLE_PTR +       \
+               TABLE_PTR->aligned_size)
+
+typedef union {
+/**************************************************************************************
+ *          ID from (cam_intf_parm_type_t)          DATATYPE                     COUNT
+ **************************************************************************************/
+    INCLUDE(CAM_INTF_PARM_HAL_VERSION,              int32_t,                     1);
+    /* Shared between HAL1 and HAL3 */
+    INCLUDE(CAM_INTF_PARM_ANTIBANDING,              int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EXPOSURE_COMPENSATION,    int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_LOCK,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ENABLE,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FPS_RANGE,                cam_fps_range_t,             1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_MODE,               uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_MANUAL_FOCUS_POS,         cam_manual_focus_parm_t,     1);
+    INCLUDE(CAM_INTF_PARM_AWB_LOCK,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AWB_ENABLE,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AF_ENABLE,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WHITE_BALANCE,            int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EFFECT,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_BESTSHOT_MODE,            int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_DIS_ENABLE,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LED_MODE,                 int32_t,                     1);
+
+    /* HAL1 specific */
+    INCLUDE(CAM_INTF_PARM_QUERY_FLASH4SNAP,         int32_t,                     1); //read only
+    INCLUDE(CAM_INTF_PARM_EXPOSURE,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SHARPNESS,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_CONTRAST,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SATURATION,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_BRIGHTNESS,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ISO,                      int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EXPOSURE_TIME,            int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ZOOM,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ROLLOFF,                  int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_MODE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ALGO_TYPE,            int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_ALGO_TYPE,          int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ROI,                  cam_set_aec_roi_t,           1);
+    INCLUDE(CAM_INTF_PARM_AF_ROI,                   cam_roi_info_t,              1);
+    INCLUDE(CAM_INTF_PARM_SCE_FACTOR,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FD,                       cam_fd_set_parm_t,           1);
+    INCLUDE(CAM_INTF_PARM_MCE,                      int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HFR,                      int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_REDEYE_REDUCTION,         int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WAVELET_DENOISE,          cam_denoise_param_t,         1);
+    INCLUDE(CAM_INTF_PARM_HISTOGRAM,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ASD_ENABLE,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_RECORDING_HINT,           int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR,                      cam_hdr_param_t,             1);
+    INCLUDE(CAM_INTF_PARM_FRAMESKIP,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ZSL_MODE,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR_NEED_1X,              int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LOCK_CAF,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_VIDEO_HDR,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_VT,                       int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_GET_CHROMATIX,            tune_chromatix_t,            1);
+    INCLUDE(CAM_INTF_PARM_SET_RELOAD_CHROMATIX,     tune_chromatix_t,            1);
+    INCLUDE(CAM_INTF_PARM_GET_AFTUNE,               tune_autofocus_t,            1);
+    INCLUDE(CAM_INTF_PARM_SET_RELOAD_AFTUNE,        tune_autofocus_t,            1);
+    INCLUDE(CAM_INTF_PARM_SET_AUTOFOCUSTUNING,      tune_actuator_t,             1);
+    INCLUDE(CAM_INTF_PARM_SET_VFE_COMMAND,          tune_cmd_t,                  1);
+    INCLUDE(CAM_INTF_PARM_SET_PP_COMMAND,           tune_cmd_t,                  1);
+    INCLUDE(CAM_INTF_PARM_MAX_DIMENSION,            cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_PARM_RAW_DIMENSION,            cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_PARM_TINTLESS,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_CDS_MODE,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EZTUNE_CMD,               cam_eztune_cmd_data_t,       1);
+    INCLUDE(CAM_INTF_PARM_AF_MOBICAT_CMD,           int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LONGSHOT_ENABLE,          int8_t,                      1);
+
+    /* HAL3 specific */
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE,       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM,  cam_color_correct_matrix_t,  1);
+    INCLUDE(CAM_INTF_META_AEC_MODE,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_ROI,                  cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,   cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_AF_ROI,                   cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AF_TRIGGER,               cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_AWB_REGIONS,              cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_CAPTURE_INTENT,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_MODE,                     uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_DEMOSAIC,                 int32_t,                     1);
+    INCLUDE(CAM_INTF_META_EDGE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SHARPNESS_STRENGTH,       int32_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_POWER,              uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_FIRING_TIME,        int64_t,                     1);
+    INCLUDE(CAM_INTF_META_GEOMETRIC_MODE,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_GEOMETRIC_STRENGTH,       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_HOTPIXEL_MODE,            uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_LENS_APERTURE,            float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FILTERDENSITY,       float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCAL_LENGTH,        float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_DISTANCE,      float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_OPT_STAB_MODE,       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_MODE,     uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SCALER_CROP_REGION,       cam_crop_region_t,           1);
+    INCLUDE(CAM_INTF_META_SENSOR_EXPOSURE_TIME,     int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_FRAME_DURATION,    int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_SENSITIVITY,       int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SHADING_MODE,             uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_SHADING_STRENGTH,         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_FACEDETECT_MODE,    uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_HISTOGRAM_MODE,     uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_TONEMAP_CURVE_BLUE,       cam_tonemap_curve_t,         1);
+    INCLUDE(CAM_INTF_META_TONEMAP_CURVE_GREEN,      cam_tonemap_curve_t,         1);
+    INCLUDE(CAM_INTF_META_TONEMAP_CURVE_RED,        cam_tonemap_curve_t,         1);
+    INCLUDE(CAM_INTF_META_TONEMAP_MODE,             uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_MODE,               uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_STATS_DEBUG_MASK,         uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_ISP_DEBUG_MASK,           uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_ALGO_OPTIMIZATIONS_MASK,  uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_SENSOR_DEBUG_MASK,        uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_BRACKETING,         cam_af_bracketing_t,         1);
+    INCLUDE(CAM_INTF_PARM_MULTI_TOUCH_FOCUS_BRACKETING, cam_af_bracketing_t,     1);
+    INCLUDE(CAM_INTF_PARM_FLASH_BRACKETING,         cam_flash_bracketing_t,      1);
+} parm_type_t;
+
+typedef union {
+/**************************************************************************************
+ *  ID from (cam_intf_metadata_type_t)           DATATYPE                     COUNT
+ **************************************************************************************/
+    /* common between HAL1 and HAL3 */
+    INCLUDE(CAM_INTF_META_HISTOGRAM,                  cam_hist_stats_t,            1);
+    INCLUDE(CAM_INTF_META_FACE_DETECTION,             cam_face_detection_data_t,   1);
+    INCLUDE(CAM_INTF_META_AUTOFOCUS_DATA,             cam_auto_focus_data_t,       1);
+
+    /* Specific to HAL1 */
+    INCLUDE(CAM_INTF_META_CROP_DATA,                  cam_crop_data_t,             1);
+    INCLUDE(CAM_INTF_META_PREP_SNAPSHOT_DONE,         int32_t,                     1);
+    INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE,       cam_frame_idx_range_t,       1);
+    /* Specific to HAL3 */
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER_VALID,         int32_t,                     1);
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER,               uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE,         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_ID,          int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_ROI,                    cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AEC_STATE,                  uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_MODE,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_ROI,                     cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AF_STATE,                   uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_TRIGGER_ID,              int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WHITE_BALANCE,              int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AWB_REGIONS,                cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AWB_STATE,                  uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_MODE,                       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_EDGE,                       int32_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_POWER,                uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_FIRING_TIME,          int64_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_MODE,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_STATE,                int32_t,                     1);
+    INCLUDE(CAM_INTF_META_HOTPIXEL_MODE,              uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_LENS_APERTURE,              float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FILTERDENSITY,         float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCAL_LENGTH,          float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_DISTANCE,        float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_RANGE,           float,                       2);
+    INCLUDE(CAM_INTF_META_LENS_OPT_STAB_MODE,         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_STATE,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_MODE,       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_SCALER_CROP_REGION,         cam_crop_region_t,           1);
+    INCLUDE(CAM_INTF_META_SENSOR_EXPOSURE_TIME,       int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_FRAME_DURATION,      int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_SENSITIVITY,         int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_TIMESTAMP,           struct timeval,              1);
+    INCLUDE(CAM_INTF_META_SHADING_MODE,               uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_FACEDETECT_MODE,      uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_HISTOGRAM_MODE,       uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,   uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP,        cam_sharpness_map_t,         3);
+    INCLUDE(CAM_INTF_META_ASD_HDR_SCENE_DATA,      cam_asd_hdr_scene_data_t,       1);
+    INCLUDE(CAM_INTF_META_PRIVATE_DATA,               char,                        MAX_METADATA_PAYLOAD_SIZE);
+
+} metadata_type_t;
+
+/****************************DO NOT MODIFY BELOW THIS LINE!!!!*********************/
+
+typedef struct {
+    metadata_type_t data;
+    uint8_t next_flagged_entry;
+} metadata_entry_type_t;
+
+typedef struct {
+    uint8_t first_flagged_entry;
+    metadata_entry_type_t entry[CAM_INTF_PARM_MAX];
+} metadata_buffer_t;
+
+typedef struct {
+    parm_type_t data;
+    uint8_t next_flagged_entry;
+} parm_entry_type_t;
+
+// We need to align these contiguous param structures in memory.
+typedef struct {
+    cam_intf_parm_type_t entry_type;
+    size_t size;
+    size_t aligned_size;
+    char data[1];
+} parm_entry_type_new_t;
+
+typedef struct {
+    uint8_t first_flagged_entry;
+    parm_entry_type_t entry[CAM_INTF_PARM_MAX];
+} parm_buffer_t;
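+
+/*
+ * Traversal sketch (illustrative, not part of the original sources): parameter
+ * tables are walked as a linked list of flagged entries, starting at
+ * first_flagged_entry and following next_flagged_entry until CAM_INTF_PARM_MAX.
+ *
+ *   parm_buffer_t *table = ...;  // shared over the domain socket
+ *   cam_intf_parm_type_t id = (cam_intf_parm_type_t)GET_FIRST_PARAM_ID(table);
+ *   while (id < CAM_INTF_PARM_MAX) {
+ *       void *data = POINTER_OF(id, table);
+ *       // interpret `data` according to `id` ...
+ *       id = (cam_intf_parm_type_t)GET_NEXT_PARAM_ID(id, table);
+ *   }
+ */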
+
+typedef struct {
+    size_t num_entry;
+    size_t tot_rem_size;
+    size_t curr_size;
+    char entry[1];
+} parm_buffer_new_t;
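+
+/*
+ * Traversal sketch for the packed ("new") layout (illustrative, not part of
+ * the original sources): entries are variable-sized and laid out back to back,
+ * so the walk advances by each entry's aligned_size via GET_NEXT_PARAM.
+ *
+ *   parm_buffer_new_t *buf = ...;
+ *   parm_entry_type_new_t *curr = (parm_entry_type_new_t *)&buf->entry[0];
+ *   for (size_t i = 0; i < buf->num_entry; i++) {
+ *       // curr->entry_type identifies the parameter; curr->data holds its payload
+ *       curr = GET_NEXT_PARAM(curr, parm_entry_type_new_t);
+ *   }
+ */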
+
+#ifdef  __cplusplus
+extern "C" {
+#endif
+void *POINTER_OF_PARAM(cam_intf_parm_type_t PARAM_ID,
+                    void *TABLE_PTR);
+#ifdef  __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_INTF_H__ */
diff --git a/msm8974/QCamera2/stack/common/cam_list.h b/msm8974/QCamera2/stack/common/cam_list.h
new file mode 100644
index 0000000..7f88d86
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/cam_list.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+/* This file is a slave copy from /vendor/qcom/proprietary/mm-camera/common;
+ * please do not modify it directly here. */
+
+#ifndef __CAMLIST_H
+#define __CAMLIST_H
+
+#include <stddef.h>
+#include <string.h>
+#include <stdlib.h>
+
+#define member_of(ptr, type, member) ({ \
+  const typeof(((type *)0)->member) *__mptr = (ptr); \
+  (type *)((char *)__mptr - offsetof(type,member));})
+
+struct cam_list {
+  struct cam_list *next, *prev;
+};
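+
+/*
+ * Usage sketch (illustrative, not part of the original sources): member_of()
+ * recovers the enclosing structure from a pointer to one of its members, in
+ * the style of the kernel's container_of(). The struct below is hypothetical.
+ *
+ *   struct my_node {
+ *     int payload;
+ *     struct cam_list list;
+ *   };
+ *
+ *   struct cam_list *pos = head->next;
+ *   struct my_node *node = member_of(pos, struct my_node, list);
+ *   // node->payload is now accessible
+ */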
+
+static inline void cam_list_init(struct cam_list *ptr)
+{
+  ptr->next = ptr;
+  ptr->prev = ptr;
+}
+
+static inline void cam_list_add_tail_node(struct cam_list *item,
+  struct cam_list *head)
+{
+  struct cam_list *prev = head->prev;
+
+  head->prev = item;
+  item->next = head;
+  item->prev = prev;
+  prev->next = item;
+}
+
+static inline void cam_list_insert_before_node(struct cam_list *item,
+  struct cam_list *node)
+{
+  item->next = node;
+  item->prev = node->prev;
+  item->prev->next = item;
+  node->prev = item;
+}
+
+static inline void cam_list_del_node(struct cam_list *ptr)
+{
+  struct cam_list *prev = ptr->prev;
+  struct cam_list *next = ptr->next;
+
+  next->prev = ptr->prev;
+  prev->next = ptr->next;
+  ptr->next = ptr;
+  ptr->prev = ptr;
+}
+
+#endif /* __CAMLIST_H */
diff --git a/msm8974/QCamera2/stack/common/cam_queue.h b/msm8974/QCamera2/stack/common/cam_queue.h
new file mode 100644
index 0000000..a23c622
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/cam_queue.h
@@ -0,0 +1,130 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "cam_list.h"
+
+typedef struct {
+    struct cam_list list;
+    void *data;
+} cam_node_t;
+
+typedef struct {
+    cam_node_t head; /* dummy head */
+    uint32_t size;
+    pthread_mutex_t lock;
+} cam_queue_t;
+
+static inline int32_t cam_queue_init(cam_queue_t *queue)
+{
+    pthread_mutex_init(&queue->lock, NULL);
+    cam_list_init(&queue->head.list);
+    queue->size = 0;
+    return 0;
+}
+
+static inline int32_t cam_queue_enq(cam_queue_t *queue, void *data)
+{
+    cam_node_t *node =
+        (cam_node_t *)malloc(sizeof(cam_node_t));
+    if (NULL == node) {
+        return -1;
+    }
+
+    memset(node, 0, sizeof(cam_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+static inline void *cam_queue_deq(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    void *data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        cam_list_del_node(&node->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
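+
+/*
+ * Usage sketch (illustrative, not part of the original sources): a queue owns
+ * heap-allocated payloads; cam_queue_flush()/cam_queue_deinit() free whatever
+ * is still enqueued, so the caller frees only items it has dequeued.
+ *
+ *   cam_queue_t q;
+ *   cam_queue_init(&q);
+ *   int *job = (int *)malloc(sizeof(int));
+ *   *job = 42;
+ *   cam_queue_enq(&q, job);
+ *   int *out = (int *)cam_queue_deq(&q);
+ *   if (out) { free(out); }
+ *   cam_queue_deinit(&q);      // flushes and frees anything left behind
+ */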
+
+static inline int32_t cam_queue_flush(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* TODO: later, consider pointers inside data. */
+        /* For now we assume data contains no pointers,
+         * so we can free data directly. */
+        if (NULL != node->data) {
+            free(node->data);
+        }
+        free(node);
+
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+static inline int32_t cam_queue_deinit(cam_queue_t *queue)
+{
+    cam_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
diff --git a/msm8974/QCamera2/stack/common/cam_semaphore.h b/msm8974/QCamera2/stack/common/cam_semaphore.h
new file mode 100644
index 0000000..a52f907
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/cam_semaphore.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_SEMAPHORE_H__
+#define __QCAMERA_SEMAPHORE_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Implement a semaphore with a mutex and a condition variable,
+ * because POSIX semaphores on Android are not widely used or
+ * well tested.
+ */
+
+typedef struct {
+    int val;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+} cam_semaphore_t;
+
+static inline void cam_sem_init(cam_semaphore_t *s, int n)
+{
+    pthread_mutex_init(&(s->mutex), NULL);
+    pthread_cond_init(&(s->cond), NULL);
+    s->val = n;
+}
+
+static inline void cam_sem_post(cam_semaphore_t *s)
+{
+    pthread_mutex_lock(&(s->mutex));
+    s->val++;
+    pthread_cond_signal(&(s->cond));
+    pthread_mutex_unlock(&(s->mutex));
+}
+
+static inline int cam_sem_wait(cam_semaphore_t *s)
+{
+    int rc = 0;
+    pthread_mutex_lock(&(s->mutex));
+    while (s->val == 0)
+        rc = pthread_cond_wait(&(s->cond), &(s->mutex));
+    s->val--;
+    pthread_mutex_unlock(&(s->mutex));
+    return rc;
+}
+
+static inline void cam_sem_destroy(cam_semaphore_t *s)
+{
+    pthread_mutex_destroy(&(s->mutex));
+    pthread_cond_destroy(&(s->cond));
+    s->val = 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_SEMAPHORE_H__ */
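Since cam_semaphore.h builds a counting semaphore out of a mutex and a condition variable, its typical use is a simple cross-thread wake-up. The sketch below is illustrative only (not part of this patch) and assumes pthreads are linked in; the worker thread and its body are hypothetical.

#include <pthread.h>
#include <stdio.h>
#include "cam_semaphore.h"

static cam_semaphore_t g_sem;

static void *worker(void *arg)
{
    (void)arg;
    cam_sem_wait(&g_sem);          /* blocks while the count is 0 */
    printf("worker woke up\n");
    return NULL;
}

int main(void)
{
    pthread_t tid;
    cam_sem_init(&g_sem, 0);       /* start with a count of 0 */
    pthread_create(&tid, NULL, worker, NULL);
    cam_sem_post(&g_sem);          /* count -> 1, signals the condvar */
    pthread_join(tid, NULL);
    cam_sem_destroy(&g_sem);
    return 0;
}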
diff --git a/msm8974/QCamera2/stack/common/cam_types.h b/msm8974/QCamera2/stack/common/cam_types.h
new file mode 100644
index 0000000..a1f72a6
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/cam_types.h
@@ -0,0 +1,1624 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_TYPES_H__
+#define __QCAMERA_TYPES_H__
+
+#include <stdint.h>
+#include <pthread.h>
+#include <inttypes.h>
+#include <media/msmb_camera.h>
+#include <stdlib.h>
+#include <string.h>
+
+#define CAM_MAX_NUM_BUFS_PER_STREAM  (24)
+#define MAX_METADATA_PAYLOAD_SIZE    (1024)
+#define AWB_DEBUG_DATA_SIZE          (7027)
+#define AEC_DEBUG_DATA_SIZE          (1720)
+#define AF_DEBUG_DATA_SIZE           (643)
+#define ASD_DEBUG_DATA_SIZE          (100)
+#define STATS_BUFFER_DEBUG_DATA_SIZE (74756)
+
+#define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
+
+#define MAX_ZOOMS_CNT 79
+#define MAX_SIZES_CNT 24
+#define MAX_EXP_BRACKETING_LENGTH 32
+#define MAX_ROI 5
+#define MAX_STREAM_NUM_IN_BUNDLE 4
+#define MAX_NUM_STREAMS          8
+#define CHROMATIX_SIZE 21292
+#define COMMONCHROMATIX_SIZE 42044
+#define AFTUNE_SIZE 4000  //sizeof(actuator_driver_params_t) + sizeof(af_algo_tune_parms_t)
+#define MAX_SCALE_SIZES_CNT 8
+#define MAX_SAMP_DECISION_CNT     64
+
+#define MAX_ISP_DATA_SIZE 11500
+#define MAX_PP_DATA_SIZE 2000
+#define MAX_AE_STATS_DATA_SIZE  1000
+#define MAX_AWB_STATS_DATA_SIZE 1000
+#define MAX_AF_STATS_DATA_SIZE  1000
+
+
+
+#define TUNING_DATA_VERSION        1
+#define TUNING_SENSOR_DATA_MAX     0x10000 /*(need value from sensor team)*/
+#define TUNING_VFE_DATA_MAX        0x10000 /*(need value from vfe team)*/
+#define TUNING_CPP_DATA_MAX        0x10000 /*(need value from pproc team)*/
+#define TUNING_CAC_DATA_MAX        0x10000 /*(need value from imglib team)*/
+#define TUNING_DATA_MAX            (TUNING_SENSOR_DATA_MAX + \
+                                   TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX + \
+                                   TUNING_CAC_DATA_MAX)
+
+#define TUNING_SENSOR_DATA_OFFSET  0
+#define TUNING_VFE_DATA_OFFSET     TUNING_SENSOR_DATA_MAX
+#define TUNING_CPP_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + TUNING_VFE_DATA_MAX)
+#define TUNING_CAC_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + \
+                                   TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX)
+#define MAX_STATS_DATA_SIZE 4000
+
+#define MAX_AF_BRACKETING_VALUES 5
+
+typedef enum {
+    CAM_HAL_V1 = 1,
+    CAM_HAL_V3 = 3
+} cam_hal_version_t;
+
+typedef enum {
+    CAM_STATUS_SUCCESS,       /* Operation succeeded */
+    CAM_STATUS_FAILED,        /* Failure in doing operation */
+    CAM_STATUS_INVALID_PARM,  /* Invalid parameter provided */
+    CAM_STATUS_NOT_SUPPORTED, /* Parameter/operation not supported */
+    CAM_STATUS_ACCEPTED,      /* Parameter accepted */
+    CAM_STATUS_MAX,
+} cam_status_t;
+
+typedef enum {
+    CAM_POSITION_BACK,
+    CAM_POSITION_FRONT
+} cam_position_t;
+
+typedef enum {
+    CAM_FORMAT_JPEG = 0,
+    CAM_FORMAT_YUV_420_NV12 = 1,
+    CAM_FORMAT_YUV_420_NV21,
+    CAM_FORMAT_YUV_420_NV21_ADRENO,
+    CAM_FORMAT_YUV_420_YV12,
+    CAM_FORMAT_YUV_422_NV16,
+    CAM_FORMAT_YUV_422_NV61,
+    CAM_FORMAT_YUV_420_NV12_VENUS,
+
+    /* Please note below are the definitions for raw image.
+     * Any format other than raw image format should be declared
+     * before this line!!! */
+
+    /* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
+
+    /* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
+     * U and V are interleaved with Y: YUYV or YVYU */
+    CAM_FORMAT_YUV_RAW_8BIT_YUYV,
+    CAM_FORMAT_YUV_RAW_8BIT_YVYU,
+    CAM_FORMAT_YUV_RAW_8BIT_UYVY,
+    CAM_FORMAT_YUV_RAW_8BIT_VYUY,
+
+    /* QCOM RAW formats where data is packed into 64bit word.
+     * 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
+     *       stored at LSB.
+     * 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB.
+     * 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB. */
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,
+    /* MIPI RAW formats based on the MIPI CSI-2 specification.
+     * 8BPP: Each pixel occupies one byte, starting at LSB.
+     *       Output width of the image has no restrictions.
+     * 10BPP: Four pixels are held in every 5 bytes. The output
+     *       width of the image must be a multiple of 4 pixels.
+     * 12BPP: Two pixels are held in every 3 bytes. The output
+     *       width of the image must be a multiple of 2 pixels. */
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,
+    /* Ideal raw formats where image data has gone through black
+     * level correction, lens rolloff, demux/channel gain, bad pixel
+     * correction, and ABF.
+     * Ideal raw formats can output any of the QCOM_RAW and MIPI_RAW
+     * formats, plus plain8 8bpp, plain16 8bpp, plain16 10bpp, and
+     * plain16 12bpp */
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
+
+    /* generic 8-bit raw */
+    CAM_FORMAT_JPEG_RAW_8BIT,
+    CAM_FORMAT_META_RAW_8BIT,
+    CAM_FORMAT_META_RAW_10BIT,
+
+    CAM_FORMAT_MAX
+} cam_format_t;
+
+typedef enum {
+    /* applies to HAL 1 */
+    CAM_STREAM_TYPE_DEFAULT,       /* default stream type */
+    CAM_STREAM_TYPE_PREVIEW,       /* preview */
+    CAM_STREAM_TYPE_POSTVIEW,      /* postview */
+    CAM_STREAM_TYPE_SNAPSHOT,      /* snapshot */
+    CAM_STREAM_TYPE_VIDEO,         /* video */
+
+    /* applies to HAL 3 */
+    CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */
+    CAM_STREAM_TYPE_YUV,          /* app requested callback stream type */
+
+    /* applies to both HAL 1 and HAL 3 */
+    CAM_STREAM_TYPE_METADATA,      /* meta data */
+    CAM_STREAM_TYPE_RAW,           /* raw dump from camif */
+    CAM_STREAM_TYPE_OFFLINE_PROC,  /* offline process */
+    CAM_STREAM_TYPE_MAX,
+} cam_stream_type_t;
+
+typedef enum {
+    CAM_PAD_NONE = 1,
+    CAM_PAD_TO_2 = 2,
+    CAM_PAD_TO_4 = 4,
+    CAM_PAD_TO_WORD = CAM_PAD_TO_4,
+    CAM_PAD_TO_8 = 8,
+    CAM_PAD_TO_16 = 16,
+    CAM_PAD_TO_32 = 32,
+    CAM_PAD_TO_64 = 64,
+    CAM_PAD_TO_1K = 1024,
+    CAM_PAD_TO_2K = 2048,
+    CAM_PAD_TO_4K = 4096,
+    CAM_PAD_TO_8K = 8192
+} cam_pad_format_t;
+
+typedef enum {
+    /* the following are per camera */
+    CAM_MAPPING_BUF_TYPE_CAPABILITY,  /* mapping camera capability buffer */
+    CAM_MAPPING_BUF_TYPE_PARM_BUF,    /* mapping parameters buffer */
+
+    /* the following are per stream */
+    CAM_MAPPING_BUF_TYPE_STREAM_BUF,        /* mapping stream buffers */
+    CAM_MAPPING_BUF_TYPE_STREAM_INFO,       /* mapping stream information buffer */
+    CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, /* mapping offline process input buffer */
+    CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,  /* mapping offline meta buffer */
+    CAM_MAPPING_BUF_TYPE_MAX
+} cam_mapping_buf_type;
+
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if type is STREAM_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    uint32_t cookie;      /* could be job_id(uint32_t) to identify mapping job */
+    int fd;               /* origin fd */
+    size_t size;          /* size of the buffer */
+} cam_buf_map_type;
+
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if STREAM_BUF or HIST_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    uint32_t cookie;      /* could be job_id(uint32_t) to identify unmapping job */
+} cam_buf_unmap_type;
+
+typedef enum {
+    CAM_MAPPING_TYPE_FD_MAPPING,
+    CAM_MAPPING_TYPE_FD_UNMAPPING,
+    CAM_MAPPING_TYPE_MAX
+} cam_mapping_type;
+
+typedef struct {
+    cam_mapping_type msg_type;
+    union {
+        cam_buf_map_type buf_map;
+        cam_buf_unmap_type buf_unmap;
+    } payload;
+} cam_sock_packet_t;
+
+typedef enum {
+    CAM_MODE_2D = (1<<0),
+    CAM_MODE_3D = (1<<1)
+} cam_mode_t;
+
+typedef struct {
+    uint32_t len;
+    uint32_t y_offset;
+    uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+    uint32_t len;
+    uint32_t offset;
+    int32_t offset_x;
+    int32_t offset_y;
+    int32_t stride;
+    int32_t scanline;
+    int32_t width;    /* width without padding */
+    int32_t height;   /* height without padding */
+} cam_mp_len_offset_t;
+
+typedef struct {
+    uint32_t width_padding;
+    uint32_t height_padding;
+    uint32_t plane_padding;
+} cam_padding_info_t;
+
+typedef struct {
+    uint32_t num_planes;
+    union {
+        cam_sp_len_offset_t sp;
+        cam_mp_len_offset_t mp[VIDEO_MAX_PLANES];
+    };
+    uint32_t frame_len;
+} cam_frame_len_offset_t;
+
+typedef struct {
+    int32_t width;
+    int32_t height;
+} cam_dimension_t;
+
+typedef struct {
+    cam_frame_len_offset_t plane_info;
+} cam_stream_buf_plane_info_t;
+
+typedef struct {
+    float min_fps;
+    float max_fps;
+    float video_min_fps;
+    float video_max_fps;
+} cam_fps_range_t;
+
+
+typedef enum {
+    CAM_HFR_MODE_OFF,
+    CAM_HFR_MODE_60FPS,
+    CAM_HFR_MODE_90FPS,
+    CAM_HFR_MODE_120FPS,
+    CAM_HFR_MODE_150FPS,
+    CAM_HFR_MODE_MAX
+} cam_hfr_mode_t;
+
+typedef struct {
+    cam_hfr_mode_t mode;
+    cam_dimension_t dim;
+    uint8_t frame_skip;
+    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table size */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+} cam_hfr_info_t;
+
+typedef enum {
+    CAM_WB_MODE_AUTO,
+    CAM_WB_MODE_CUSTOM,
+    CAM_WB_MODE_INCANDESCENT,
+    CAM_WB_MODE_FLUORESCENT,
+    CAM_WB_MODE_WARM_FLUORESCENT,
+    CAM_WB_MODE_DAYLIGHT,
+    CAM_WB_MODE_CLOUDY_DAYLIGHT,
+    CAM_WB_MODE_TWILIGHT,
+    CAM_WB_MODE_SHADE,
+    CAM_WB_MODE_CCT,
+    CAM_WB_MODE_OFF,
+    CAM_WB_MODE_MAX
+} cam_wb_mode_type;
+
+typedef enum {
+    CAM_ANTIBANDING_MODE_OFF,
+    CAM_ANTIBANDING_MODE_60HZ,
+    CAM_ANTIBANDING_MODE_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO,
+    CAM_ANTIBANDING_MODE_AUTO_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO_60HZ,
+    CAM_ANTIBANDING_MODE_MAX,
+} cam_antibanding_mode_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+    CAM_ISO_MODE_AUTO,
+    CAM_ISO_MODE_DEBLUR,
+    CAM_ISO_MODE_100,
+    CAM_ISO_MODE_200,
+    CAM_ISO_MODE_400,
+    CAM_ISO_MODE_800,
+    CAM_ISO_MODE_1600,
+    CAM_ISO_MODE_3200,
+    CAM_ISO_MODE_MAX
+} cam_iso_mode_type;
+
+typedef enum {
+    CAM_AEC_MODE_FRAME_AVERAGE,
+    CAM_AEC_MODE_CENTER_WEIGHTED,
+    CAM_AEC_MODE_SPOT_METERING,
+    CAM_AEC_MODE_SMART_METERING,
+    CAM_AEC_MODE_USER_METERING,
+    CAM_AEC_MODE_SPOT_METERING_ADV,
+    CAM_AEC_MODE_CENTER_WEIGHTED_ADV,
+    CAM_AEC_MODE_MAX
+} cam_auto_exposure_mode_type;
+
+typedef enum {
+    CAM_AE_MODE_OFF,
+    CAM_AE_MODE_ON,
+    CAM_AE_MODE_MAX
+} cam_ae_mode_type;
+
+typedef enum {
+    CAM_FOCUS_ALGO_AUTO,
+    CAM_FOCUS_ALGO_SPOT,
+    CAM_FOCUS_ALGO_CENTER_WEIGHTED,
+    CAM_FOCUS_ALGO_AVERAGE,
+    CAM_FOCUS_ALGO_MAX
+} cam_focus_algorithm_type;
+
+/* Auto focus mode */
+typedef enum {
+    CAM_FOCUS_MODE_AUTO,
+    CAM_FOCUS_MODE_INFINITY,
+    CAM_FOCUS_MODE_MACRO,
+    CAM_FOCUS_MODE_FIXED,
+    CAM_FOCUS_MODE_EDOF,
+    CAM_FOCUS_MODE_CONTINOUS_VIDEO,
+    CAM_FOCUS_MODE_CONTINOUS_PICTURE,
+    CAM_FOCUS_MODE_MANUAL,
+    CAM_FOCUS_MODE_MAX
+} cam_focus_mode_type;
+
+typedef enum {
+    CAM_MANUAL_FOCUS_MODE_INDEX,
+    CAM_MANUAL_FOCUS_MODE_DAC_CODE,
+    CAM_MANUAL_FOCUS_MODE_MAX
+} cam_manual_focus_mode_type;
+
+typedef struct {
+    cam_manual_focus_mode_type flag;
+    int32_t af_manual_lens_position;
+} cam_manual_focus_parm_t;
+
+typedef enum {
+    CAM_SCENE_MODE_OFF,
+    CAM_SCENE_MODE_AUTO,
+    CAM_SCENE_MODE_LANDSCAPE,
+    CAM_SCENE_MODE_SNOW,
+    CAM_SCENE_MODE_BEACH,
+    CAM_SCENE_MODE_SUNSET,
+    CAM_SCENE_MODE_NIGHT,
+    CAM_SCENE_MODE_PORTRAIT,
+    CAM_SCENE_MODE_BACKLIGHT,
+    CAM_SCENE_MODE_SPORTS,
+    CAM_SCENE_MODE_ANTISHAKE,
+    CAM_SCENE_MODE_FLOWERS,
+    CAM_SCENE_MODE_CANDLELIGHT,
+    CAM_SCENE_MODE_FIREWORKS,
+    CAM_SCENE_MODE_PARTY,
+    CAM_SCENE_MODE_NIGHT_PORTRAIT,
+    CAM_SCENE_MODE_THEATRE,
+    CAM_SCENE_MODE_ACTION,
+    CAM_SCENE_MODE_AR,
+    CAM_SCENE_MODE_FACE_PRIORITY,
+    CAM_SCENE_MODE_BARCODE,
+    CAM_SCENE_MODE_HDR,
+    CAM_SCENE_MODE_MAX
+} cam_scene_mode_type;
+
+typedef enum {
+    CAM_EFFECT_MODE_OFF,
+    CAM_EFFECT_MODE_MONO,
+    CAM_EFFECT_MODE_NEGATIVE,
+    CAM_EFFECT_MODE_SOLARIZE,
+    CAM_EFFECT_MODE_SEPIA,
+    CAM_EFFECT_MODE_POSTERIZE,
+    CAM_EFFECT_MODE_WHITEBOARD,
+    CAM_EFFECT_MODE_BLACKBOARD,
+    CAM_EFFECT_MODE_AQUA,
+    CAM_EFFECT_MODE_EMBOSS,
+    CAM_EFFECT_MODE_SKETCH,
+    CAM_EFFECT_MODE_NEON,
+    CAM_EFFECT_MODE_MAX
+} cam_effect_mode_type;
+
+typedef enum {
+    CAM_FLASH_MODE_OFF,
+    CAM_FLASH_MODE_AUTO,
+    CAM_FLASH_MODE_ON,
+    CAM_FLASH_MODE_TORCH,
+    CAM_FLASH_MODE_MAX
+} cam_flash_mode_t;
+
+typedef enum {
+    CAM_AEC_TRIGGER_IDLE,
+    CAM_AEC_TRIGGER_START
+} cam_aec_trigger_type_t;
+
+typedef enum {
+    CAM_AF_TRIGGER_IDLE,
+    CAM_AF_TRIGGER_START,
+    CAM_AF_TRIGGER_CANCEL
+} cam_af_trigger_type_t;
+
+typedef enum {
+    CAM_AE_STATE_INACTIVE,
+    CAM_AE_STATE_SEARCHING,
+    CAM_AE_STATE_CONVERGED,
+    CAM_AE_STATE_LOCKED,
+    CAM_AE_STATE_FLASH_REQUIRED,
+    CAM_AE_STATE_PRECAPTURE
+} cam_ae_state_t;
+
+typedef enum {
+  CAM_CDS_MODE_OFF,
+  CAM_CDS_MODE_ON,
+  CAM_CDS_MODE_AUTO,
+  CAM_CDS_MODE_MAX
+} cam_cds_mode_type_t;
+
+typedef struct  {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_rect_t;
+
+typedef struct  {
+    cam_rect_t rect;
+    int32_t weight; /* weight of the area, valid for focusing/metering areas */
+} cam_area_t;
+
+typedef enum {
+    CAM_STREAMING_MODE_CONTINUOUS, /* continuous streaming */
+    CAM_STREAMING_MODE_BURST,      /* burst streaming */
+    CAM_STREAMING_MODE_BATCH,      /* stream frames in batches */
+    CAM_STREAMING_MODE_MAX
+} cam_streaming_mode_t;
+
+typedef enum {
+    IS_TYPE_NONE,
+    IS_TYPE_DIS,
+    IS_TYPE_GA_DIS,
+    IS_TYPE_EIS_1_0,
+    IS_TYPE_EIS_2_0
+} cam_is_type_t;
+
+typedef enum {
+    SECURE,
+    NON_SECURE
+} cam_stream_secure_t;
+
+#define CAM_REPROCESS_MASK_TYPE_WNR (1<<0)
+
+/* event from server */
+typedef enum {
+    CAM_EVENT_TYPE_MAP_UNMAP_DONE        = (1<<0),
+    CAM_EVENT_TYPE_AUTO_FOCUS_DONE       = (1<<1),
+    CAM_EVENT_TYPE_ZOOM_DONE             = (1<<2),
+    CAM_EVENT_TYPE_REPROCESS_STAGE_DONE  = (1<<3),
+    CAM_EVENT_TYPE_DAEMON_DIED           = (1<<4),
+    CAM_EVENT_TYPE_INT_TAKE_PIC          = (1<<5),
+    CAM_EVENT_TYPE_MAX
+} cam_event_type_t;
+
+typedef enum {
+    CAM_EXP_BRACKETING_OFF,
+    CAM_EXP_BRACKETING_ON
+} cam_bracket_mode;
+
+typedef struct {
+    cam_bracket_mode mode;
+    char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} cam_exp_bracketing_t;
+
+typedef struct {
+  uint32_t num_frames;
+  cam_exp_bracketing_t exp_val;
+} cam_hdr_bracketing_info_t;
+
+typedef struct {
+    uint8_t chromatixData[CHROMATIX_SIZE];
+    uint8_t snapchromatixData[CHROMATIX_SIZE];
+    uint8_t common_chromatixData[COMMONCHROMATIX_SIZE];
+} tune_chromatix_t;
+
+typedef struct {
+    uint8_t af_tuneData[AFTUNE_SIZE];
+} tune_autofocus_t;
+
+typedef struct {
+    uint8_t stepsize;
+    uint8_t direction;
+    int32_t num_steps;
+    uint8_t ttype;
+} tune_actuator_t;
+
+typedef struct {
+    uint8_t module;
+    uint8_t type;
+    int32_t value;
+} tune_cmd_t;
+
+typedef enum {
+    CAM_AEC_ROI_OFF,
+    CAM_AEC_ROI_ON
+} cam_aec_roi_ctrl_t;
+
+typedef enum {
+    CAM_AEC_ROI_BY_INDEX,
+    CAM_AEC_ROI_BY_COORDINATE,
+} cam_aec_roi_type_t;
+
+typedef struct {
+    uint32_t x;
+    uint32_t y;
+} cam_coordinate_type_t;
+
+typedef struct {
+    int32_t numerator;
+    int32_t denominator;
+} cam_rational_type_t;
+
+typedef struct {
+    cam_aec_roi_ctrl_t aec_roi_enable;
+    cam_aec_roi_type_t aec_roi_type;
+    union {
+        cam_coordinate_type_t coordinate[MAX_ROI];
+        uint32_t aec_roi_idx[MAX_ROI];
+    } cam_aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+    uint32_t frm_id;
+    uint8_t num_roi;
+    cam_rect_t roi[MAX_ROI];
+    int32_t weight[MAX_ROI];
+    uint8_t is_multiwindow;
+} cam_roi_info_t;
+
+typedef enum {
+    CAM_WAVELET_DENOISE_YCBCR_PLANE,
+    CAM_WAVELET_DENOISE_CBCR_ONLY,
+    CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
+    CAM_WAVELET_DENOISE_STREAMLINED_CBCR
+} cam_denoise_process_type_t;
+
+typedef struct {
+    uint8_t denoise_enable;
+    cam_denoise_process_type_t process_plates;
+} cam_denoise_param_t;
+
+#define CAM_FACE_PROCESS_MASK_DETECTION    (1U<<0)
+#define CAM_FACE_PROCESS_MASK_RECOGNITION  (1U<<1)
+typedef struct {
+    uint32_t fd_mode;          /* mask of face process */
+    uint32_t num_fd;
+} cam_fd_set_parm_t;
+
+typedef enum {
+    QCAMERA_FD_PREVIEW,
+    QCAMERA_FD_SNAPSHOT
+} qcamera_face_detect_type_t;
+
+typedef struct {
+    int8_t face_id;            /* unique id for face tracking within view unless view changes */
+    int8_t score;              /* score of confidence (0, -100) */
+    cam_rect_t face_boundary;  /* boundary of face detected */
+    cam_coordinate_type_t left_eye_center;  /* coordinate of center of left eye */
+    cam_coordinate_type_t right_eye_center; /* coordinate of center of right eye */
+    cam_coordinate_type_t mouth_center;     /* coordinate of center of mouth */
+    uint8_t smile_degree;      /* smile degree (0, -100) */
+    uint8_t smile_confidence;  /* smile confidence (0, 100) */
+    uint8_t face_recognised;   /* if face is recognised */
+    int8_t gaze_angle;         /* -90 -45 0 45 90 for head left to right tilt */
+    int8_t updown_dir;         /* up down direction (-90, 90) */
+    int8_t leftright_dir;      /* left right direction (-90, 90) */
+    int8_t roll_dir;           /* roll direction (-90, 90) */
+    int8_t left_right_gaze;    /* left right gaze degree (-50, 50) */
+    int8_t top_bottom_gaze;    /* up down gaze degree (-50, 50) */
+    uint8_t blink_detected;    /* if blink is detected */
+    uint8_t left_blink;        /* left eye blink degree (0, -100) */
+    uint8_t right_blink;       /* right eye blink degree (0, -100) */
+} cam_face_detection_info_t;
+
+typedef struct {
+    uint32_t frame_id;                         /* frame index in which faces are detected */
+    uint8_t num_faces_detected;                /* number of faces detected */
+    cam_face_detection_info_t faces[MAX_ROI];  /* detailed information of faces detected */
+    qcamera_face_detect_type_t fd_type;        /* face detect for preview or snapshot frame*/
+    cam_dimension_t fd_frame_dim;              /* frame dims on which fd is applied */
+} cam_face_detection_data_t;
+
+#define CAM_HISTOGRAM_STATS_SIZE 256
+typedef struct {
+    uint32_t max_hist_value;
+    uint32_t hist_buf[CAM_HISTOGRAM_STATS_SIZE]; /* buf holding histogram stats data */
+} cam_histogram_data_t;
+
+typedef struct {
+    cam_histogram_data_t r_stats;
+    cam_histogram_data_t b_stats;
+    cam_histogram_data_t gr_stats;
+    cam_histogram_data_t gb_stats;
+} cam_bayer_hist_stats_t;
+
+typedef enum {
+    CAM_HISTOGRAM_TYPE_BAYER,
+    CAM_HISTOGRAM_TYPE_YUV
+} cam_histogram_type_t;
+
+typedef struct {
+    cam_histogram_type_t type;
+    union {
+        cam_bayer_hist_stats_t bayer_stats;
+        cam_histogram_data_t yuv_stats;
+    };
+} cam_hist_stats_t;
+
+enum cam_focus_distance_index{
+  CAM_FOCUS_DISTANCE_NEAR_INDEX,  /* 0 */
+  CAM_FOCUS_DISTANCE_OPTIMAL_INDEX,
+  CAM_FOCUS_DISTANCE_FAR_INDEX,
+  CAM_FOCUS_DISTANCE_MAX_INDEX
+};
+
+typedef struct {
+  float focus_distance[CAM_FOCUS_DISTANCE_MAX_INDEX];
+} cam_focus_distances_info_t;
+
+/* Different autofocus cycle when calling do_autoFocus
+ * CAM_AF_COMPLETE_EXISTING_SWEEP: Complete existing sweep
+ * if one is ongoing, and lock.
+ * CAM_AF_DO_ONE_FULL_SWEEP: Do one full sweep, regardless
+ * of the current state, and lock.
+ * CAM_AF_START_CONTINUOUS_SWEEP: Start continuous sweep.
+ * After do_autoFocus, HAL receives an event: CAM_AF_FOCUSED,
+ * or CAM_AF_NOT_FOCUSED.
+ * cancel_autoFocus stops any lens movement.
+ * Each do_autoFocus call only produces 1 FOCUSED/NOT_FOCUSED
+ * event, not both.
+ */
+typedef enum {
+    CAM_AF_COMPLETE_EXISTING_SWEEP,
+    CAM_AF_DO_ONE_FULL_SWEEP,
+    CAM_AF_START_CONTINUOUS_SWEEP
+} cam_autofocus_cycle_t;
+
+typedef enum {
+    CAM_AF_SCANNING,
+    CAM_AF_FOCUSED,
+    CAM_AF_NOT_FOCUSED,
+    CAM_AF_INACTIVE,
+    CAM_AF_PASSIVE_SCANNING,
+    CAM_AF_PASSIVE_FOCUSED,
+    CAM_AF_PASSIVE_UNFOCUSED,
+} cam_autofocus_state_t;
+
+typedef struct {
+    cam_autofocus_state_t focus_state;           /* state of focus */
+    cam_focus_distances_info_t focus_dist;       /* focus distance */
+    int32_t focus_pos;
+    uint32_t focused_frame_idx;
+} cam_auto_focus_data_t;
+
+typedef struct {
+  uint32_t is_hdr_scene;
+  float    hdr_confidence;
+} cam_asd_hdr_scene_data_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_rect_t crop;
+} cam_stream_crop_info_t;
+
+typedef struct {
+    uint8_t num_of_streams;
+    cam_stream_crop_info_t crop_info[MAX_NUM_STREAMS];
+} cam_crop_data_t;
+
+typedef enum {
+    DO_NOT_NEED_FUTURE_FRAME,
+    NEED_FUTURE_FRAME,
+} cam_prep_snapshot_state_t;
+
+typedef struct {
+    uint32_t min_frame_idx;
+    uint32_t max_frame_idx;
+} cam_frame_idx_range_t;
+
+typedef enum {
+  S_NORMAL = 0,
+  S_SCENERY,
+  S_PORTRAIT,
+  S_PORTRAIT_BACKLIGHT,
+  S_SCENERY_BACKLIGHT,
+  S_BACKLIGHT,
+  S_MAX,
+} cam_auto_scene_t;
+
+typedef struct {
+   uint32_t meta_frame_id;
+} cam_meta_valid_t;
+
+typedef enum {
+    CAM_SENSOR_RAW,
+    CAM_SENSOR_YUV
+} cam_sensor_t;
+
+typedef struct {
+    cam_flash_mode_t flash_mode;
+    cam_sensor_t sens_type;
+    float aperture_value;
+} cam_sensor_params_t;
+
+typedef struct {
+    float exp_time;
+    float real_gain;
+    int32_t iso_value;
+    uint32_t flash_needed;
+    uint32_t settled;
+    uint32_t exp_index;
+    uint32_t line_count;
+} cam_ae_params_t;
+
+typedef struct {
+    int32_t cct_value;
+    int32_t decision;
+} cam_awb_params_t;
+
+typedef struct {
+    int32_t aec_debug_data_size;
+    char aec_private_debug_data[AEC_DEBUG_DATA_SIZE];
+} cam_ae_exif_debug_t;
+
+typedef struct {
+    int32_t awb_debug_data_size;
+    char awb_private_debug_data[AWB_DEBUG_DATA_SIZE];
+} cam_awb_exif_debug_t;
+
+typedef struct {
+    int32_t af_debug_data_size;
+    char af_private_debug_data[AF_DEBUG_DATA_SIZE];
+} cam_af_exif_debug_t;
+
+typedef struct {
+    int32_t asd_debug_data_size;
+    char asd_private_debug_data[ASD_DEBUG_DATA_SIZE];
+} cam_asd_exif_debug_t;
+
+typedef struct {
+    int32_t bg_stats_buffer_size;
+    int32_t bhist_stats_buffer_size;
+    char stats_buffer_private_debug_data[STATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_stats_buffer_exif_debug_t;
+
+typedef struct {
+    uint32_t tuning_data_version;
+    size_t tuning_sensor_data_size;
+    size_t tuning_vfe_data_size;
+    size_t tuning_cpp_data_size;
+    size_t tuning_cac_data_size;
+    uint8_t  data[TUNING_DATA_MAX];
+}tuning_params_t;
+
+typedef struct {
+    cam_dimension_t dim;
+    size_t size;
+    char path[50];
+} cam_int_evt_params_t;
+
+typedef struct {
+  uint8_t private_mobicat_af_data[MAX_AF_STATS_DATA_SIZE];
+} cam_chromatix_mobicat_af_t;
+
+typedef struct {
+  uint8_t private_isp_data[MAX_ISP_DATA_SIZE];
+} cam_chromatix_lite_isp_t;
+
+typedef struct {
+  uint8_t private_pp_data[MAX_PP_DATA_SIZE];
+} cam_chromatix_lite_pp_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AE_STATS_DATA_SIZE];
+} cam_chromatix_lite_ae_stats_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AWB_STATS_DATA_SIZE];
+} cam_chromatix_lite_awb_stats_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AF_STATS_DATA_SIZE];
+} cam_chromatix_lite_af_stats_t;
+
+typedef  struct {
+    uint8_t is_stats_valid;               /* if histogram data is valid */
+    cam_hist_stats_t stats_data;          /* histogram data */
+
+    uint8_t is_faces_valid;               /* if face detection data is valid */
+    cam_face_detection_data_t faces_data; /* face detection result */
+
+    uint8_t is_focus_valid;               /* if focus data is valid */
+    cam_auto_focus_data_t focus_data;     /* focus data */
+
+    uint8_t is_crop_valid;                /* if crop data is valid */
+    cam_crop_data_t crop_data;            /* crop data */
+
+    uint8_t is_prep_snapshot_done_valid;  /* if prep snapshot done is valid */
+    cam_prep_snapshot_state_t prep_snapshot_done_state;  /* prepare snapshot done state */
+
+    /* if good frame idx range is valid */
+    uint8_t is_good_frame_idx_range_valid;
+    /* good frame idx range, make sure:
+     * 1. good_frame_idx_range.min_frame_idx > current_frame_idx
+     * 2. good_frame_idx_range.min_frame_idx - current_frame_idx < 100 */
+    cam_frame_idx_range_t good_frame_idx_range;
+
+    uint32_t is_hdr_scene_data_valid;
+    cam_asd_hdr_scene_data_t hdr_scene_data;
+    uint8_t is_asd_decision_valid;
+    cam_auto_scene_t scene; //scene type as decided by ASD
+
+    char private_metadata[MAX_METADATA_PAYLOAD_SIZE];
+
+    /* AE parameters */
+    uint8_t is_ae_params_valid;
+    cam_ae_params_t ae_params;
+
+    /* AWB parameters */
+    uint8_t is_awb_params_valid;
+    cam_awb_params_t awb_params;
+
+    /* AE exif debug parameters */
+    uint8_t is_ae_exif_debug_valid;
+    cam_ae_exif_debug_t ae_exif_debug_params;
+
+    /* AWB exif debug parameters */
+    uint8_t is_awb_exif_debug_valid;
+    cam_awb_exif_debug_t awb_exif_debug_params;
+
+    /* AF exif debug parameters */
+    uint8_t is_af_exif_debug_valid;
+    cam_af_exif_debug_t af_exif_debug_params;
+
+    /* ASD exif debug parameters */
+    uint8_t is_asd_exif_debug_valid;
+    cam_asd_exif_debug_t asd_exif_debug_params;
+
+    /* Stats buffer exif debug parameters */
+    uint8_t is_stats_buffer_exif_debug_valid;
+    cam_stats_buffer_exif_debug_t stats_buffer_exif_debug_params;
+
+    /* sensor parameters */
+    uint8_t is_sensor_params_valid;
+    cam_sensor_params_t sensor_params;
+
+    /* Meta valid params */
+    uint8_t is_meta_valid;
+    cam_meta_valid_t meta_valid_params;
+
+    /* Preview frame skip params */
+    uint8_t is_preview_frame_skip_valid;
+    cam_frame_idx_range_t preview_frame_skip_idx_range;
+
+    /*Tuning Data*/
+    uint8_t is_tuning_params_valid;
+    tuning_params_t tuning_params;
+
+    uint8_t is_chromatix_mobicat_af_valid;
+    cam_chromatix_mobicat_af_t chromatix_mobicat_af_data;
+
+    uint8_t is_chromatix_lite_isp_valid;
+    cam_chromatix_lite_isp_t chromatix_lite_isp_data;
+
+    uint8_t is_chromatix_lite_pp_valid;
+    cam_chromatix_lite_pp_t chromatix_lite_pp_data;
+
+    uint8_t is_chromatix_lite_ae_stats_valid;
+    cam_chromatix_lite_ae_stats_t chromatix_lite_ae_stats_data;
+
+    uint8_t is_chromatix_lite_awb_stats_valid;
+    cam_chromatix_lite_awb_stats_t chromatix_lite_awb_stats_data;
+
+    uint8_t is_chromatix_lite_af_stats_valid;
+    cam_chromatix_lite_af_stats_t chromatix_lite_af_stats_data;
+
+    /* 3A mobicat debug params */
+    uint8_t is_mobicat_ae_params_valid;
+    cam_ae_exif_debug_t mobicat_ae_data;
+
+    uint8_t is_mobicat_awb_params_valid;
+    cam_awb_exif_debug_t mobicat_awb_data;
+
+    uint8_t is_mobicat_af_params_valid;
+    cam_af_exif_debug_t mobicat_af_data;
+
+    uint8_t is_mobicat_asd_params_valid;
+    cam_asd_exif_debug_t mobicat_asd_data;
+
+    uint8_t is_mobicat_stats_params_valid;
+    cam_stats_buffer_exif_debug_t mobicat_stats_buffer_data;
+} cam_metadata_info_t;
+
+typedef enum {
+    CAM_INTF_PARM_HAL_VERSION,
+    /* common between HAL1 and HAL3 */
+    CAM_INTF_PARM_ANTIBANDING,
+    CAM_INTF_PARM_EXPOSURE_COMPENSATION,
+    CAM_INTF_PARM_AEC_LOCK,
+    CAM_INTF_PARM_FPS_RANGE,
+    CAM_INTF_PARM_AWB_LOCK,
+    CAM_INTF_PARM_WHITE_BALANCE,
+    CAM_INTF_PARM_EFFECT,
+    CAM_INTF_PARM_BESTSHOT_MODE,
+    CAM_INTF_PARM_DIS_ENABLE,
+    CAM_INTF_PARM_LED_MODE,
+    CAM_INTF_META_HISTOGRAM, /* 10 */
+    CAM_INTF_META_FACE_DETECTION,
+    CAM_INTF_META_AUTOFOCUS_DATA,
+
+    /* specific to HAL1 */
+    CAM_INTF_PARM_QUERY_FLASH4SNAP,
+    CAM_INTF_PARM_EXPOSURE,
+    CAM_INTF_PARM_SHARPNESS,
+    CAM_INTF_PARM_CONTRAST,
+    CAM_INTF_PARM_SATURATION,
+    CAM_INTF_PARM_BRIGHTNESS,
+    CAM_INTF_PARM_ISO,
+    CAM_INTF_PARM_EXPOSURE_TIME,
+    CAM_INTF_PARM_ZOOM, /* 20 */
+    CAM_INTF_PARM_ROLLOFF,
+    CAM_INTF_PARM_MODE,             /* camera mode */
+    CAM_INTF_PARM_AEC_ALGO_TYPE,    /* auto exposure algorithm */
+    CAM_INTF_PARM_FOCUS_ALGO_TYPE,  /* focus algorithm */
+    CAM_INTF_PARM_AEC_ROI,
+    CAM_INTF_PARM_AF_ROI,
+    CAM_INTF_PARM_FOCUS_MODE,
+    CAM_INTF_PARM_MANUAL_FOCUS_POS,
+    CAM_INTF_PARM_SCE_FACTOR,
+    CAM_INTF_PARM_FD,
+    CAM_INTF_PARM_MCE, /* 30 */
+    CAM_INTF_PARM_HFR,
+    CAM_INTF_PARM_REDEYE_REDUCTION,
+    CAM_INTF_PARM_WAVELET_DENOISE,
+    CAM_INTF_PARM_HISTOGRAM,
+    CAM_INTF_PARM_ASD_ENABLE,
+    CAM_INTF_PARM_RECORDING_HINT,
+    CAM_INTF_PARM_HDR,
+    CAM_INTF_PARM_MAX_DIMENSION,
+    CAM_INTF_PARM_RAW_DIMENSION,
+    CAM_INTF_PARM_FRAMESKIP,
+    CAM_INTF_PARM_ZSL_MODE,  /* indicating if it's running in ZSL mode */
+    CAM_INTF_PARM_HDR_NEED_1X, /* if HDR needs 1x output */ /* 40 */
+    CAM_INTF_PARM_VIDEO_HDR,
+    CAM_INTF_PARM_SENSOR_HDR,
+    CAM_INTF_PARM_ROTATION,
+    CAM_INTF_PARM_SCALE,
+    CAM_INTF_PARM_VT, /* indicating if it's a Video Call Application */
+    CAM_INTF_META_CROP_DATA,
+    CAM_INTF_META_PREP_SNAPSHOT_DONE,
+    CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
+    CAM_INTF_PARM_GET_CHROMATIX,
+    CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
+    CAM_INTF_PARM_SET_AUTOFOCUSTUNING,
+    CAM_INTF_PARM_GET_AFTUNE,
+    CAM_INTF_PARM_SET_RELOAD_AFTUNE,
+    CAM_INTF_PARM_SET_VFE_COMMAND,
+    CAM_INTF_PARM_SET_PP_COMMAND,
+    CAM_INTF_PARM_TINTLESS,
+    CAM_INTF_PARM_CDS_MODE,
+    CAM_INTF_PARM_LONGSHOT_ENABLE,
+
+    /* stream based parameters */
+    CAM_INTF_PARM_DO_REPROCESS,
+    CAM_INTF_PARM_SET_BUNDLE,
+    CAM_INTF_PARM_STREAM_FLIP,
+    CAM_INTF_PARM_GET_OUTPUT_CROP,
+
+    CAM_INTF_PARM_AF_MOBICAT_CMD,
+    CAM_INTF_PARM_EZTUNE_CMD,
+    CAM_INTF_PARM_INT_EVT,
+
+    /* specific to HAL3 */
+    /* Whether the metadata maps to a valid frame number */
+    CAM_INTF_META_FRAME_NUMBER_VALID,
+    /* COLOR CORRECTION.*/
+    CAM_INTF_META_COLOR_CORRECT_MODE,
+    /* A transform matrix to chromatically adapt pixels in the CIE XYZ (1931)
+     * color space from the scene illuminant to the sRGB-standard D65-illuminant. */
+    CAM_INTF_META_COLOR_CORRECT_TRANSFORM, /* 50 */
+    /* CONTROL */
+//    CAM_INTF_META_REQUEST_ID,
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_FRAME_NUMBER,
+    /* Whether AE is currently updating the sensor exposure and sensitivity
+     * fields */
+    CAM_INTF_META_AEC_MODE,
+    /* List of areas to use for metering */
+    CAM_INTF_META_AEC_ROI,
+    /* Whether the HAL must trigger precapture metering.*/
+    CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
+    /* The ID sent with the latest CAMERA2_TRIGGER_PRECAPTURE_METERING call */
+    CAM_INTF_META_AEC_PRECAPTURE_ID,
+    /* Current state of AE algorithm */
+    CAM_INTF_META_AEC_STATE,
+    /* List of areas to use for focus estimation */
+    CAM_INTF_META_AF_ROI,
+    /* Whether the HAL must trigger autofocus. */
+    CAM_INTF_META_AF_TRIGGER,
+    /* Current state of AF algorithm */
+    CAM_INTF_META_AF_STATE,
+    /* The ID sent with the latest CAMERA2_TRIGGER_AUTOFOCUS call */
+    CAM_INTF_META_AF_TRIGGER_ID,
+    /* List of areas to use for illuminant estimation */
+    CAM_INTF_META_AWB_REGIONS,
+    /* Current state of AWB algorithm */
+    CAM_INTF_META_AWB_STATE,
+    /* Information to 3A routines about the purpose of this capture, to help
+     * decide optimal 3A strategy */
+    CAM_INTF_META_CAPTURE_INTENT,
+    /* Overall mode of 3A control routines. We need to have this parameter
+     * because not all android.control.* have an OFF option, for example,
+     * AE_FPS_Range, aePrecaptureTrigger */
+    CAM_INTF_META_MODE,
+    /* DEMOSAIC */
+    /* Controls the quality of the demosaicing processing */
+    CAM_INTF_META_DEMOSAIC,
+    /* EDGE */
+    /* Operation mode for edge enhancement */
+    CAM_INTF_META_EDGE,
+    /* Control the amount of edge enhancement applied to the images.*/
+    /* 1-10; 10 is maximum sharpening */
+    CAM_INTF_META_SHARPNESS_STRENGTH,
+    /* FLASH */
+    /* Power for flash firing/torch, 10 is max power; 0 is no flash. Linear */
+    CAM_INTF_META_FLASH_POWER,
+    /* Firing time of flash relative to start of exposure, in nanoseconds*/
+    CAM_INTF_META_FLASH_FIRING_TIME,
+    /* Current state of the flash unit */
+    CAM_INTF_META_FLASH_STATE,
+    /* GEOMETRIC */
+    /* Operating mode of geometric correction */
+    CAM_INTF_META_GEOMETRIC_MODE,
+    /* Control the amount of shading correction applied to the images */
+    CAM_INTF_META_GEOMETRIC_STRENGTH,
+    /* HOT PIXEL */
+    /* Set operational mode for hot pixel correction */
+    CAM_INTF_META_HOTPIXEL_MODE,
+    /* LENS */
+    /* Size of the lens aperture */
+    CAM_INTF_META_LENS_APERTURE,
+    /* State of lens neutral density filter(s) */
+    CAM_INTF_META_LENS_FILTERDENSITY,
+    /* Lens optical zoom setting */
+    CAM_INTF_META_LENS_FOCAL_LENGTH,
+    /* Distance to plane of sharpest focus, measured from frontmost surface
+     * of the lens */
+    CAM_INTF_META_LENS_FOCUS_DISTANCE,
+    /* The range of scene distances that are in sharp focus (depth of field) */
+    CAM_INTF_META_LENS_FOCUS_RANGE,
+    /* Whether optical image stabilization is enabled. */
+    CAM_INTF_META_LENS_OPT_STAB_MODE,
+    /* Current lens status */
+    CAM_INTF_META_LENS_STATE,
+    /* NOISE REDUCTION */
+    /* Mode of operation for the noise reduction algorithm */
+    CAM_INTF_META_NOISE_REDUCTION_MODE,
+   /* Control the amount of noise reduction applied to the images.
+    * 1-10; 10 is max noise reduction */
+    CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
+    /* SCALER */
+    /* Top-left corner and width of the output region to select from the active
+     * pixel array */
+    CAM_INTF_META_SCALER_CROP_REGION,
+    /* SENSOR */
+    /* Duration each pixel is exposed to light, in nanoseconds */
+    CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+    /* Duration from start of frame exposure to start of next frame exposure,
+     * in nanoseconds */
+    CAM_INTF_META_SENSOR_FRAME_DURATION,
+    /* Gain applied to image data. Must be implemented through analog gain only
+     * if set to values below 'maximum analog sensitivity'. */
+    CAM_INTF_META_SENSOR_SENSITIVITY,
+    /* Time at start of exposure of first row */
+    CAM_INTF_META_SENSOR_TIMESTAMP,
+    /* SHADING */
+    /* Quality of lens shading correction applied to the image data */
+    CAM_INTF_META_SHADING_MODE,
+    /* Control the amount of shading correction applied to the images.
+     * unitless: 1-10; 10 is full shading compensation */
+    CAM_INTF_META_SHADING_STRENGTH,
+    /* STATISTICS */
+    /* State of the face detector unit */
+    CAM_INTF_META_STATS_FACEDETECT_MODE,
+    /* Operating mode for histogram generation */
+    CAM_INTF_META_STATS_HISTOGRAM_MODE,
+    /* Operating mode for sharpness map generation */
+    CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+    /* A 3-channel sharpness map, based on the raw sensor data,
+     * If only a monochrome sharpness map is supported, all channels
+     * should have the same data
+     */
+    CAM_INTF_META_STATS_SHARPNESS_MAP,
+
+    /* TONEMAP */
+    /* Table mapping blue input values to output values */
+    CAM_INTF_META_TONEMAP_CURVE_BLUE,
+    /* Table mapping green input values to output values */
+    CAM_INTF_META_TONEMAP_CURVE_GREEN,
+    /* Table mapping red input values to output values */
+    CAM_INTF_META_TONEMAP_CURVE_RED,
+    /* Tone map mode */
+    CAM_INTF_META_TONEMAP_MODE,
+    CAM_INTF_META_FLASH_MODE,
+    CAM_INTF_META_ASD_HDR_SCENE_DATA,
+    CAM_INTF_META_PRIVATE_DATA,
+    CAM_INTF_PARM_STATS_DEBUG_MASK,
+    CAM_INTF_PARM_ISP_DEBUG_MASK,
+    CAM_INTF_PARM_ALGO_OPTIMIZATIONS_MASK,
+    CAM_INTF_PARM_SENSOR_DEBUG_MASK,
+    /* Indicates streams ID of all the requested buffers */
+    CAM_INTF_META_STREAM_ID,
+    CAM_INTF_PARM_FOCUS_BRACKETING,
+    CAM_INTF_PARM_MULTI_TOUCH_FOCUS_BRACKETING,
+    CAM_INTF_PARM_FLASH_BRACKETING,
+    CAM_INTF_PARM_GET_IMG_PROP,
+
+    CAM_INTF_PARM_MAX
+} cam_intf_parm_type_t;
+
+typedef struct {
+    uint32_t forced;
+    union {
+      uint32_t force_linecount_value;
+      float    force_gain_value;
+      float    force_snap_exp_value;
+      float    force_exp_value;
+      uint32_t force_snap_linecount_value;
+      float    force_snap_gain_value;
+    } u;
+} cam_ez_force_params_t;
+
+typedef enum {
+    CAM_EZTUNE_CMD_STATUS,
+    CAM_EZTUNE_CMD_AEC_ENABLE,
+    CAM_EZTUNE_CMD_AWB_ENABLE,
+    CAM_EZTUNE_CMD_AF_ENABLE,
+    CAM_EZTUNE_CMD_AEC_FORCE_LINECOUNT,
+    CAM_EZTUNE_CMD_AEC_FORCE_GAIN,
+    CAM_EZTUNE_CMD_AEC_FORCE_EXP,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_LC,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_GAIN,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_EXP,
+    CAM_EZTUNE_CMD_AWB_MODE,
+} cam_eztune_cmd_type_t;
+
+typedef struct {
+  cam_eztune_cmd_type_t   cmd;
+  union {
+    int32_t running;
+    int32_t aec_enable;
+    int32_t awb_enable;
+    int32_t af_enable;
+    cam_ez_force_params_t ez_force_param;
+    int32_t awb_mode;
+  } u;
+} cam_eztune_cmd_data_t;
+
+
+/*****************************************************************************
+ *                 Code for HAL3 data types                                  *
+ ****************************************************************************/
+typedef enum {
+    CAM_INTF_METADATA_MAX
+} cam_intf_metadata_type_t;
+
+typedef enum {
+    CAM_INTENT_CUSTOM,
+    CAM_INTENT_PREVIEW,
+    CAM_INTENT_STILL_CAPTURE,
+    CAM_INTENT_VIDEO_RECORD,
+    CAM_INTENT_VIDEO_SNAPSHOT,
+    CAM_INTENT_ZERO_SHUTTER_LAG,
+    CAM_INTENT_MAX,
+} cam_intent_t;
+
+typedef enum {
+    /* Full application control of pipeline. All 3A routines are disabled,
+     * no other settings in android.control.* have any effect */
+    CAM_CONTROL_OFF,
+    /* Use settings for each individual 3A routine. Manual control of capture
+     * parameters is disabled. All controls in android.control.* besides sceneMode
+     * take effect */
+    CAM_CONTROL_AUTO,
+    /* Use specific scene mode. Enabling this disables control.aeMode,
+     * control.awbMode and control.afMode controls; the HAL must ignore those
+     * settings while USE_SCENE_MODE is active (except for FACE_PRIORITY scene mode).
+     * Other control entries are still active. This setting can only be used if
+     * availableSceneModes != UNSUPPORTED. TODO: Should we remove this and handle this
+     * in the HAL? */
+    CAM_CONTROL_USE_SCENE_MODE,
+    CAM_CONTROL_MAX
+} cam_control_mode_t;
+
+typedef enum {
+    /* Use the android.colorCorrection.transform matrix to do color conversion */
+    CAM_COLOR_CORRECTION_TRANSFORM_MATRIX,
+    /* Must not slow down frame rate relative to raw bayer output */
+    CAM_COLOR_CORRECTION_FAST,
+    /* Frame rate may be reduced by high quality */
+    CAM_COLOR_CORRECTION_HIGH_QUALITY,
+} cam_color_correct_mode_t;
+
+typedef struct {
+    /* 3x3 float matrix in row-major order. each element is in range of (0, 1) */
+    float transform[3][3];
+} cam_color_correct_matrix_t;
+
+#define CAM_FOCAL_LENGTHS_MAX     1
+#define CAM_APERTURES_MAX         1
+#define CAM_FILTER_DENSITIES_MAX  1
+#define CAM_MAX_MAP_HEIGHT        6
+#define CAM_MAX_MAP_WIDTH         6
+
+#define CAM_MAX_TONEMAP_CURVE_SIZE    128
+
+typedef struct {
+    size_t tonemap_points_cnt;
+
+    /* A 1D array of pairs of floats.
+     * Mapping a 0-1 input range to a 0-1 output range.
+     * The input range must be monotonically increasing with N,
+     * and values between entries should be linearly interpolated.
+     * For example, if the array is: [0.0, 0.0, 0.3, 0.5, 1.0, 1.0],
+     * then the input->output mapping for a few sample points would be:
+     * 0 -> 0, 0.15 -> 0.25, 0.3 -> 0.5, 0.5 -> 0.64 */
+    float tonemap_points[CAM_MAX_TONEMAP_CURVE_SIZE][2];
+} cam_tonemap_curve_t;
+
+typedef enum {
+    OFF,
+    FAST,
+    QUALITY,
+} cam_quality_preference_t;
+
+typedef enum {
+    CAM_FLASH_CTRL_OFF,
+    CAM_FLASH_CTRL_SINGLE,
+    CAM_FLASH_CTRL_TORCH
+} cam_flash_ctrl_t;
+
+typedef struct {
+    uint8_t ae_mode;
+    uint8_t awb_mode;
+    uint8_t af_mode;
+} cam_scene_mode_overrides_t;
+
+typedef struct {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_crop_region_t;
+
+typedef struct {
+    /* Estimated sharpness for each region of the input image.
+     * Normalized to be between 0 and maxSharpnessMapValue.
+     * Higher values mean sharper (better focused) */
+    int32_t sharpness[CAM_MAX_MAP_WIDTH][CAM_MAX_MAP_HEIGHT];
+} cam_sharpness_map_t;
+
+typedef struct {
+    int32_t min_value;
+    int32_t max_value;
+    int32_t def_value;
+    int32_t step;
+} cam_control_range_t;
+
+#define CAM_QCOM_FEATURE_FACE_DETECTION (1U<<0)
+#define CAM_QCOM_FEATURE_DENOISE2D      (1U<<1)
+#define CAM_QCOM_FEATURE_CROP           (1U<<2)
+#define CAM_QCOM_FEATURE_CPP            (1U<<3)
+#define CAM_QCOM_FEATURE_FLIP           (1U<<4)
+#define CAM_QCOM_FEATURE_HDR            (1U<<5)
+#define CAM_QCOM_FEATURE_REGISTER_FACE  (1U<<6)
+#define CAM_QCOM_FEATURE_SHARPNESS      (1U<<7)
+#define CAM_QCOM_FEATURE_VIDEO_HDR      (1U<<8)
+#define CAM_QCOM_FEATURE_CAC            (1U<<9)
+#define CAM_QCOM_FEATURE_SCALE          (1U<<10)
+#define CAM_QCOM_FEATURE_EFFECT         (1U<<11)
+#define CAM_QCOM_FEATURE_UBIFOCUS       (1U<<12)
+#define CAM_QCOM_FEATURE_CHROMA_FLASH   (1U<<13)
+#define CAM_QCOM_FEATURE_OPTIZOOM       (1U<<14)
+#define CAM_QCOM_FEATURE_LLVD           (1U<<15)
+#define CAM_QCOM_FEATURE_DIS20          (1U<<16)
+#define CAM_QCOM_FEATURE_SENSOR_HDR     (1U<<17)
+#define CAM_QCOM_FEATURE_TRUEPORTRAIT   (1U<<18)
+#define CAM_QCOM_FEATURE_MULTI_TOUCH_FOCUS (1U<<19)
+#define CAM_QCOM_FEATURE_FSSR           (1U<<20)
+
+
+// Debug mask
+#define HAL_DEBUG_MASK_HAL                 (1U<<0)
+#define HAL_DEBUG_MASK_MM_CAMERA_INTERFACE (1U<<1)
+#define HAL_DEBUG_MASK_MM_JPEG_INTERFACE   (1U<<2)
+
+// Counter-clockwise
+typedef enum {
+    ROTATE_0 = 1<<0,
+    ROTATE_90 = 1<<1,
+    ROTATE_180 = 1<<2,
+    ROTATE_270 = 1<<3,
+} cam_rotation_t;
+
+typedef enum {
+    FLIP_NONE = 0, /* 00b */
+    FLIP_H = 1,    /* 01b */
+    FLIP_V = 2,    /* 10b */
+    FLIP_V_H = 3,  /* 11b */
+} cam_flip_t;
+
+typedef struct {
+    uint32_t bundle_id;                            /* bundle id */
+    uint8_t num_of_streams;                        /* number of streams in the bundle */
+    uint32_t stream_ids[MAX_STREAM_NUM_IN_BUNDLE]; /* array of stream ids to be bundled */
+} cam_bundle_config_t;
+
+typedef enum {
+    CAM_ONLINE_REPROCESS_TYPE,    /* online reprocess, frames from running streams */
+    CAM_OFFLINE_REPROCESS_TYPE,   /* offline reprocess, frames from external source */
+} cam_reprocess_type_enum_t;
+
+typedef enum {
+    CAM_HDR_MODE_SINGLEFRAME,    /* Single frame HDR mode which does only tone mapping */
+    CAM_HDR_MODE_MULTIFRAME,     /* Multi frame HDR mode which needs two frames with 0.5x and 2x exposure respectively */
+} cam_hdr_mode_enum_t;
+
+typedef struct {
+    uint32_t hdr_enable;
+    uint32_t hdr_need_1x; /* when CAM_QCOM_FEATURE_HDR is enabled, indicates if 1x output is needed */
+    cam_hdr_mode_enum_t hdr_mode;
+} cam_hdr_param_t;
+
+typedef struct {
+    int32_t output_width;
+    int32_t output_height;
+} cam_scale_param_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+    int32_t focus_steps[MAX_AF_BRACKETING_VALUES];
+    uint8_t output_count;
+} cam_af_bracketing_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+} cam_flash_bracketing_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+} cam_fssr_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+    uint8_t zoom_threshold;
+} cam_opti_zoom_t;
+
+typedef struct {
+    uint8_t enable;
+    uint32_t meta_max_size;
+    uint32_t meta_header_size;
+    uint32_t body_mask_width;
+} cam_true_portrait_t;
+
+typedef enum {
+    CAM_FLASH_OFF,
+    CAM_FLASH_ON
+} cam_flash_value_t;
+
+typedef struct {
+    cam_sensor_t sens_type;
+    cam_format_t native_format;
+} cam_sensor_type_t;
+
+typedef struct {
+    /* reprocess feature mask */
+    uint32_t feature_mask;
+
+    /* individual setting for features to be reprocessed */
+    cam_denoise_param_t denoise2d;
+    cam_rect_t input_crop;
+    cam_rotation_t rotation;
+    uint32_t flip;
+    int32_t sharpness;
+    int32_t effect;
+    cam_hdr_param_t hdr_param;
+    cam_scale_param_t scale_param;
+
+    uint8_t zoom_level;
+    cam_flash_value_t flash_value;
+    cam_true_portrait_t tp_param;
+} cam_pp_feature_config_t;
+
+typedef struct {
+    uint32_t input_stream_id;
+    /* input source stream type */
+    cam_stream_type_t input_stream_type;
+} cam_pp_online_src_config_t;
+
+typedef struct {
+    /* image format */
+    cam_format_t input_fmt;
+
+    /* image dimension */
+    cam_dimension_t input_dim;
+
+    /* buffer plane information, calculated based on stream_type, fmt,
+       dim, and padding_info (from stream config). Info includes:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t input_buf_planes;
+
+    /* number of input reprocess buffers */
+    uint8_t num_of_bufs;
+
+    /* input source type */
+    cam_stream_type_t input_type;
+} cam_pp_offline_src_config_t;
+
+/* reprocess stream input configuration */
+typedef struct {
+    /* input source config */
+    cam_reprocess_type_enum_t pp_type;
+    union {
+        cam_pp_online_src_config_t online;
+        cam_pp_offline_src_config_t offline;
+    };
+
+    /* pp feature config */
+    cam_pp_feature_config_t pp_feature_config;
+} cam_stream_reproc_config_t;
+
+typedef struct {
+    uint8_t crop_enabled;
+    cam_rect_t input_crop;
+} cam_crop_param_t;
+
+typedef struct {
+    uint8_t trigger;
+    int32_t trigger_id;
+} cam_trigger_t;
+
+typedef struct {
+    cam_denoise_param_t denoise;
+    cam_crop_param_t crop;
+    uint32_t flip;     /* 0 means no flip */
+    uint32_t uv_upsample; /* 0 means no chroma upsampling */
+    int32_t sharpness; /* 0 means no sharpness */
+} cam_per_frame_pp_config_t;
+
+typedef enum {
+    CAM_OPT_STAB_OFF,
+    CAM_OPT_STAB_ON,
+    CAM_OPT_STAB_MAX
+} cam_optical_stab_modes_t;
+
+typedef enum {
+    CAM_FILTER_ARRANGEMENT_RGGB,
+    CAM_FILTER_ARRANGEMENT_GRBG,
+    CAM_FILTER_ARRANGEMENT_GBRG,
+    CAM_FILTER_ARRANGEMENT_BGGR,
+
+    /* Sensor is not Bayer; output has 3 16-bit values for each pixel,
+     * instead of just 1 16-bit value per pixel.*/
+    CAM_FILTER_ARRANGEMENT_RGB
+} cam_color_filter_arrangement_t;
+
+typedef enum {
+    CAM_AF_STATE_INACTIVE,
+    CAM_AF_STATE_PASSIVE_SCAN,
+    CAM_AF_STATE_PASSIVE_FOCUSED,
+    CAM_AF_STATE_ACTIVE_SCAN,
+    CAM_AF_STATE_FOCUSED_LOCKED,
+    CAM_AF_STATE_NOT_FOCUSED_LOCKED,
+    CAM_AF_STATE_PASSIVE_UNFOCUSED
+} cam_af_state_t;
+
+typedef enum {
+    CAM_AWB_STATE_INACTIVE,
+    CAM_AWB_STATE_SEARCHING,
+    CAM_AWB_STATE_CONVERGED,
+    CAM_AWB_STATE_LOCKED
+} cam_awb_state_t;
+
+#endif /* __QCAMERA_TYPES_H__ */
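The comment on cam_tonemap_curve_t above describes a piecewise-linear mapping over (input, output) control points. A hedged sketch of how a consumer might evaluate such a curve is shown below; eval_tonemap is a hypothetical helper, illustrative only and not part of this patch or of the HAL API, and it assumes cam_types.h and its dependencies (e.g. media/msmb_camera.h) are on the include path.

#include "cam_types.h"

/* Evaluate a tonemap curve at an arbitrary input by linearly
 * interpolating between the stored (in, out) control points. */
static float eval_tonemap(const cam_tonemap_curve_t *curve, float in)
{
    size_t n = curve->tonemap_points_cnt;
    if (n == 0 || n > CAM_MAX_TONEMAP_CURVE_SIZE)
        return in;                              /* no usable curve: identity */
    if (in <= curve->tonemap_points[0][0])
        return curve->tonemap_points[0][1];     /* clamp below first point */
    for (size_t i = 1; i < n; i++) {
        float x0 = curve->tonemap_points[i - 1][0];
        float y0 = curve->tonemap_points[i - 1][1];
        float x1 = curve->tonemap_points[i][0];
        float y1 = curve->tonemap_points[i][1];
        if (in <= x1) {
            float t = (x1 > x0) ? (in - x0) / (x1 - x0) : 0.0f;
            return y0 + t * (y1 - y0);          /* linear interpolation */
        }
    }
    return curve->tonemap_points[n - 1][1];     /* clamp above last point */
}

With the example curve quoted in the struct's comment ((0.0, 0.0), (0.3, 0.5), (1.0, 1.0)), this returns 0.25 for an input of 0.15 and about 0.64 for an input of 0.5, matching the sample mapping given there.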
diff --git a/msm8974/QCamera2/stack/common/mm_camera_interface.h b/msm8974/QCamera2/stack/common/mm_camera_interface.h
new file mode 100644
index 0000000..347f3c3
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/mm_camera_interface.h
@@ -0,0 +1,762 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_INTERFACE_H__
+#define __MM_CAMERA_INTERFACE_H__
+#include <linux/msm_ion.h>
+#include <linux/videodev2.h>
+#include <media/msmb_camera.h>
+#include "cam_intf.h"
+#include "cam_queue.h"
+
+#define MM_CAMERA_MAX_NUM_SENSORS MSM_MAX_CAMERA_SENSORS
+#define MM_CAMERA_MAX_NUM_FRAMES CAM_MAX_NUM_BUFS_PER_STREAM
+/* num of channels allowed in a camera obj */
+#define MM_CAMERA_CHANNEL_MAX 16
+
+#define PAD_TO_SIZE(size, padding) \
+        ((size + (typeof(size))(padding - 1)) & \
+        (typeof(size))(~(padding - 1)))
+
+#define CAM_FN_CNT 255
+/** CAM_DUMP_TO_FILE:
+ *  @path: directory path for the dump
+ *  @name: file name
+ *  @index: index of the file
+ *  @extn: file extension
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define CAM_DUMP_TO_FILE(path, name, index, extn, p_addr, len) ({ \
+  size_t rc = 0; \
+  char filename[CAM_FN_CNT]; \
+  if (index >= 0) \
+    snprintf(filename, CAM_FN_CNT, "%s/%s%d.%s", path, name, index, extn); \
+  else \
+    snprintf(filename, CAM_FN_CNT, "%s/%s.%s", path, name, extn); \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    ALOGE("%s:%d] written size %d", __func__, __LINE__, len); \
+    fclose(fp); \
+  } else { \
+    ALOGE("%s:%d] open %s failed", __func__, __LINE__, filename); \
+  } \
+})
+
+/** mm_camera_buf_def_t: structure for stream frame buf
+*    @stream_id : stream handler to uniquely identify a stream
+*               object
+*    @buf_idx : index of the buf within the stream bufs, to be
+*               filled during mem allocation
+*    @ts : time stamp, to be filled when DQBUF is called
+*    @frame_idx : frame sequence num, to be filled when DQBUF
+*               is called
+*    @num_planes : num of planes for the frame buffer, to be
+*               filled during mem allocation
+*    @planes : plane info for the frame buffer, to be filled
+*               during mem allocation
+*    @fd : file descriptor of the frame buffer, to be filled
+*        during mem allocation
+*    @buffer : pointer to the frame buffer, to be filled during
+*            mem allocation
+*    @frame_len : length of the whole frame, to be filled during
+*               mem allocation
+*    @mem_info : user specific pointer to additional mem info
+**/
+typedef struct {
+    uint32_t stream_id;
+    cam_stream_type_t stream_type;
+    uint32_t buf_idx;
+    uint8_t is_uv_subsampled;
+    struct timespec ts;
+    uint32_t frame_idx;
+    int8_t num_planes;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    int fd;
+    void *buffer;
+    size_t frame_len;
+    void *mem_info;
+} mm_camera_buf_def_t;
+
+/** mm_camera_super_buf_t: super buf structure for bundled
+*   stream frames
+*    @camera_handle : camera handler to uniquely identify
+*              a camera object
+*    @ch_id : channel handler to uniquely identify a channel
+*           object
+*    @num_bufs : number of buffers in the super buf, should not
+*              exceed MAX_STREAM_NUM_IN_BUNDLE
+*    @bufs : array of buffers in the bundle
+**/
+typedef struct {
+    uint32_t camera_handle;
+    uint32_t ch_id;
+    uint32_t num_bufs;
+    mm_camera_buf_def_t* bufs[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_super_buf_t;
+
+/** mm_camera_event_t: structure for event
+*    @server_event_type : event type from server
+*    @status : status of an event, value could be
+*              CAM_STATUS_SUCCESS
+*              CAM_STATUS_FAILED
+**/
+typedef struct {
+    cam_event_type_t server_event_type;
+    uint32_t status;
+} mm_camera_event_t;
+
+/** mm_camera_event_notify_t: function definition for event
+*   notify handling
+*    @camera_handle : camera handler
+*    @evt : pointer to an event struct
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_event_notify_t)(uint32_t camera_handle,
+                                         mm_camera_event_t *evt,
+                                         void *user_data);
+
+/** mm_camera_buf_notify_t: function definition for frame notify
+*   handling
+*    @mm_camera_super_buf_t : received frame buffers
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_buf_notify_t) (mm_camera_super_buf_t *bufs,
+                                        void *user_data);
+
+/** map_stream_buf_op_t: function definition for operation of
+*   mapping stream buffers via domain socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx    : plane index. If all planes share the same
+*                   fd, plane_idx = -1; otherwise, plane_idx is
+*                   the index to plane (0..num_of_planes)
+*    @fd : file descriptor of the stream buffer
+*    @size: size of the stream buffer
+*    @userdata : user data pointer
+**/
+typedef int32_t (*map_stream_buf_op_t) (uint32_t frame_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        size_t size,
+                                        void *userdata);
+
+/** unmap_stream_buf_op_t: function definition for operation of
+*                          unmapping stream buffers via domain
+*                          socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx : plane index. If all planes share the same
+*                 fd, plane_idx = -1; otherwise, plane_idx is
+*                 the index to plane (0..num_of_planes)
+*    @userdata : user data pointer
+**/
+typedef int32_t (*unmap_stream_buf_op_t) (uint32_t frame_idx,
+                                          int32_t plane_idx,
+                                          void *userdata);
+
+/** mm_camera_map_unmap_ops_tbl_t: virtual table
+*                      for mapping/unmapping stream buffers via
+*                      domain socket
+*    @map_ops : operation for mapping
+*    @unmap_ops : operation for unmapping
+*    @userdata: user data pointer
+**/
+typedef struct {
+    map_stream_buf_op_t map_ops;
+    unmap_stream_buf_op_t unmap_ops;
+    void *userdata;
+} mm_camera_map_unmap_ops_tbl_t;
+
+/** mm_camera_stream_mem_vtbl_t: virtual table for stream
+*                      memory allocation and deallocation
+*    @get_bufs : function definition for allocating
+*                stream buffers
+*    @put_bufs : function definition for deallocating
+*                stream buffers
+*    @user_data: user data pointer
+**/
+typedef struct {
+  void *user_data;
+  int32_t (*get_bufs) (cam_frame_len_offset_t *offset,
+                       uint8_t *num_bufs,
+                       uint8_t **initial_reg_flag,
+                       mm_camera_buf_def_t **bufs,
+                       mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*put_bufs) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*invalidate_buf)(uint32_t index, void *user_data);
+  int32_t (*clean_invalidate_buf)(uint32_t index, void *user_data);
+} mm_camera_stream_mem_vtbl_t;
+
+/** mm_camera_stream_config_t: structure for stream
+*                              configuration
+*    @stream_info : pointer to a stream info structure
+*    @padding_info: padding info obtained from query capability
+*    @mem_vtbl : memory operation table for
+*              allocating/deallocating stream buffers
+*    @stream_cb : callback handling stream frame notify
+*    @userdata : user data pointer
+**/
+typedef struct {
+    cam_stream_info_t *stream_info;
+    cam_padding_info_t padding_info;
+    mm_camera_stream_mem_vtbl_t mem_vtbl;
+    mm_camera_buf_notify_t stream_cb;
+    void *userdata;
+} mm_camera_stream_config_t;
+
+/** mm_camera_super_buf_notify_mode_t: enum for super buffer
+*                                      notification mode
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_BURST :
+*       ZSL use case: get burst of frames
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS :
+*       get continuous frames: when the super buf is ready
+*       dispatch it to HAL
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_NOTIFY_BURST = 0,
+    MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS,
+    MM_CAMERA_SUPER_BUF_NOTIFY_MAX
+} mm_camera_super_buf_notify_mode_t;
+
+/** mm_camera_super_buf_priority_t: enum for super buffer
+*                                   matching priority
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL :
+*       Save the frame whether it is focused or not. Currently only
+*       this type is supported.
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS :
+*       only queue the frame that is focused. Will enable meta
+*       data header to carry focus info
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING :
+*       after shutter, only queue matched exposure index
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL = 0,
+    MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS,
+    MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING,
+    MM_CAMERA_SUPER_BUF_PRIORITY_MAX
+} mm_camera_super_buf_priority_t;
+
+/** mm_camera_advanced_capture_t: enum for advanced capture type.
+*    @MM_CAMERA_AF_BRACKETING :
+*       to enable AF Bracketing.
+*    @MM_CAMERA_AE_BRACKETING :
+*       to enable AE Bracketing.
+*    @MM_CAMERA_FLASH_BRACKETING :
+*       to enable Flash Bracketing.
+*    @MM_CAMERA_MTF_BRACKETING :
+*       to enable MTF Bracketing.
+*    @MM_CAMERA_ZOOM_1X :
+*       to enable zoom 1x capture request
+**/
+typedef enum {
+   MM_CAMERA_AF_BRACKETING = 0,
+   MM_CAMERA_AE_BRACKETING,
+   MM_CAMERA_FLASH_BRACKETING,
+   MM_CAMERA_MTF_BRACKETING,
+   MM_CAMERA_ZOOM_1X,
+} mm_camera_advanced_capture_t;
+
+/** mm_camera_channel_attr_t: structure for defining channel
+*                             attributes
+*    @notify_mode : notify mode: burst or continuous
+*    @water_mark : queue depth. Only valid for burst mode
+*    @look_back : how many frames to look back from the last
+*                 buf. Only valid for burst mode
+*    @post_frame_skip : after sending the first frame to HAL, how
+*                     many frames need to be skipped before the
+*                     next delivery. Only valid for burst mode
+*    @max_unmatched_frames : max number of unmatched frames in
+*                     queue
+*    @priority : save matched priority frames only
+**/
+typedef struct {
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    uint8_t water_mark;
+    uint8_t look_back;
+    uint8_t post_frame_skip;
+    uint8_t max_unmatched_frames;
+    mm_camera_super_buf_priority_t priority;
+} mm_camera_channel_attr_t;
+
+typedef struct {
+    /** query_capability: function definition for querying static
+     *                    camera capabilities
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume cam_capability_t is already mapped
+     **/
+    int32_t (*query_capability) (uint32_t camera_handle);
+
+    /** register_event_notify: function definition for registering
+     *                         for event notification
+     *    @camera_handle : camera handler
+     *    @evt_cb : callback for event notify
+     *    @user_data : user data pointer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*register_event_notify) (uint32_t camera_handle,
+                                      mm_camera_event_notify_t evt_cb,
+                                      void *user_data);
+
+    /** close_camera: function definition for closing a camera
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*close_camera) (uint32_t camera_handle);
+
+    /** map_buf: function definition for mapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_buf) (uint32_t camera_handle,
+                        uint8_t buf_type,
+                        int fd,
+                        size_t size);
+
+    /** unmap_buf: function definition for unmapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_buf) (uint32_t camera_handle,
+                          uint8_t buf_type);
+
+    /** set_parms: function definition for setting camera
+     *             based parameters to server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be set, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and
+     *       corresponding parameter entries to be set are filled in the
+     *       buf before this call
+     **/
+    int32_t (*set_parms) (uint32_t camera_handle,
+                          void *parms);
+
+    /** get_parms: function definition for querying camera
+     *             based parameters from server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be queried, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and
+     *       corresponding parameter entries to be queried are filled in
+     *       the buf before this call
+     **/
+    int32_t (*get_parms) (uint32_t camera_handle,
+                          void *parms);
+
+    /** do_auto_focus: function definition for performing auto focus
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: if this call succeeds, we will always assume there will
+     *        be an auto_focus event following up.
+     **/
+    int32_t (*do_auto_focus) (uint32_t camera_handle);
+
+    /** cancel_auto_focus: function definition for cancelling
+     *                     previous auto focus request
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*cancel_auto_focus) (uint32_t camera_handle);
+
+    /** prepare_snapshot: function definition for preparing hardware
+     *                    for snapshot.
+     *    @camera_handle : camera handler
+     *    @do_af_flag    : flag indicating if AF needs to be done
+     *                     0 -- no AF needed
+     *                     1 -- AF needed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*prepare_snapshot) (uint32_t camera_handle,
+                                 int32_t do_af_flag);
+
+    /** start_zsl_snapshot: function definition for starting
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel id
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*start_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+    /** stop_zsl_snapshot: function definition for stopping
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel id
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*stop_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+    /** add_channel: function definition for adding a channel
+     *    @camera_handle : camera handler
+     *    @attr : pointer to channel attribute structure
+     *    @channel_cb : callback to handle bundled super buffer
+     *    @userdata : user data pointer
+     *  Return value: channel handler (ch_id); zero is an invalid ch_id
+     * Note: attr, channel_cb, and userdata can be NULL if no
+     *       superbufCB is needed
+     **/
+    uint32_t (*add_channel) (uint32_t camera_handle,
+                             mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t channel_cb,
+                             void *userdata);
+
+    /** delete_channel: function definition for deleting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_channel) (uint32_t camera_handle,
+                               uint32_t ch_id);
+
+    /** get_bundle_info: function definition for querying bundle
+     *  info of the channel
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel handler
+     *    @bundle_info   : bundle info to be filled in
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*get_bundle_info) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                cam_bundle_config_t *bundle_info);
+
+    /** add_stream: function definition for adding a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: stream_id. zero is invalid stream_id
+     **/
+    uint32_t (*add_stream) (uint32_t camera_handle,
+                            uint32_t ch_id);
+
+    /** delete_stream: function definition for deleting a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id);
+
+    /** link_stream: function definition for linking a stream
+     *    @camera_handle : camera handle
+     *    @ch_id : channel handle from which the stream originates
+     *    @stream_id : stream handle
+     *    @linked_ch_id: channel handle in which the stream will be linked
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*link_stream) (uint32_t camera_handle,
+          uint32_t ch_id,
+          uint32_t stream_id,
+          uint32_t linked_ch_id);
+
+    /** config_stream: function definition for configuring a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @config : pointer to a stream configuration structure
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*config_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id,
+                              mm_camera_stream_config_t *config);
+
+    /** map_stream_buf: function definition for mapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_stream_buf) (uint32_t camera_handle,
+                               uint32_t ch_id,
+                               uint32_t stream_id,
+                               uint8_t buf_type,
+                               uint32_t buf_idx,
+                               int32_t plane_idx,
+                               int fd,
+                               size_t size);
+
+    /** unmap_stream_buf: function definition for unmapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_stream_buf) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx);
+
+    /** set_stream_parms: function definition for setting stream
+     *                    specific parameters to server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be set
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm buffer is already mapped, and
+     *       corresponding parameter entries to be set are filled in the
+     *       buf before this call
+     **/
+    int32_t (*set_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** get_stream_parms: function definition for querying stream
+     *                    specific parameters from server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be queried
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm buffer is already mapped, and
+     *       corresponding parameter entries to be queried are filled in
+     *       the buf before this call
+     **/
+    int32_t (*get_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** start_channel: function definition for starting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     * This call will start all streams belonging to the channel
+     **/
+    int32_t (*start_channel) (uint32_t camera_handle,
+                              uint32_t ch_id);
+
+    /** stop_channel: function definition for stopping a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     * This call will stop all streams belonging to the channel
+     **/
+    int32_t (*stop_channel) (uint32_t camera_handle,
+                             uint32_t ch_id);
+
+    /** qbuf: function definition for queuing a frame buffer back to
+     *        kernel for reuse
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @buf : a frame buffer to be queued back to kernel
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*qbuf) (uint32_t camera_handle,
+                     uint32_t ch_id,
+                     mm_camera_buf_def_t *buf);
+
+    /** get_queued_buf_count: function definition for querying queued buf count
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *  Return value: queued buf count
+     **/
+    int32_t (*get_queued_buf_count) (uint32_t camera_handle,
+            uint32_t ch_id,
+            uint32_t stream_id);
+
+    /** request_super_buf: function definition for requesting frames
+     *                     from superbuf queue in burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @num_buf_requested : number of super buffers requested
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*request_super_buf) (uint32_t camera_handle,
+                                  uint32_t ch_id,
+                                  uint32_t num_buf_requested);
+
+    /** cancel_super_buf_request: function definition for canceling
+     *                     frames dispatched from superbuf queue in
+     *                     burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*cancel_super_buf_request) (uint32_t camera_handle,
+                                         uint32_t ch_id);
+
+    /** flush_super_buf_queue: function definition for flushing out
+     *                     all frames in the superbuf queue up to frame_idx,
+     *                     even if frames with frame_idx come in later than
+     *                     this call.
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @frame_idx : frame index up until which all superbufs are flushed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*flush_super_buf_queue) (uint32_t camera_handle,
+                                      uint32_t ch_id, uint32_t frame_idx);
+
+    /** configure_notify_mode: function definition for configuring the
+     *                         notification mode of channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @notify_mode : notification mode
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*configure_notify_mode) (uint32_t camera_handle,
+                                      uint32_t ch_id,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+
+     /** process_advanced_capture: function definition for start/stop advanced capture
+     *                    for snapshot.
+     *    @camera_handle : camera handle
+     *    @type :  advanced capture type.
+     *    @ch_id : channel handler
+     *    @start_flag    : flag indicating if advanced capture needs to be done
+     *                     0 -- stop advanced capture
+     *                     1 -- start advanced capture
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+     int32_t (*process_advanced_capture) (uint32_t camera_handle,
+                                          mm_camera_advanced_capture_t type,
+                                          uint32_t ch_id,
+                                          int8_t start_flag);
+} mm_camera_ops_t;
+
+/** mm_camera_vtbl_t: virtual table for camera operations
+*    @camera_handle : camera handler which uniquely identifies a
+*                   camera object
+*    @ops : API call table
+**/
+typedef struct {
+    uint32_t camera_handle;
+    mm_camera_ops_t *ops;
+} mm_camera_vtbl_t;
+
+/* return number of cameras */
+uint8_t get_num_of_cameras();
+
+/* return reference pointer of camera vtbl */
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx);
+struct camera_info *get_cam_info(uint32_t camera_id);
+
+/* helper functions */
+int32_t mm_stream_calc_offset_preview(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_stream_info_t *stream_info,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+#endif /*__MM_CAMERA_INTERFACE_H__*/
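
For reference, a minimal usage sketch of the interface declared in mm_camera_interface.h above; it is illustrative only and not part of the patch. It assumes the caller already has a server-mapped cam_stream_info_t and a populated mm_camera_stream_mem_vtbl_t; the helper names sketch_frame_cb and sketch_start_preview are invented for the example, and error handling and teardown are abbreviated.

    #include <string.h>
    #include "mm_camera_interface.h"

    /* Channel callback: hand every buffer of the bundle back to the kernel. */
    static void sketch_frame_cb(mm_camera_super_buf_t *bufs, void *user_data)
    {
        mm_camera_vtbl_t *cam = (mm_camera_vtbl_t *)user_data;
        uint32_t i;
        for (i = 0; i < bufs->num_bufs; i++) {
            cam->ops->qbuf(bufs->camera_handle, bufs->ch_id, bufs->bufs[i]);
        }
    }

    /* Bring up one continuous-mode stream (sketch only). */
    static int sketch_start_preview(cam_stream_info_t *stream_info,
                                    mm_camera_stream_mem_vtbl_t mem_vtbl)
    {
        if (get_num_of_cameras() == 0)
            return -1;

        mm_camera_vtbl_t *cam = camera_open(0);
        if (cam == NULL)
            return -1;

        cam->ops->query_capability(cam->camera_handle);

        /* Continuous notify mode: dispatch each matched super buf to the cb. */
        mm_camera_channel_attr_t attr;
        memset(&attr, 0, sizeof(attr));
        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;

        uint32_t ch_id = cam->ops->add_channel(cam->camera_handle, &attr,
                                               sketch_frame_cb, cam);
        uint32_t stream_id = cam->ops->add_stream(cam->camera_handle, ch_id);
        if (ch_id == 0 || stream_id == 0)
            return -1;

        mm_camera_stream_config_t config;
        memset(&config, 0, sizeof(config));
        config.stream_info = stream_info; /* assumed already mapped to server */
        config.mem_vtbl = mem_vtbl;       /* caller-provided buffer allocator */
        config.stream_cb = NULL;          /* frames arrive via the channel cb */
        cam->ops->config_stream(cam->camera_handle, ch_id, stream_id, &config);

        return cam->ops->start_channel(cam->camera_handle, ch_id);
    }
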
diff --git a/msm8974/QCamera2/stack/common/mm_jpeg_interface.h b/msm8974/QCamera2/stack/common/mm_jpeg_interface.h
new file mode 100644
index 0000000..f00d9bf
--- /dev/null
+++ b/msm8974/QCamera2/stack/common/mm_jpeg_interface.h
@@ -0,0 +1,337 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INTERFACE_H_
+#define MM_JPEG_INTERFACE_H_
+#include "QOMX_JpegExtensions.h"
+#include "cam_intf.h"
+
+#define MM_JPEG_MAX_PLANES 3
+#define MM_JPEG_MAX_BUF CAM_MAX_NUM_BUFS_PER_STREAM
+#define MAX_AF_STATS_DATA_SIZE 1000
+
+typedef enum {
+  MM_JPEG_FMT_YUV,
+  MM_JPEG_FMT_BITSTREAM
+} mm_jpeg_format_t;
+
+typedef enum {
+   FLASH_NOT_FIRED,
+   FLASH_FIRED
+}exif_flash_fired_sate_t;
+
+typedef enum {
+   NO_STROBE_RETURN_DETECT = 0x00,
+   STROBE_RESERVED = 0x01,
+   STROBE_RET_LIGHT_NOT_DETECT = 0x02,
+   STROBE_RET_LIGHT_DETECT = 0x03
+}exif_strobe_state_t;
+
+typedef enum {
+   CAMERA_FLASH_UNKNOWN = 0x00,
+   CAMERA_FLASH_COMPULSORY = 0x08,
+   CAMERA_FLASH_SUPRESSION = 0x10,
+   CAMERA_FLASH_AUTO = 0x18
+}exif_flash_mode_t;
+
+typedef enum {
+   FLASH_FUNC_PRESENT = 0x00,
+   NO_FLASH_FUNC = 0x20
+}exif_flash_func_pre_t;
+
+typedef enum {
+   NO_REDEYE_MODE = 0x00,
+   REDEYE_MODE = 0x40
+}exif_redeye_t;
+
+typedef struct {
+  cam_ae_params_t ae_params;
+  cam_auto_focus_data_t af_params;
+  uint8_t af_mobicat_params[MAX_AF_STATS_DATA_SIZE];
+  cam_awb_params_t awb_params;
+  cam_ae_exif_debug_t ae_debug_params;
+  cam_awb_exif_debug_t awb_debug_params;
+  cam_af_exif_debug_t af_debug_params;
+  cam_asd_exif_debug_t asd_debug_params;
+  cam_stats_buffer_exif_debug_t stats_debug_params;
+  uint8_t ae_debug_params_valid;
+  uint8_t awb_debug_params_valid;
+  uint8_t af_debug_params_valid;
+  uint8_t asd_debug_params_valid;
+  uint8_t stats_debug_params_valid;
+  cam_sensor_params_t sensor_params;
+  cam_flash_mode_t ui_flash_mode;
+  exif_flash_func_pre_t flash_presence;
+  exif_redeye_t red_eye;
+} mm_jpeg_exif_params_t;
+
+typedef struct {
+  uint32_t sequence;          /* for jpeg bit streams, assembling is based on sequence. sequence starts from 0 */
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  size_t buf_size;         /* total size of buf (header + image) */
+  mm_jpeg_format_t format;   /* buffer format*/
+  cam_frame_len_offset_t offset; /* offset of all the planes */
+  uint32_t index; /* index used to identify the buffers */
+} mm_jpeg_buf_t;
+
+typedef struct {
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  size_t buf_filled_len;   /* used for output image. filled by the client */
+} mm_jpeg_output_t;
+
+typedef enum {
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_MONOCHROME,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V1,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V1,
+  MM_JPEG_COLOR_FORMAT_MAX
+} mm_jpeg_color_format;
+
+typedef enum {
+  JPEG_JOB_STATUS_DONE = 0,
+  JPEG_JOB_STATUS_ERROR
+} jpeg_job_status_t;
+
+typedef void (*jpeg_encode_callback_t)(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData);
+
+typedef struct {
+  /* src img dimension */
+  cam_dimension_t src_dim;
+
+  /* jpeg output dimension */
+  cam_dimension_t dst_dim;
+
+  /* crop information */
+  cam_rect_t crop;
+} mm_jpeg_dim_t;
+
+typedef struct {
+  /* num of buf in src img */
+  uint32_t num_src_bufs;
+
+  /* num of src tmb bufs */
+  uint32_t num_tmb_bufs;
+
+  /* num of buf in dst img */
+  uint32_t num_dst_bufs;
+
+  /* should create thumbnail from main image or not */
+  uint32_t encode_thumbnail;
+
+  /* src img bufs */
+  mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t src_thumb_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+  /* main image color format */
+  mm_jpeg_color_format color_format;
+
+  /* thumbnail color format */
+  mm_jpeg_color_format thumb_color_format;
+
+  /* jpeg quality: range 0~100 */
+  uint32_t quality;
+
+  jpeg_encode_callback_t jpeg_cb;
+  void* userdata;
+
+  /* thumbnail dimension */
+  mm_jpeg_dim_t thumb_dim;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* thumb rotation information */
+  uint32_t thumb_rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /* enable encoder burst mode */
+  uint32_t burst_mode;
+
+  /* get memory function ptr */
+  int (*get_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+} mm_jpeg_encode_params_t;
+
+typedef struct {
+  /* num of buf in src img */
+  uint32_t num_src_bufs;
+
+  /* num of buf in dst img */
+  uint32_t num_dst_bufs;
+
+  /* src img bufs */
+  mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+  /* color format */
+  mm_jpeg_color_format color_format;
+
+  jpeg_encode_callback_t jpeg_cb;
+  void* userdata;
+
+} mm_jpeg_decode_params_t;
+
+typedef struct {
+  /* active indices of the buffers for encoding */
+  int32_t src_index;
+  int32_t dst_index;
+  uint32_t thumb_index;
+  mm_jpeg_dim_t thumb_dim;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /*session id*/
+  uint32_t session_id;
+
+  /*Metadata stream*/
+  cam_metadata_info_t *p_metadata;
+
+  /* buf to exif entries, caller needs to
+   * take care of memory management for the inner pointers */
+  QOMX_EXIF_INFO exif_info;
+
+  /* 3a parameters */
+  mm_jpeg_exif_params_t cam_exif_params;
+
+  /* flag to enable/disable mobicat */
+  uint8_t mobicat_mask;
+
+} mm_jpeg_encode_job_t;
+
+typedef struct {
+  /* active indices of the buffers for encoding */
+  int32_t src_index;
+  int32_t dst_index;
+  uint32_t tmb_dst_index;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /*session id*/
+  uint32_t session_id;
+} mm_jpeg_decode_job_t;
+
+typedef enum {
+  JPEG_JOB_TYPE_ENCODE,
+  JPEG_JOB_TYPE_DECODE,
+  JPEG_JOB_TYPE_MAX
+} mm_jpeg_job_type_t;
+
+typedef struct {
+  mm_jpeg_job_type_t job_type;
+  union {
+    mm_jpeg_encode_job_t encode_job;
+    mm_jpeg_decode_job_t decode_job;
+  };
+} mm_jpeg_job_t;
+
+typedef struct {
+  uint32_t w;
+  uint32_t h;
+} mm_dimension;
+
+typedef struct {
+  /* config a job -- async call */
+  int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+  /* abort a job -- sync call */
+  int (*abort_job)(uint32_t job_id);
+
+  /* create a session */
+  int (*create_session)(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params, uint32_t *p_session_id);
+
+  /* destroy session */
+  int (*destroy_session)(uint32_t session_id);
+
+  /* close a jpeg client -- sync call */
+  int (*close) (uint32_t clientHdl);
+} mm_jpeg_ops_t;
+
+typedef struct {
+  /* config a job -- async call */
+  int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+  /* abort a job -- sync call */
+  int (*abort_job)(uint32_t job_id);
+
+  /* create a session */
+  int (*create_session)(uint32_t client_hdl,
+    mm_jpeg_decode_params_t *p_params, uint32_t *p_session_id);
+
+  /* destroy session */
+  int (*destroy_session)(uint32_t session_id);
+
+  /* close a jpeg client -- sync call */
+  int (*close) (uint32_t clientHdl);
+} mm_jpegdec_ops_t;
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * the open failed if client_handle is 0.
+ * jpeg ops tbl will be filled in if open succeeds */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_dimension picture_size);
+
+/* open a jpeg decoder client -- sync call
+ * returns client_handle.
+ * the open failed if client_handle is 0.
+ * jpeg decoder ops tbl will be filled in if open succeeds */
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops);
+
+#endif /* MM_JPEG_INTERFACE_H_ */
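
A usage sketch of the encoder interface above (illustrative only, not part of the patch). It assumes the source and destination mm_jpeg_buf_t entries are already populated, that cam_dimension_t (declared in cam_types.h) exposes width/height fields, and that the caller waits for the asynchronous callback before tearing the session down; sketch_encode_one_frame and sketch_jpeg_done_cb are invented names.

    #include <string.h>
    #include "mm_jpeg_interface.h"

    /* Completion callback: the encoded image is in p_output on success. */
    static void sketch_jpeg_done_cb(jpeg_job_status_t status, uint32_t client_hdl,
                                    uint32_t jobId, mm_jpeg_output_t *p_output,
                                    void *userData)
    {
        (void)client_hdl; (void)jobId; (void)userData;
        if (status == JPEG_JOB_STATUS_DONE) {
            /* consume p_output->buf_vaddr, p_output->buf_filled_len */
        }
    }

    static int sketch_encode_one_frame(mm_jpeg_buf_t *src, mm_jpeg_buf_t *dst,
                                       cam_dimension_t dim)
    {
        mm_jpeg_ops_t ops;
        mm_dimension pic_size;
        pic_size.w = (uint32_t)dim.width;  /* width/height assumed from cam_types.h */
        pic_size.h = (uint32_t)dim.height;

        uint32_t client_hdl = jpeg_open(&ops, pic_size);
        if (client_hdl == 0)
            return -1; /* open failed */

        mm_jpeg_encode_params_t params;
        memset(&params, 0, sizeof(params));
        params.num_src_bufs = 1;
        params.num_dst_bufs = 1;
        params.src_main_buf[0] = *src;
        params.dest_buf[0] = *dst;
        params.color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
        params.quality = 85;
        params.jpeg_cb = sketch_jpeg_done_cb;
        params.main_dim.src_dim = dim;
        params.main_dim.dst_dim = dim;

        uint32_t session_id = 0, job_id = 0;
        ops.create_session(client_hdl, &params, &session_id);

        mm_jpeg_job_t job;
        memset(&job, 0, sizeof(job));
        job.job_type = JPEG_JOB_TYPE_ENCODE;
        job.encode_job.session_id = session_id;
        job.encode_job.src_index = 0;
        job.encode_job.dst_index = 0;
        job.encode_job.main_dim = params.main_dim;
        ops.start_job(&job, &job_id); /* async; completion arrives via jpeg_cb */

        /* ... wait for sketch_jpeg_done_cb, then tear down ... */
        ops.destroy_session(session_id);
        ops.close(client_hdl);
        return 0;
    }
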
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/Android.mk b/msm8974/QCamera2/stack/mm-camera-interface/Android.mk
new file mode 100644
index 0000000..26a90c1
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/Android.mk
@@ -0,0 +1,41 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+    src/mm_camera_interface.c \
+    src/mm_camera.c \
+    src/mm_camera_channel.c \
+    src/mm_camera_stream.c \
+    src/mm_camera_thread.c \
+    src/mm_camera_sock.c \
+    src/cam_intf.c
+
+ifeq ($(call is-board-platform-in-list, msm8974 msm8916 msm8226 msm8610),true)
+    LOCAL_CFLAGS += -DVENUS_PRESENT
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_
+
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += ../common/cam_intf.h
+LOCAL_COPY_HEADERS += ../common/cam_types.h
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common \
+    $(call project-path-for,qcom-media)/mm-core/inc \
+    system/media/camera/include
+
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_MODULE := libmmcamera_interface
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+LOCAL_VENDOR_MODULE := true
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
new file mode 100644
index 0000000..2847697
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -0,0 +1,653 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+#include <cam_semaphore.h>
+
+#include "mm_camera_interface.h"
+#include <hardware/camera.h>
+/**********************************************************************************
+* Data structure declarations
+***********************************************************************************/
+/* num of callbacks allowed for an event type */
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+/* num of data callbacks allowed in a stream obj */
+#define MM_CAMERA_STREAM_BUF_CB_MAX 4
+/* num of data poll threads allowed in a channel obj */
+#define MM_CAMERA_CHANNEL_POLL_THREAD_MAX 1
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 2
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+#define THREAD_NAME_SIZE 15
+
+#define MM_CAMERA_POST_FLASH_PREVIEW_SKIP_CNT 3
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))
+
+struct mm_channel;
+struct mm_stream;
+struct mm_camera_obj;
+
+typedef enum
+{
+    MM_CAMERA_CMD_TYPE_DATA_CB,    /* dataCB CMD */
+    MM_CAMERA_CMD_TYPE_EVT_CB,     /* evtCB CMD */
+    MM_CAMERA_CMD_TYPE_EXIT,       /* EXIT */
+    MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
+    MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB,    /* superbuf dataCB CMD */
+    MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY, /* configure notify mode */
+    MM_CAMERA_CMD_TYPE_START_ZSL, /* start zsl snapshot for channel */
+    MM_CAMERA_CMD_TYPE_STOP_ZSL, /* stop zsl snapshot for channel */
+    MM_CAMERA_CMD_TYPE_FLUSH_QUEUE, /* flush queue */
+    MM_CAMERA_CMD_TYPE_GENERAL,  /* general cmd */
+    MM_CAMERA_CMD_TYPE_MAX
+} mm_camera_cmdcb_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint32_t frame_idx;
+    mm_camera_buf_def_t *buf; /* ref to buf */
+} mm_camera_buf_info_t;
+
+typedef struct {
+    uint32_t num_buf_requested;
+} mm_camera_req_buf_t;
+
+typedef enum {
+    MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_MTF_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X,
+} mm_camera_generic_cmd_type_t;
+
+typedef struct {
+    mm_camera_generic_cmd_type_t type;
+    uint32_t payload[32];
+} mm_camera_generic_cmd_t;
+
+typedef struct {
+    mm_camera_cmdcb_type_t cmd_type;
+    union {
+        mm_camera_buf_info_t buf;    /* frame buf if dataCB */
+        mm_camera_event_t evt;       /* evt if evtCB */
+        mm_camera_super_buf_t superbuf; /* superbuf if superbuf dataCB*/
+        mm_camera_req_buf_t req_buf; /* num of buf requested */
+        uint32_t frame_idx; /* frame idx boundary for flush superbuf queue*/
+        mm_camera_super_buf_notify_mode_t notify_mode; /* notification mode */
+        mm_camera_generic_cmd_t gen_cmd;
+    } u;
+} mm_camera_cmdcb_t;
+
+typedef void (*mm_camera_cmd_cb_t)(mm_camera_cmdcb_t * cmd_cb, void* user_data);
+
+typedef struct {
+    uint8_t is_active;     /*indicates whether thread is active or not */
+    cam_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;     /* semaphore for cmd thread */
+    mm_camera_cmd_cb_t cb;       /* cb for cmd */
+    void* user_data;             /* user_data for cb */
+    char threadName[THREAD_NAME_SIZE];
+} mm_camera_cmd_thread_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TYPE_EVT,
+    MM_CAMERA_POLL_TYPE_DATA,
+    MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+/* function ptr defined for poll notify CB,
+ * registered at poll thread with poll fd */
+typedef void (*mm_camera_poll_notify_t)(void *user_data);
+
+typedef struct {
+    int32_t fd;
+    mm_camera_poll_notify_t notify_cb;
+    uint32_t handler;
+    void* user_data;
+} mm_camera_poll_entry_t;
+
+typedef struct {
+    mm_camera_poll_thread_type_t poll_type;
+    /* array to store poll fd and cb info
+     * for MM_CAMERA_POLL_TYPE_EVT, only index 0 is valid;
+     * for MM_CAMERA_POLL_TYPE_DATA, depends on valid stream fd */
+    mm_camera_poll_entry_t poll_entries[MAX_STREAM_NUM_IN_BUNDLE];
+    int32_t pfds[2];
+    pthread_t pid;
+    int32_t state;
+    int timeoutms;
+    uint32_t cmd;
+    struct pollfd poll_fds[MAX_STREAM_NUM_IN_BUNDLE + 1];
+    uint8_t num_fds;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond_v;
+    int32_t status;
+    char threadName[THREAD_NAME_SIZE];
+    //void *my_obj;
+} mm_camera_poll_thread_t;
+
+/* mm_stream */
+typedef enum {
+    MM_STREAM_STATE_NOTUSED = 0,      /* not used */
+    MM_STREAM_STATE_INITED,           /* inited  */
+    MM_STREAM_STATE_ACQUIRED,         /* acquired, fd opened  */
+    MM_STREAM_STATE_CFG,              /* fmt & dim configured */
+    MM_STREAM_STATE_BUFFED,           /* buf allocated */
+    MM_STREAM_STATE_REG,              /* buf regged, stream off */
+    MM_STREAM_STATE_ACTIVE,           /* active */
+    MM_STREAM_STATE_MAX
+} mm_stream_state_type_t;
+
+typedef enum {
+    MM_STREAM_EVT_ACQUIRE,
+    MM_STREAM_EVT_RELEASE,
+    MM_STREAM_EVT_SET_FMT,
+    MM_STREAM_EVT_GET_BUF,
+    MM_STREAM_EVT_PUT_BUF,
+    MM_STREAM_EVT_REG_BUF,
+    MM_STREAM_EVT_UNREG_BUF,
+    MM_STREAM_EVT_START,
+    MM_STREAM_EVT_STOP,
+    MM_STREAM_EVT_QBUF,
+    MM_STREAM_EVT_SET_PARM,
+    MM_STREAM_EVT_GET_PARM,
+    MM_STREAM_EVT_DO_ACTION,
+    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+    MM_STREAM_EVT_MAX
+} mm_stream_evt_type_t;
+
+typedef struct {
+    mm_camera_buf_notify_t cb;
+    void *user_data;
+    /* cb_count = -1: infinite
+     * cb_count > 0: register only for required times */
+    int8_t cb_count;
+} mm_stream_data_cb_t;
+
+typedef struct {
+    /* buf reference count */
+    uint8_t buf_refcnt;
+
+    /* This flag is to indicate if after allocation,
+     * the corresponding buf needs to qbuf into kernel
+     * (e.g. for preview usecase, display needs to hold two bufs,
+     * so no need to qbuf these two bufs initially) */
+    uint8_t initial_reg_flag;
+
+    /* indicate if buf is in kernel(1) or client(0) */
+    uint8_t in_kernel;
+} mm_stream_buf_status_t;
+
+typedef struct mm_stream {
+    uint32_t my_hdl; /* local stream id */
+    uint32_t server_stream_id; /* stream id from server */
+    int32_t fd;
+    mm_stream_state_type_t state;
+
+    /* stream info*/
+    cam_stream_info_t *stream_info;
+
+    /* padding info */
+    cam_padding_info_t padding_info;
+
+    /* offset */
+    cam_frame_len_offset_t frame_offset;
+
+    pthread_mutex_t cmd_lock; /* lock to protect cmd_thread */
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* dataCB registered on this stream obj */
+    pthread_mutex_t cb_lock; /* cb lock to protect buf_cb */
+    mm_stream_data_cb_t buf_cb[MM_CAMERA_STREAM_BUF_CB_MAX];
+
+    /* stream buffer management */
+    pthread_mutex_t buf_lock;
+    uint8_t buf_num; /* num of buffers allocated */
+    mm_camera_buf_def_t* buf; /* ptr to buf array */
+    mm_stream_buf_status_t* buf_status; /* ptr to buf status array */
+
+    /* reference to parent channel_obj */
+    struct mm_channel* ch_obj;
+
+    uint8_t is_bundled; /* flag if stream is bundled */
+
+    /* reference to linked channel_obj */
+    struct mm_channel* linked_obj;
+    struct mm_stream * linked_stream; /* original stream */
+    uint8_t is_linked; /* flag if stream is linked */
+
+    mm_camera_stream_mem_vtbl_t mem_vtbl; /* mem ops tbl */
+
+    int8_t queued_buffer_count;
+} mm_stream_t;
+
+/* mm_channel */
+typedef enum {
+    MM_CHANNEL_STATE_NOTUSED = 0,   /* not used */
+    MM_CHANNEL_STATE_STOPPED,       /* stopped */
+    MM_CHANNEL_STATE_ACTIVE,        /* active, at least one stream active */
+    MM_CHANNEL_STATE_PAUSED,        /* paused */
+    MM_CHANNEL_STATE_MAX
+} mm_channel_state_type_t;
+
+typedef enum {
+    MM_CHANNEL_EVT_ADD_STREAM,
+    MM_CHANNEL_EVT_DEL_STREAM,
+    MM_CHANNEL_EVT_LINK_STREAM,
+    MM_CHANNEL_EVT_CONFIG_STREAM,
+    MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+    MM_CHANNEL_EVT_START,
+    MM_CHANNEL_EVT_STOP,
+    MM_CHANNEL_EVT_PAUSE,
+    MM_CHANNEL_EVT_RESUME,
+    MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+    MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+    MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+    MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+    MM_CHANNEL_EVT_MAP_STREAM_BUF,
+    MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+    MM_CHANNEL_EVT_SET_STREAM_PARM,
+    MM_CHANNEL_EVT_GET_STREAM_PARM,
+    MM_CHANNEL_EVT_DO_STREAM_ACTION,
+    MM_CHANNEL_EVT_DELETE,
+    MM_CHANNEL_EVT_AF_BRACKETING,
+    MM_CHANNEL_EVT_AE_BRACKETING,
+    MM_CHANNEL_EVT_FLASH_BRACKETING,
+    MM_CHANNEL_EVT_MTF_BRACKETING,
+    MM_CHANNEL_EVT_ZOOM_1X,
+    MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+} mm_channel_evt_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    mm_camera_stream_config_t *config;
+} mm_evt_paylod_config_stream_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_stream_parm_buffer_t *parms;
+} mm_evt_paylod_set_get_stream_parms_t;
+
+typedef struct {
+    uint32_t stream_id;
+    void *actions;
+} mm_evt_paylod_do_stream_action_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint8_t buf_type;
+    uint32_t buf_idx;
+    int32_t plane_idx;
+    int fd;
+    size_t size;
+} mm_evt_paylod_map_stream_buf_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint8_t buf_type;
+    uint32_t buf_idx;
+    int32_t plane_idx;
+} mm_evt_paylod_unmap_stream_buf_t;
+
+typedef struct {
+    uint8_t num_of_bufs;
+    mm_camera_buf_info_t super_buf[MAX_STREAM_NUM_IN_BUNDLE];
+    uint8_t matched;
+    uint32_t frame_idx;
+} mm_channel_queue_node_t;
+
+typedef struct {
+    cam_queue_t que;
+    uint8_t num_streams;
+    /* container for bundled stream handlers */
+    uint32_t bundled_streams[MAX_STREAM_NUM_IN_BUNDLE];
+    mm_camera_channel_attr_t attr;
+    uint32_t expected_frame_id;
+    uint32_t match_cnt;
+    uint32_t expected_frame_id_without_led;
+} mm_channel_queue_t;
+
+typedef struct {
+    uint8_t is_active; /* flag to indicate if bundle is valid */
+    /* queue to store bundled super buffers */
+    mm_channel_queue_t superbuf_queue;
+    mm_camera_buf_notify_t super_buf_notify_cb;
+    void *user_data;
+} mm_channel_bundle_t;
+
+typedef struct mm_channel {
+    uint32_t my_hdl;
+    mm_channel_state_type_t state;
+    pthread_mutex_t ch_lock; /* channel lock */
+
+    /* stream bundle info in the channel */
+    mm_channel_bundle_t bundle;
+
+    /* num of pending superbuffers */
+    uint32_t pending_cnt;
+
+    /* cmd thread for superbuffer dataCB and async stop*/
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* cb thread for sending data cb */
+    mm_camera_cmd_thread_t cb_thread;
+
+    /* data poll thread
+    * currently one data poll thread per channel
+    * could be extended to support one data poll thread per stream in the channel */
+    mm_camera_poll_thread_t poll_thread[MM_CAMERA_CHANNEL_POLL_THREAD_MAX];
+
+    /* container for all streams in channel */
+    mm_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+
+    /* reference to parent cam_obj */
+    struct mm_camera_obj* cam_obj;
+
+    /* manual zsl snapshot control */
+    uint8_t manualZSLSnapshot;
+
+    /* control for zsl led */
+    uint8_t startZSlSnapshotCalled;
+    uint8_t needLEDFlash;
+    uint8_t previewSkipCnt;
+
+    uint8_t need3ABracketing;
+    uint8_t isFlashBracketingEnabled;
+    uint8_t isZoom1xFrameRequested;
+    char threadName[THREAD_NAME_SIZE];
+} mm_channel_t;
+
+typedef struct {
+    mm_channel_t *ch;
+    uint32_t stream_id;
+} mm_camera_stream_link_t;
+
+/* struct to store information about pp cookie*/
+typedef struct {
+    uint32_t cam_hdl;
+    uint32_t ch_hdl;
+    uint32_t stream_hdl;
+    mm_channel_queue_node_t* super_buf;
+} mm_channel_pp_info_t;
+
+/* mm_camera */
+typedef struct {
+    mm_camera_event_notify_t evt_cb;
+    void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+    mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+    /* reg_count <=0: infinite
+     * reg_count > 0: register only for required times */
+    int reg_count;
+} mm_camera_evt_obj_t;
+
+typedef struct mm_camera_obj {
+    uint32_t my_hdl;
+    int ref_count;
+    int32_t ctrl_fd;
+    int32_t ds_fd; /* domain socket fd */
+    pthread_mutex_t cam_lock;
+    pthread_mutex_t cb_lock; /* lock for evt cb */
+    mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
+    mm_camera_evt_obj_t evt;
+    mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
+    mm_camera_cmd_thread_t evt_thread;       /* thread for evt CB */
+    mm_camera_vtbl_t vtbl;
+
+    pthread_mutex_t evt_lock;
+    pthread_cond_t evt_cond;
+    mm_camera_event_t evt_rcvd;
+
+    pthread_mutex_t msg_lock; /* lock for sending msg through socket */
+} mm_camera_obj_t;
+
+typedef struct {
+    int8_t num_cam;
+    char video_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+    mm_camera_obj_t *cam_obj[MM_CAMERA_MAX_NUM_SENSORS];
+    struct camera_info info[MM_CAMERA_MAX_NUM_SENSORS];
+} mm_camera_ctrl_t;
+
+typedef enum {
+    mm_camera_async_call,
+    mm_camera_sync_call
+} mm_camera_call_type_t;
+
+/**********************************************************************************
+* external function declarations
+***********************************************************************************/
+/* utility functions */
+/* set int32_t value */
+extern int32_t mm_camera_util_s_ctrl(int32_t fd,
+                                     uint32_t id,
+                                     int32_t *value);
+
+/* get int32_t value */
+extern int32_t mm_camera_util_g_ctrl(int32_t fd,
+                                     uint32_t id,
+                                     int32_t *value);
+
+/* send msg through domain socket for fd mapping */
+extern int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                                      void *msg,
+                                      size_t buf_size,
+                                      int sendfd);
+/* Check if hardware target is A family */
+uint8_t mm_camera_util_chip_is_a_family(void);
+
+/* mm-camera */
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                               mm_camera_event_notify_t evt_cb,
+                                               void * user_data);
+extern int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              mm_camera_buf_def_t *buf);
+extern int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id);
+extern int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+                                   void *parms);
+extern int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+                                   void *parms);
+extern int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                                 uint8_t buf_type,
+                                 int fd,
+                                 size_t size);
+extern int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                                   uint8_t buf_type);
+extern int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                          int32_t do_af_flag);
+extern int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id);
+extern int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id);
+extern uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                                      mm_camera_channel_attr_t *attr,
+                                      mm_camera_buf_notify_t channel_cb,
+                                      void *userdata);
+extern int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                         uint32_t ch_id,
+                                         cam_bundle_config_t *bundle_info);
+extern uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                                    uint32_t ch_id,
+                                    uint32_t stream_id);
+extern uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id);
+extern int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id,
+                                       uint32_t stream_id,
+                                       mm_camera_stream_config_t *config);
+extern int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id);
+extern int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                                      uint32_t ch_id);
+extern int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+                                           uint32_t ch_id,
+                                           uint32_t num_buf_requested);
+extern int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj,
+                                                  uint32_t ch_id);
+extern int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               uint32_t frame_idx);
+extern int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               mm_camera_super_buf_notify_mode_t notify_mode);
+extern int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                        mm_camera_event_notify_t evt_cb,
+                                                        void * user_data);
+extern int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        uint32_t stream_id,
+                                        uint8_t buf_type,
+                                        uint32_t buf_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        size_t size);
+extern int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          uint8_t buf_type,
+                                          uint32_t buf_idx,
+                                          int32_t plane_idx);
+extern int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          void *actions);
+
+/* mm_channel */
+extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                                 mm_channel_evt_type_t evt,
+                                 void * in_val,
+                                 void * out_val);
+extern int32_t mm_channel_init(mm_channel_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata);
+/* qbuf is a special case that does not go through the state machine.
+ * This avoids a deadlock when trying to acquire ch_lock from the
+ * context of dataCB while an async stop is holding ch_lock. */
+extern int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                               mm_camera_buf_def_t *buf);
+/* mm_stream */
+extern int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                                mm_stream_evt_type_t evt,
+                                void * in_val,
+                                void * out_val);
+/* Allow another stream to register a dataCB on a given stream.
+ * This is for the video-sized live snapshot use case, where the
+ * snapshot stream needs to register a one-time CB on the video stream.
+ * ext_image_mode and sensor_idx identify the destination stream the
+ * dataCB is registered with. */
+extern int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+                                    mm_stream_data_cb_t *val);
+extern int32_t mm_stream_map_buf(mm_stream_t *my_obj,
+                                 uint8_t buf_type,
+                                 uint32_t frame_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 size_t size);
+extern int32_t mm_stream_unmap_buf(mm_stream_t *my_obj,
+                                   uint8_t buf_type,
+                                   uint32_t frame_idx,
+                                   int32_t plane_idx);
+
+
+/* utility functions declared in mm-camera-interface2.c
+ * that also need to be used by mm-camera and below */
+uint32_t mm_camera_util_generate_handler(uint8_t index);
+const char * mm_camera_util_get_dev_name(uint32_t cam_handler);
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler);
+
+/* poll/cmd thread functions */
+extern int32_t mm_camera_poll_thread_launch(
+                                mm_camera_poll_thread_t * poll_cb,
+                                mm_camera_poll_thread_type_t poll_type);
+extern int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb);
+extern int32_t mm_camera_poll_thread_add_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                int32_t fd,
+                                mm_camera_poll_notify_t notify_cb,
+                                void *userdata,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_del_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_commit_updates(
+        mm_camera_poll_thread_t * poll_cb);
+extern int32_t mm_camera_cmd_thread_launch(
+                                mm_camera_cmd_thread_t * cmd_thread,
+                                mm_camera_cmd_cb_t cb,
+                                void* user_data);
+extern int32_t mm_camera_cmd_thread_name(const char* name);
+extern int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread);
+
+extern int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+                                               mm_camera_advanced_capture_t advanced_capturetype,
+                                               uint32_t ch_id,
+                                               uint32_t start_flag);
+#endif /* __MM_CAMERA_H__ */
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
new file mode 100644
index 0000000..3b8c06a
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -0,0 +1,76 @@
+/* Copyright (c) 2012, 2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+#define LOG_DEBUG 1
+/* Choose the debug log level. This does not affect the error logs.
+   0: turns off CDBG and CDBG_HIGH logs
+   1: turns on CDBG_HIGH logs
+   2: turns on CDBG_HIGH and CDBG logs */
+extern volatile uint32_t gMmCameraIntfLogLevel;
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-intf"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+  #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-intf"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGD_IF(gMmCameraIntfLogLevel >= 2, fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...) ALOGD_IF(gMmCameraIntfLogLevel >= 1, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_CAMERA_DBG_H__ */
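+
+/*
+ * Illustrative usage sketch (editor's note, not part of the original change):
+ * on _ANDROID_ builds the macros above gate verbosity on gMmCameraIntfLogLevel,
+ * so a caller would see output roughly as follows.
+ *
+ *   CDBG("%s: verbose trace, printed when level >= 2", __func__);
+ *   CDBG_HIGH("%s: high-level trace, printed when level >= 1", __func__);
+ *   CDBG_ERROR("%s: error, always printed", __func__);
+ */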
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
new file mode 100644
index 0000000..e6f42be
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
@@ -0,0 +1,65 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+#include <inttypes.h>
+#include <sys/socket.h>
+#include <sys/uio.h>
+#include <sys/un.h>
+
+typedef enum {
+    MM_CAMERA_SOCK_TYPE_UDP,
+    MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+typedef union {
+    struct sockaddr addr;
+    struct sockaddr_un addr_un;
+} mm_camera_sock_addr_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfd);
+
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
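+/*
+ * Illustrative usage sketch (editor's note, not part of the original change):
+ * the mm-camera stack drives this API roughly as below when mapping buffers to
+ * the daemon; cam_idx, msg and buf_fd are placeholders supplied by the caller.
+ *
+ *   int ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+ *   if (ds_fd > 0) {
+ *       // pass a buffer fd to the daemon along with the mapping message
+ *       mm_camera_socket_sendmsg(ds_fd, &msg, sizeof(msg), buf_fd);
+ *       mm_camera_socket_close(ds_fd);
+ *   }
+ */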
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/cam_intf.c b/msm8974/QCamera2/stack/mm-camera-interface/src/cam_intf.c
new file mode 100644
index 0000000..1349ea6
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/cam_intf.c
@@ -0,0 +1,53 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "cam_intf.h"
+
+void *POINTER_OF_PARAM(cam_intf_parm_type_t PARAM_ID,
+                 void *table_ptr)
+{
+  parm_buffer_new_t *TABLE_PTR = (parm_buffer_new_t *)table_ptr;
+  void *tmp_p;
+  size_t j = 0, i = TABLE_PTR->num_entry;
+  tmp_p = (void *) &TABLE_PTR->entry[0];
+  parm_entry_type_new_t *curr_param = (parm_entry_type_new_t *) tmp_p;
+
+  for (j = 0; j < i; j++) {
+    if (PARAM_ID == curr_param->entry_type) {
+      return (void *)&curr_param->data[0];
+    }
+    curr_param = GET_NEXT_PARAM(curr_param, parm_entry_type_new_t);
+  }
+  tmp_p = (void *) &TABLE_PTR->entry[0];
+  curr_param = (parm_entry_type_new_t *) tmp_p;
+  return (void *)&curr_param->data[0]; // should not reach here; returning
+                                       // the first entry only prevents a
+                                       // crash for the caller
+}
+
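+/*
+ * Illustrative usage sketch (editor's note, not part of the original change):
+ * callers look up an entry in a populated parm_buffer_new_t table and cast the
+ * returned data pointer to the expected payload type; PARAM_ID and int32_t are
+ * placeholders here.
+ *
+ *   int32_t *val = (int32_t *)POINTER_OF_PARAM(PARAM_ID, parm_buf);
+ *   // if PARAM_ID is absent, the first entry's data is returned as a fallback
+ */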
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..2484be2
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,1893 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+#define SET_PARM_BIT32(parm, parm_arr) \
+    (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+    ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
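+/*
+ * Editor's note (illustrative, not part of the original change): the macros
+ * above pack one bit per parameter into an array of 32-bit words. For example,
+ * parm = 37 lands in word 37/32 = 1 at bit 37%32 = 5:
+ *
+ *   uint32_t parm_arr[4] = {0};
+ *   SET_PARM_BIT32(37, parm_arr);   // parm_arr[1] |= (1 << 5)
+ *   GET_PARM_BIT32(37, parm_arr);   // evaluates to 1
+ */
+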
+/* internal function declarations */
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event);
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_channel_by_handler
+ *
+ * DESCRIPTION: utility function to get a channel object from its handle
+ *
+ * PARAMETERS :
+ *   @cam_obj: ptr to a camera object
+ *   @handler: channel handle
+ *
+ * RETURN     : ptr to a channel object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+                                    mm_camera_obj_t * cam_obj,
+                                    uint32_t handler)
+{
+    int i;
+    mm_channel_t *ch_obj = NULL;
+    for(i = 0; i < MM_CAMERA_CHANNEL_MAX; i++) {
+        if (handler == cam_obj->ch[i].my_hdl) {
+            ch_obj = &cam_obj->ch[i];
+            break;
+        }
+    }
+    return ch_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_chip_is_a_family
+ *
+ * DESCRIPTION: utility function to check if the host is A family chip
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : TRUE if A family.
+ *              FALSE otherwise.
+ *==========================================================================*/
+uint8_t mm_camera_util_chip_is_a_family(void)
+{
+#ifdef USE_A_FAMILY
+    return TRUE;
+#else
+    return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_dispatch_app_event
+ *
+ * DESCRIPTION: dispatch event to apps that registered for event notify
+ *
+ * PARAMETERS :
+ *   @cmd_cb: ptr to a struct storing event info
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+                                         void* user_data)
+{
+    mm_camera_cmd_thread_name("mm_cam_event");
+    int i;
+    mm_camera_event_t *event = &cmd_cb->u.evt;
+    mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
+    if (NULL != my_obj) {
+        pthread_mutex_lock(&my_obj->cb_lock);
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(my_obj->evt.evt[i].evt_cb) {
+                my_obj->evt.evt[i].evt_cb(
+                    my_obj->my_hdl,
+                    event,
+                    my_obj->evt.evt[i].user_data);
+            }
+        }
+        pthread_mutex_unlock(&my_obj->cb_lock);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_event_notify
+ *
+ * DESCRIPTION: callback to handle event notify from kernel. This call will
+ *              dequeue event from kernel.
+ *
+ * PARAMETERS :
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_event_notify(void* user_data)
+{
+    struct v4l2_event ev;
+    struct msm_v4l2_event_data *msm_evt = NULL;
+    int rc;
+    mm_camera_event_t evt;
+    memset(&evt, 0, sizeof(mm_camera_event_t));
+
+    mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+    if (NULL != my_obj) {
+        /* read evt */
+        memset(&ev, 0, sizeof(ev));
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+
+        if (rc >= 0 && ev.id == MSM_CAMERA_MSM_NOTIFY) {
+            msm_evt = (struct msm_v4l2_event_data *)ev.u.data;
+            switch (msm_evt->command) {
+            case CAM_EVENT_TYPE_MAP_UNMAP_DONE:
+                pthread_mutex_lock(&my_obj->evt_lock);
+                my_obj->evt_rcvd.server_event_type = msm_evt->command;
+                my_obj->evt_rcvd.status = msm_evt->status;
+                pthread_cond_signal(&my_obj->evt_cond);
+                pthread_mutex_unlock(&my_obj->evt_lock);
+                break;
+            case CAM_EVENT_TYPE_REPROCESS_STAGE_DONE:
+                {
+                    evt.server_event_type = CAM_EVENT_TYPE_REPROCESS_STAGE_DONE;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            case CAM_EVENT_TYPE_INT_TAKE_PIC:
+                {
+                    evt.server_event_type = CAM_EVENT_TYPE_INT_TAKE_PIC;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            case MSM_CAMERA_PRIV_SHUTDOWN:
+                {
+                    CDBG_ERROR("%s: Camera Event DAEMON DIED received", __func__);
+                    evt.server_event_type = CAM_EVENT_TYPE_DAEMON_DIED;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            default:
+                break;
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_enqueue_evt
+ *
+ * DESCRIPTION: enqueue received event into event queue to be processed by
+ *              event thread.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @event    : event to be queued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t *node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+        node->u.evt = *event;
+
+        /* enqueue to evt cmd thread */
+        cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+        /* wake up evt cmd thread */
+        cam_sem_post(&(my_obj->evt_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_open
+ *
+ * DESCRIPTION: open a camera
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = 0;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    int cam_idx = 0;
+
+    CDBG("%s:  begin\n", __func__);
+
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+             mm_camera_util_get_dev_name(my_obj->my_hdl));
+    sscanf(dev_name, "/dev/video%d", &cam_idx);
+    CDBG_ERROR("%s: dev name = %s, cam_idx = %d", __func__, dev_name, cam_idx);
+
+    do{
+        n_try--;
+        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        CDBG("%s:  ctrl_fd = %d, errno == %d", __func__, my_obj->ctrl_fd, errno);
+        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0 )) {
+            CDBG_ERROR("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__, sleep_msec);
+        usleep(sleep_msec * 1000U);
+    }while (n_try > 0);
+
+    if (my_obj->ctrl_fd <= 0) {
+        CDBG_ERROR("%s: cannot open control fd of '%s' (%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+
+    /* open domain socket*/
+    n_try = MM_CAMERA_DEV_OPEN_TRIES;
+    do {
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+        CDBG("%s:  ds_fd = %d, errno = %d", __func__, my_obj->ds_fd, errno);
+        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
+            CDBG("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__, sleep_msec);
+        usleep(sleep_msec * 1000U);
+    } while (n_try > 0);
+
+    if (my_obj->ds_fd <= 0) {
+        CDBG_ERROR("%s: cannot open domain socket fd of '%s'(%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+    pthread_mutex_init(&my_obj->msg_lock, NULL);
+
+    pthread_mutex_init(&my_obj->cb_lock, NULL);
+    pthread_mutex_init(&my_obj->evt_lock, NULL);
+    pthread_cond_init(&my_obj->evt_cond, NULL);
+
+    CDBG("%s : Launch evt Thread in Cam Open",__func__);
+    snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
+    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+                                mm_camera_dispatch_app_event,
+                                (void *)my_obj);
+
+    /* launch event poll thread
+     * we will add the evt fd into the event poll thread when the user first registers for events */
+    CDBG("%s : Launch evt Poll Thread in Cam Open", __func__);
+    snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Poll");
+    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+                                 MM_CAMERA_POLL_TYPE_EVT);
+    mm_camera_evt_sub(my_obj, TRUE);
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * in order not to block operation of the other camera in the dual camera use case. */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    CDBG("%s:  end (rc = %d)\n", __func__, rc);
+    return rc;
+
+on_error:
+    if (my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = 0;
+    }
+    if (my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+       my_obj->ds_fd = 0;
+    }
+
+    /* unlock cam_lock; we need to release the global intf_lock in camera_open()
+     * in order not to block operation of the other camera in the dual camera use case. */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_close
+ *
+ * DESCRIPTION: close a camera. Releases the event threads, control fd,
+ *              domain socket fd, and associated locks.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    CDBG("%s : unsubscribe evt", __func__);
+    mm_camera_evt_sub(my_obj, FALSE);
+
+    CDBG("%s : Close evt Poll Thread in Cam Close",__func__);
+    mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+    CDBG("%s : Close evt cmd Thread in Cam Close",__func__);
+    mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+    if(my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = 0;
+    }
+    if(my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = 0;
+    }
+    pthread_mutex_destroy(&my_obj->msg_lock);
+
+    pthread_mutex_destroy(&my_obj->cb_lock);
+    pthread_mutex_destroy(&my_obj->evt_lock);
+    pthread_cond_destroy(&my_obj->evt_cond);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify_internal
+ *
+ * DESCRIPTION: internal implementation for registering callback for event notify.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                 mm_camera_event_notify_t evt_cb,
+                                                 void * user_data)
+{
+    int i;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = NULL;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    evt_array = &my_obj->evt;
+    if(evt_cb) {
+        /* this is reg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == NULL) {
+                evt_array->evt[i].evt_cb = evt_cb;
+                evt_array->evt[i].user_data = user_data;
+                evt_array->reg_count++;
+                rc = 0;
+                break;
+            }
+        }
+    } else {
+        /* this is unreg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == user_data) {
+                evt_array->evt[i].evt_cb = NULL;
+                evt_array->evt[i].user_data = NULL;
+                evt_array->reg_count--;
+                rc = 0;
+                break;
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&my_obj->cb_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify
+ *
+ * DESCRIPTION: registering a callback for event notify.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                        mm_camera_event_notify_t evt_cb,
+                                        void * user_data)
+{
+    int rc = -1;
+    rc = mm_camera_register_event_notify_internal(my_obj,
+                                                  evt_cb,
+                                                  user_data);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                       uint32_t ch_id,
+                       mm_camera_buf_def_t *buf)
+{
+    int rc = -1;
+    mm_channel_t * ch_obj = NULL;
+    ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* we always assume qbuf will be done before the channel/stream is fully
+     * stopped, because qbuf is done within the dataCB context;
+     * to avoid deadlock, we do not lock ch_lock for qbuf */
+    if (NULL != ch_obj) {
+        rc = mm_channel_qbuf(ch_obj, buf);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id : stream id
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id)
+{
+    int rc = -1;
+    mm_channel_t * ch_obj = NULL;
+    uint32_t payload;
+    ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+    payload = stream_id;
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        rc = mm_channel_fsm_fn(ch_obj,
+                MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+                (void *)&payload,
+                NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ *   @my_obj: camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_capability cap;
+
+    /* get camera capabilities */
+    memset(&cap, 0, sizeof(cap));
+    rc = ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCAP, &cap);
+    if (rc != 0) {
+        CDBG_ERROR("%s: cannot get camera capabilities, rc = %d\n", __func__, rc);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assumes the parms struct buf is already mapped to the server via
+ *              domain socket. The corresponding fields of the parameters to be
+ *              set are already filled in by the upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj, void *parms)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if ((parm_buffer_new_t *)parms !=  NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assumes the parms struct buf is already mapped to the server via
+ *              domain socket. The parameters to be queried from the server are
+ *              already filled in by the upper layer caller. After this call, the
+ *              corresponding fields of the requested parameters will be filled
+ *              in by the server with detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj, void *parms)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if ((parm_buffer_new_t *)parms != NULL) {
+        rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we always assume an auto_focus
+ *              event will follow.
+ *==========================================================================*/
+int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_DO_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_CANCEL_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                   int32_t do_af_flag)
+{
+    int32_t rc = -1;
+    int32_t value = do_af_flag;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PREPARE_SNAPSHOT, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+             CAM_PRIV_START_ZSL_SNAPSHOT, &value);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl capture
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+             CAM_PRIV_STOP_ZSL_SNAPSHOT, &value);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata)
+{
+    mm_channel_t *ch_obj = NULL;
+    uint8_t ch_idx = 0;
+    uint32_t ch_hdl = 0;
+
+    for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
+        if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
+            ch_obj = &my_obj->ch[ch_idx];
+            break;
+        }
+    }
+
+    if (NULL != ch_obj) {
+        /* initialize channel obj */
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+        ch_hdl = mm_camera_util_generate_handler(ch_idx);
+        ch_obj->my_hdl = ch_hdl;
+        ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+        ch_obj->cam_obj = my_obj;
+        pthread_mutex_init(&ch_obj->ch_lock, NULL);
+        mm_channel_init(ch_obj, attr, channel_cb, userdata);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return ch_hdl;
+}
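+
+/*
+ * Illustrative call sequence (editor's note, not part of the original change):
+ * these functions unlock my_obj->cam_lock themselves, so they are expected to
+ * be entered with it held, as the mm-camera-interface layer does. attr, cb,
+ * userdata and config below are placeholders; a bundle is typically built up
+ * roughly in this order:
+ *
+ *   uint32_t ch = mm_camera_add_channel(my_obj, attr, cb, userdata);
+ *   uint32_t st = mm_camera_add_stream(my_obj, ch);
+ *   mm_camera_config_stream(my_obj, ch, st, config);
+ *   mm_camera_start_channel(my_obj, ch);
+ *   ...
+ *   mm_camera_stop_channel(my_obj, ch);
+ *   mm_camera_del_stream(my_obj, ch, st);
+ *   mm_camera_del_channel(my_obj, ch);
+ */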
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DELETE,
+                               NULL,
+                               NULL);
+
+        pthread_mutex_destroy(&ch_obj->ch_lock);
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+                               (void *)bundle_info,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_link_stream
+ *
+ * DESCRIPTION: link a stream into a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream that will be linked
+ *   @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id)
+{
+    uint32_t s_hdl = 0;
+    mm_channel_t * ch_obj =
+            mm_camera_util_get_channel_by_handler(my_obj, linked_ch_id);
+    mm_channel_t * owner_obj =
+            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if ((NULL != ch_obj) && (NULL != owner_obj)) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        mm_camera_stream_link_t stream_link;
+        memset(&stream_link, 0, sizeof(mm_camera_stream_link_t));
+        stream_link.ch = owner_obj;
+        stream_link.stream_id = stream_id;
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_LINK_STREAM,
+                          (void*)&stream_link,
+                          (void*)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    uint32_t s_hdl = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_ADD_STREAM,
+                          NULL,
+                          (void *)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                             uint32_t ch_id,
+                             uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DEL_STREAM,
+                               (void *)&stream_id,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_zsl_snapshot_ch
+ *
+ * DESCRIPTION: starts zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_zsl_snapshot_ch
+ *
+ * DESCRIPTION: stops zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint32_t stream_id,
+                                mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+    mm_evt_paylod_config_stream_t payload;
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+        payload.stream_id = stream_id;
+        payload.config = config;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_STREAM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_START,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                               uint32_t ch_id)
+{
+    int32_t rc = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
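+
+/* NOTE: unlike the other wrappers, mm_camera_stop_channel() initializes rc
+ * to 0, so stopping an unknown channel handle is reported as success rather
+ * than failure; presumably so that a redundant stop is not treated as an
+ * error. */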
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+                                    uint32_t ch_id,
+                                    uint32_t num_buf_requested)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+                               (void *)&num_buf_requested,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj, uint32_t ch_id,
+                                                             uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+                               (void *)&frame_idx,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_channel_notify
+ *
+ * DESCRIPTION: configures the channel notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+                               (void *)&notify_mode,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_SET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are
+ *              already filled in by the upper layer caller. After this call,
+ *              corresponding fields of the requested parameters will be
+ *              filled in by the server with detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_GET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_stream_action
+ *
+ * DESCRIPTION: request server to perform a stream-based action. May be removed
+ *              later if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @actions      : ptr to an action struct buf to be performed by server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   void *actions)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_do_stream_action_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.actions = actions;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DO_STREAM_ACTION,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 size_t size)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_map_stream_buf_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.buf_type = buf_type;
+        payload.buf_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        payload.fd = fd;
+        payload.size = size;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_MAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
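+
+/* Illustrative use of the mapping call above (info_fd/info_size are
+ * hypothetical placeholders): mapping a stream-info buffer, for which
+ * buf_idx is unused (pass 0) and plane_idx = -1 indicates a single fd for
+ * all planes:
+ *
+ *     rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+ *                                   CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+ *                                   0, -1, info_fd, info_size);
+ */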
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   uint8_t buf_type,
+                                   uint32_t buf_idx,
+                                   int32_t plane_idx)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_unmap_stream_buf_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.buf_type = buf_type;
+        payload.buf_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_evt_sub
+ *
+ * DESCRIPTION: subscribe/unsubscribe event notify from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @reg_flag     : 1 -- subscribe ; 0 -- unsubscribe
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag)
+{
+    int32_t rc = 0;
+    struct v4l2_event_subscription sub;
+
+    memset(&sub, 0, sizeof(sub));
+    sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
+    sub.id = MSM_CAMERA_MSM_NOTIFY;
+    if(FALSE == reg_flag) {
+        /* unsubscribe */
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            CDBG_ERROR("%s: unsubscribe event rc = %d", __func__, rc);
+            return rc;
+        }
+        /* remove evt fd from the polling thread when unregistering the last event */
+        rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               mm_camera_sync_call);
+    } else {
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            CDBG_ERROR("%s: subscribe event rc = %d", __func__, rc);
+            return rc;
+        }
+        /* add evt fd to the polling thread when subscribing the first event */
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               my_obj->ctrl_fd,
+                                               mm_camera_event_notify,
+                                               (void*)my_obj,
+                                               mm_camera_sync_call);
+    }
+    return rc;
+}
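+
+/* NOTE: subscribing registers ctrl_fd with the event poll thread so that
+ * mm_camera_event_notify() runs when the kernel posts an event;
+ * unsubscribing removes the fd again. Both poll-thread updates are issued
+ * as synchronous calls (mm_camera_sync_call). */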
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_wait_for_event
+ *
+ * DESCRIPTION: utility function to wait for certain events
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @evt_mask     : mask of events to wait for. Any event in the mask
+ *                   ends the wait
+ *   @status       : status of the event
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_util_wait_for_event(mm_camera_obj_t *my_obj,
+                                   uint32_t evt_mask,
+                                   uint32_t *status)
+{
+    int rc = 0;
+    struct timespec ts;
+
+    pthread_mutex_lock(&my_obj->evt_lock);
+    while (!(my_obj->evt_rcvd.server_event_type & evt_mask)) {
+        clock_gettime(CLOCK_REALTIME, &ts);
+        ts.tv_sec++;
+        rc = pthread_cond_timedwait(&my_obj->evt_cond, &my_obj->evt_lock, &ts);
+        if (rc == ETIMEDOUT) {
+            ALOGE("%s pthread_cond_timedwait success\n", __func__);
+            my_obj->evt_rcvd.status = MSM_CAMERA_STATUS_FAIL;
+            break;
+        }
+    }
+    *status = my_obj->evt_rcvd.status;
+    /* reset local storage for the received event before the next event */
+    memset(&my_obj->evt_rcvd, 0, sizeof(mm_camera_event_t));
+    pthread_mutex_unlock(&my_obj->evt_lock);
+}
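+
+/* NOTE: the wait above loops on a condition variable with a one-second
+ * timeout; on ETIMEDOUT the status is forced to MSM_CAMERA_STATUS_FAIL, so
+ * callers such as mm_camera_util_sendmsg() see the pending operation as
+ * failed instead of blocking forever. */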
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_sendmsg
+ *
+ * DESCRIPTION: utility function to send msg via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @msg          : message to be sent
+ *   @buf_size     : size of the message to be sent
+ *   @sendfd       : >0 if a file descriptor needs to be passed across processes
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                               void *msg,
+                               size_t buf_size,
+                               int sendfd)
+{
+    int32_t rc = -1;
+    uint32_t status;
+
+    /* need to hold msg_lock, since sendmsg and waiting for the response are treated as one operation */
+    pthread_mutex_lock(&my_obj->msg_lock);
+    if(mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd) > 0) {
+        /* wait for event that mapping/unmapping is done */
+        mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+        if (MSM_CAMERA_STATUS_SUCCESS == status) {
+            rc = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->msg_lock);
+    return rc;
+}
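+
+/* NOTE: a map/unmap request is a small synchronous protocol: the packet
+ * (and optionally an fd) is written to the server over the domain socket,
+ * then the caller blocks in mm_camera_util_wait_for_event() until the
+ * server reports CAM_EVENT_TYPE_MAP_UNMAP_DONE with a success/fail status.
+ * msg_lock keeps concurrent mappings from interleaving their
+ * request/response pairs. */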
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                          uint8_t buf_type,
+                          int fd,
+                          size_t size)
+{
+    int32_t rc = 0;
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                fd);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
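+
+/* NOTE: cam_lock is released at the end of mm_camera_map_buf() (and of
+ * mm_camera_unmap_buf() below) because the dispatching caller is expected
+ * to enter with cam_lock held, mirroring the channel wrappers above.
+ * Illustrative use (cap_fd/cap_size are hypothetical placeholders), mapping
+ * the capability buffer:
+ *
+ *     rc = mm_camera_map_buf(my_obj, CAM_MAPPING_BUF_TYPE_CAPABILITY,
+ *                            cap_fd, cap_size);
+ */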
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                            uint8_t buf_type)
+{
+    int32_t rc = 0;
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                0);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_s_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for s_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending the ioctl
+ *   @id      : control id
+ *   @value   : value of the ioctl to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_s_ctrl(int32_t fd,  uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+    rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+
+    CDBG("%s: fd=%d, S_CTRL, id=0x%x, value = %p, rc = %d\n",
+         __func__, fd, id, value, rc);
+    if (value != NULL) {
+        *value = control.value;
+    }
+    return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_g_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for g_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending the ioctl
+ *   @id      : control id
+ *   @value   : in/out ptr to the control value; updated with the value
+ *              returned by the ioctl
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+    rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+    CDBG("%s: fd=%d, G_CTRL, id=0x%x, rc = %d\n", __func__, fd, id, rc);
+    if (value != NULL) {
+        *value = control.value;
+    }
+    return (rc >= 0)? 0 : -1;
+}
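+
+/* Illustrative use of the s_ctrl/g_ctrl helpers (the control id below is a
+ * hypothetical placeholder, not a real define from this tree):
+ *
+ *     int32_t val = 1;
+ *     if (0 == mm_camera_util_s_ctrl(my_obj->ctrl_fd, SOME_PRIVATE_CID, &val))
+ *         mm_camera_util_g_ctrl(my_obj->ctrl_fd, SOME_PRIVATE_CID, &val);
+ *
+ * Both helpers zero a struct v4l2_control, copy *value in if non-NULL,
+ * issue the ioctl, copy control.value back out, and map any negative ioctl
+ * return to -1. */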
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_channel_advanced_capture
+ *
+ * DESCRIPTION: sets the channel advanced capture
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @advanced_capture_type : advanced capture type.
+ *   @ch_id        : channel handle
+ *   @start_flag  : flag to indicate start/stop
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+                                        mm_camera_advanced_capture_t advanced_capture_type,
+                                        uint32_t ch_id,
+                                        uint32_t start_flag)
+{
+    CDBG("%s: E",__func__);
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        switch (advanced_capture_type) {
+            case MM_CAMERA_AF_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_AF_BRACKETING,
+                                       (void *)&start_flag,
+                                       NULL);
+                break;
+            case MM_CAMERA_AE_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_AE_BRACKETING,
+                                       (void *)&start_flag,
+                                       NULL);
+                break;
+            case MM_CAMERA_FLASH_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_FLASH_BRACKETING,
+                                       (void *)&start_flag,
+                                       NULL);
+                break;
+            case MM_CAMERA_MTF_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_MTF_BRACKETING,
+                                       (void *)&start_flag,
+                                       NULL);
+                break;
+            case MM_CAMERA_ZOOM_1X:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_ZOOM_1X,
+                                       (void *)&start_flag,
+                                       NULL);
+                break;
+            default:
+                break;
+        }
+
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    CDBG("%s: X",__func__);
+    return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..23390b1
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,2576 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(mm_camera_obj_t * cam_obj,
+                                                            uint32_t handler);
+
+/* internal function declare goes here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id);
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+        mm_camera_stream_link_t *stream_link);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                 uint32_t stream_id,
+                                 mm_camera_stream_config_t *config);
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info);
+int32_t mm_channel_start(mm_channel_t *my_obj);
+int32_t mm_channel_stop(mm_channel_t *my_obj);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+                                     uint32_t num_buf_requested);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj,
+                                         uint32_t frame_idx);
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+        mm_channel_queue_t * queue, cam_stream_type_t cam_type);
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj,
+        uint32_t stream_id);
+
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                    mm_evt_paylod_do_stream_action_t *payload);
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  mm_evt_paylod_map_stream_buf_t *payload);
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    mm_evt_paylod_unmap_stream_buf_t *payload);
+
+/* state machine function declare */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+                                             mm_channel_queue_t * queue,
+                                             mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t *my_obj,
+                                             mm_channel_queue_t *queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t *my_obj,
+                                 mm_channel_queue_t *queue);
+
+static int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+                                           mm_camera_generic_cmd_t *p_gen_cmd);
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+                                          mm_channel_queue_t * queue);
+/*===========================================================================
+ * FUNCTION   : mm_channel_util_get_stream_by_handler
+ *
+ * DESCRIPTION: utility function to get a stream object from its handle
+ *
+ * PARAMETERS :
+ *   @ch_obj : ptr to a channel object
+ *   @handler: stream handle
+ *
+ * RETURN     : ptr to a stream object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+                                    mm_channel_t * ch_obj,
+                                    uint32_t handler)
+{
+    int i;
+    mm_stream_t *s_obj = NULL;
+    for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
+            (handler == ch_obj->streams[i].my_hdl)) {
+            s_obj = &ch_obj->streams[i];
+            break;
+        }
+    }
+    return s_obj;
+}
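+
+/* NOTE: the lookup above is a linear scan of the fixed-size streams[] array
+ * (MAX_STREAM_NUM_IN_BUNDLE entries); it returns NULL when the handle does
+ * not match any stream that is currently in use. */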
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_dispatch_super_buf
+ *
+ * DESCRIPTION: dispatch super buffer of bundle to registered user
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_dispatch_super_buf(mm_camera_cmdcb_t *cmd_cb,
+                                          void* user_data)
+{
+    mm_channel_t * my_obj = (mm_channel_t *)user_data;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    if (MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB != cmd_cb->cmd_type) {
+        CDBG_ERROR("%s: Wrong cmd_type (%d) for super buf dataCB",
+                   __func__, cmd_cb->cmd_type);
+        return;
+    }
+
+    if (my_obj->bundle.super_buf_notify_cb) {
+        my_obj->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, my_obj->bundle.user_data);
+    }
+}
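+
+/* NOTE: this dispatcher is driven by the channel's cb_thread:
+ * mm_channel_process_stream_buf() below enqueues SUPER_BUF_DATA_CB nodes and
+ * posts the thread's semaphore, and this function simply forwards the
+ * matched superbuf to the callback registered for the bundle. */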
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_process_stream_buf
+ *
+ * DESCRIPTION: handle an incoming buffer from a stream in a bundle. Matching
+ *              logic is performed on incoming stream frames. Depending on the
+ *              bundle attribute, matched frames are either stored in the
+ *              superbuf queue or sent to the upper layer through the
+ *              registered callback.
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+                                          void *user_data)
+{
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    mm_channel_queue_node_t *node = NULL;
+    mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+    if (NULL == ch_obj) {
+        return;
+    }
+    if (MM_CAMERA_CMD_TYPE_DATA_CB  == cmd_cb->cmd_type) {
+        /* comp_and_enqueue */
+        mm_channel_superbuf_comp_and_enqueue(
+                        ch_obj,
+                        &ch_obj->bundle.superbuf_queue,
+                        &cmd_cb->u.buf);
+    } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB  == cmd_cb->cmd_type) {
+        /* skip frames if needed */
+        ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
+        mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+        if (ch_obj->pending_cnt > 0
+            && (ch_obj->needLEDFlash == TRUE ||
+                ch_obj->need3ABracketing == TRUE)
+            && (ch_obj->manualZSLSnapshot == FALSE)
+            && ch_obj->startZSlSnapshotCalled == FALSE) {
+
+            CDBG_HIGH("%s: need flash, start zsl snapshot", __func__);
+            mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+            ch_obj->startZSlSnapshotCalled = TRUE;
+            ch_obj->needLEDFlash = FALSE;
+            ch_obj->previewSkipCnt = 0;
+        } else if ((ch_obj->pending_cnt == 0 && ch_obj->startZSlSnapshotCalled == TRUE)
+                && (ch_obj->manualZSLSnapshot == FALSE)) {
+            CDBG_HIGH("%s: got picture cancelled, stop zsl snapshot", __func__);
+            mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+            ch_obj->startZSlSnapshotCalled = FALSE;
+            ch_obj->needLEDFlash = FALSE;
+            ch_obj->need3ABracketing = FALSE;
+        }
+    } else if (MM_CAMERA_CMD_TYPE_START_ZSL == cmd_cb->cmd_type) {
+            ch_obj->manualZSLSnapshot = TRUE;
+            mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+    } else if (MM_CAMERA_CMD_TYPE_STOP_ZSL == cmd_cb->cmd_type) {
+            ch_obj->manualZSLSnapshot = FALSE;
+            mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+    } else if (MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY == cmd_cb->cmd_type) {
+           ch_obj->bundle.superbuf_queue.attr.notify_mode = cmd_cb->u.notify_mode;
+    } else if (MM_CAMERA_CMD_TYPE_FLUSH_QUEUE  == cmd_cb->cmd_type) {
+        ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.frame_idx;
+        mm_channel_superbuf_flush(ch_obj,
+                &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+        return;
+    } else if (MM_CAMERA_CMD_TYPE_GENERAL == cmd_cb->cmd_type) {
+        CDBG_HIGH("%s:%d] MM_CAMERA_CMD_TYPE_GENERAL", __func__, __LINE__);
+        switch (cmd_cb->u.gen_cmd.type) {
+            case MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING:
+            case MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING:
+            case MM_CAMERA_GENERIC_CMD_TYPE_MTF_BRACKETING: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                CDBG_HIGH("%s:%d] MM_CAMERA_GENERIC_CMDTYPE_AF_BRACKETING %u",
+                    __func__, __LINE__, start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    CDBG_HIGH("%s:%d] need AE bracketing, start zsl snapshot",
+                        __func__, __LINE__);
+                    ch_obj->need3ABracketing = TRUE;
+                } else {
+                    ch_obj->need3ABracketing = FALSE;
+                }
+            }
+                break;
+            case MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                CDBG_HIGH("%s:%d] MM_CAMERA_GENERIC_CMDTYPE_FLASH_BRACKETING %u",
+                    __func__, __LINE__, start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    CDBG_HIGH("%s:%d] need flash bracketing",
+                        __func__, __LINE__);
+                    ch_obj->isFlashBracketingEnabled = TRUE;
+                } else {
+                    ch_obj->isFlashBracketingEnabled = FALSE;
+                }
+            }
+                break;
+            case MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                CDBG_HIGH("%s:%d] MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X %u",
+                    __func__, __LINE__, start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    CDBG_HIGH("%s:%d] need zoom 1x frame",
+                        __func__, __LINE__);
+                    ch_obj->isZoom1xFrameRequested = TRUE;
+                } else {
+                    ch_obj->isZoom1xFrameRequested = FALSE;
+                }
+            }
+                break;
+            default:
+                CDBG_ERROR("%s:%d] Error: Invalid command", __func__, __LINE__);
+                break;
+        }
+    }
+    notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+    /* bufdone for overflowed bufs */
+    mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+    /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+    while ( (ch_obj->pending_cnt > 0) ||
+            (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode) ) {
+
+        /* dequeue */
+        node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue);
+        if (NULL != node) {
+            /* decrease pending_cnt */
+            CDBG("%s: Super Buffer received, Call client callback, pending_cnt=%d",
+                 __func__, ch_obj->pending_cnt);
+            if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+                ch_obj->pending_cnt--;
+
+                if ((ch_obj->pending_cnt == 0 && ch_obj->startZSlSnapshotCalled == TRUE)
+                        && (ch_obj->manualZSLSnapshot == FALSE)) {
+                    CDBG_HIGH("%s: received all frames requested, stop zsl snapshot", __func__);
+                    ch_obj->previewSkipCnt = MM_CAMERA_POST_FLASH_PREVIEW_SKIP_CNT;
+                    mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+                    ch_obj->startZSlSnapshotCalled = FALSE;
+                }
+            }
+
+            /* dispatch superbuf */
+            if (NULL != ch_obj->bundle.super_buf_notify_cb) {
+                uint8_t i;
+                mm_camera_cmdcb_t* cb_node = NULL;
+
+                CDBG("%s: Send superbuf to HAL, pending_cnt=%d",
+                     __func__, ch_obj->pending_cnt);
+
+                /* send cam_sem_post to wake up cb thread to dispatch super buffer */
+                cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+                if (NULL != cb_node) {
+                    memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+                    cb_node->cmd_type = MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB;
+                    cb_node->u.superbuf.num_bufs = node->num_of_bufs;
+                    for (i=0; i<node->num_of_bufs; i++) {
+                        cb_node->u.superbuf.bufs[i] = node->super_buf[i].buf;
+                    }
+                    cb_node->u.superbuf.camera_handle = ch_obj->cam_obj->my_hdl;
+                    cb_node->u.superbuf.ch_id = ch_obj->my_hdl;
+
+                    /* enqueue to cb thread */
+                    cam_queue_enq(&(ch_obj->cb_thread.cmd_queue), cb_node);
+
+                    /* wake up cb thread */
+                    cam_sem_post(&(ch_obj->cb_thread.cmd_sem));
+                } else {
+                    CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+                    /* buf done with the nonuse super buf */
+                    for (i=0; i<node->num_of_bufs; i++) {
+                        mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                    }
+                }
+            } else {
+                /* buf done with the nonuse super buf */
+                uint8_t i;
+                for (i=0; i<node->num_of_bufs; i++) {
+                    mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                }
+            }
+            free(node);
+        } else {
+            /* no superbuf avail, break the loop */
+            break;
+        }
+    }
+}
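+
+/* NOTE: the handler above is the channel cmd-thread workhorse. DATA_CB
+ * events are matched and enqueued into the superbuf queue; REQ_DATA_CB sets
+ * pending_cnt and, when LED flash or 3A bracketing is needed, automatically
+ * starts ZSL snapshot (stopping it again once the request is fulfilled or
+ * cancelled); the remaining command types adjust the notify mode, flush the
+ * queue, or toggle the bracketing/zoom-1x flags. The dispatch loop at the
+ * end hands matched superbufs to the cb thread, or requeues the buffers if
+ * no callback is registered or allocation fails. */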
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn
+ *
+ * DESCRIPTION: channel finite state machine entry function. Depends on channel
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = -1;
+
+    CDBG("%s : E state = %d", __func__, my_obj->state);
+    switch (my_obj->state) {
+    case MM_CHANNEL_STATE_NOTUSED:
+        rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_STOPPED:
+        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_ACTIVE:
+        rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_PAUSED:
+        rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+        break;
+    default:
+        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+        break;
+    }
+
+    /* unlock ch_lock */
+    pthread_mutex_unlock(&my_obj->ch_lock);
+    CDBG("%s : X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_notused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in NOT_USED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    int32_t rc = -1;
+
+    switch (evt) {
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_stopped
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in STOPPED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s : E evt = %d", __func__, evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_ADD_STREAM:
+        {
+            uint32_t s_hdl = 0;
+            s_hdl = mm_channel_add_stream(my_obj);
+            *((uint32_t*)out_val) = s_hdl;
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_LINK_STREAM:
+        {
+            mm_camera_stream_link_t *stream_link = NULL;
+            uint32_t s_hdl = 0;
+            stream_link = (mm_camera_stream_link_t *) in_val;
+            s_hdl = mm_channel_link_stream(my_obj, stream_link);
+            *((uint32_t*)out_val) = s_hdl;
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_DEL_STREAM:
+        {
+            uint32_t s_id = *((uint32_t *)in_val);
+            rc = mm_channel_del_stream(my_obj, s_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_START:
+        {
+            rc = mm_channel_start(my_obj);
+            /* first stream started in stopped state
+             * move to active state */
+            if (0 == rc) {
+                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_STREAM:
+        {
+            mm_evt_paylod_config_stream_t *payload =
+                (mm_evt_paylod_config_stream_t *)in_val;
+            rc = mm_channel_config_stream(my_obj,
+                                          payload->stream_id,
+                                          payload->config);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+        {
+            cam_bundle_config_t *payload =
+                (cam_bundle_config_t *)in_val;
+            rc = mm_channel_get_bundle_info(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DELETE:
+        {
+            mm_channel_release(my_obj);
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+        {
+            uint32_t stream_id = *((uint32_t *)in_val);
+            rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            mm_evt_paylod_map_stream_buf_t *payload =
+                (mm_evt_paylod_map_stream_buf_t *)in_val;
+            rc = mm_channel_map_stream_buf(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            mm_evt_paylod_unmap_stream_buf_t *payload =
+                (mm_evt_paylod_unmap_stream_buf_t *)in_val;
+            rc = mm_channel_unmap_stream_buf(my_obj, payload);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
+                   __func__, my_obj->state, evt);
+        break;
+    }
+    CDBG("%s : E rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_active
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in ACTIVE state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+
+    CDBG("%s : E evt = %d", __func__, evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_STOP:
+        {
+            rc = mm_channel_stop(my_obj);
+            my_obj->state = MM_CHANNEL_STATE_STOPPED;
+        }
+        break;
+    case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+        {
+            uint32_t num_buf_requested = *((uint32_t *)in_val);
+            rc = mm_channel_request_super_buf(my_obj, num_buf_requested);
+        }
+        break;
+    case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+        {
+            rc = mm_channel_cancel_super_buf_request(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE:
+        {
+            uint32_t frame_idx = *((uint32_t *)in_val);
+            rc = mm_channel_flush_super_buf_queue(my_obj, frame_idx);
+        }
+        break;
+    case MM_CHANNEL_EVT_START_ZSL_SNAPSHOT:
+        {
+            rc = mm_channel_start_zsl_snapshot(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT:
+        {
+            rc = mm_channel_stop_zsl_snapshot(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE:
+        {
+            mm_camera_super_buf_notify_mode_t notify_mode =
+                *((mm_camera_super_buf_notify_mode_t *)in_val);
+            rc = mm_channel_config_notify_mode(my_obj, notify_mode);
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+        {
+            uint32_t stream_id = *((uint32_t *)in_val);
+            rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            mm_evt_paylod_map_stream_buf_t *payload =
+                (mm_evt_paylod_map_stream_buf_t *)in_val;
+            if (payload != NULL) {
+                uint8_t type = payload->buf_type;
+                if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+                        (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+                    rc = mm_channel_map_stream_buf(my_obj, payload);
+                }
+            } else {
+                CDBG_ERROR("%s: cannot map regular stream buf in active state", __func__);
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            mm_evt_paylod_unmap_stream_buf_t *payload =
+                (mm_evt_paylod_unmap_stream_buf_t *)in_val;
+            if (payload != NULL) {
+                uint8_t type = payload->buf_type;
+                if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+                        (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+                    rc = mm_channel_unmap_stream_buf(my_obj, payload);
+                }
+            } else {
+                CDBG_ERROR("%s: cannot unmap regular stream buf in active state", __func__);
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_AF_BRACKETING:
+        {
+            CDBG_HIGH("MM_CHANNEL_EVT_AF_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_MTF_BRACKETING:
+        {
+            CDBG_HIGH("MM_CHANNEL_EVT_MTF_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_MTF_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_AE_BRACKETING:
+        {
+            CDBG_HIGH("MM_CHANNEL_EVT_AE_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_FLASH_BRACKETING:
+        {
+            CDBG_HIGH("MM_CHANNEL_EVT_FLASH_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_ZOOM_1X:
+        {
+            CDBG_HIGH("MM_CHANNEL_EVT_ZOOM_1X");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    CDBG("%s : X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_paused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in PAUSED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+
+    /* currently we are not supporting pause/resume channel */
+    CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+               __func__, my_obj->state, evt, in_val, out_val);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_init
+ *
+ * DESCRIPTION: initialize a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object be to initialized
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in the channel for bundling.
+ *==========================================================================*/
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata)
+{
+    int32_t rc = 0;
+
+    my_obj->bundle.super_buf_notify_cb = channel_cb;
+    my_obj->bundle.user_data = userdata;
+    if (NULL != attr) {
+        my_obj->bundle.superbuf_queue.attr = *attr;
+    }
+
+    CDBG("%s : Launch data poll thread in channel open", __func__);
+    snprintf(my_obj->threadName, THREAD_NAME_SIZE, "CAM_DataPoll");
+    mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+                                 MM_CAMERA_POLL_TYPE_DATA);
+
+    /* change state to stopped state */
+    my_obj->state = MM_CHANNEL_STATE_STOPPED;
+    return rc;
+}
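+
+/* Illustrative usage (not part of this change): a minimal sketch of how a
+ * caller might initialize a bundled channel. The callback name
+ * my_super_buf_cb and the attribute values below are assumptions for the
+ * example only; only attribute fields used elsewhere in this file are
+ * relied upon.
+ *
+ *   mm_camera_channel_attr_t attr;
+ *   memset(&attr, 0, sizeof(attr));
+ *   attr.priority             = MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL;
+ *   attr.max_unmatched_frames = 3;
+ *   attr.post_frame_skip      = 0;
+ *   mm_channel_init(ch_obj, &attr, my_super_buf_cb, my_user_data);
+ *
+ * With per-stream callbacks instead of a bundle callback, attr, channel_cb
+ * and userdata may all be NULL and no super-buf matching is performed:
+ *
+ *   mm_channel_init(ch_obj, NULL, NULL, NULL);
+ */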
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_release
+ *
+ * DESCRIPTION: release a channel resource. Channel state will move to UNUSED
+ *              state after this call.
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_channel_release(mm_channel_t *my_obj)
+{
+    /* stop data poll thread */
+    mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+    /* change state to notused state */
+    my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_link_stream
+ *
+ * DESCRIPTION: link a stream from external channel into this channel
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @stream_link  : channel and stream to be linked
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+        mm_camera_stream_link_t *stream_link)
+{
+    uint8_t idx = 0;
+    uint32_t s_hdl = 0;
+    mm_stream_t *stream_obj = NULL;
+    mm_stream_t *stream = NULL;
+
+    if (NULL == stream_link) {
+        CDBG_ERROR("%s : Invalid stream link", __func__);
+        return 0;
+    }
+
+    stream = mm_channel_util_get_stream_by_handler(stream_link->ch,
+            stream_link->stream_id);
+    if (NULL == stream) {
+        return 0;
+    }
+
+    /* check available stream */
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+            stream_obj = &my_obj->streams[idx];
+            break;
+        }
+    }
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s: streams reach max, no more stream allowed to add",
+                __func__);
+        return s_hdl;
+    }
+
+    /* initialize stream object */
+    *stream_obj = *stream;
+    stream_obj->linked_stream = stream;
+    s_hdl = stream->my_hdl;
+
+    CDBG("%s : stream handle = %d", __func__, s_hdl);
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_add_stream
+ *
+ * DESCRIPTION: add a stream into the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    uint8_t idx = 0;
+    uint32_t s_hdl = 0;
+    mm_stream_t *stream_obj = NULL;
+
+    CDBG("%s : E", __func__);
+    /* check available stream */
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+            stream_obj = &my_obj->streams[idx];
+            break;
+        }
+    }
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s: streams reach max, no more stream allowed to add", __func__);
+        return s_hdl;
+    }
+
+    /* initialize stream object */
+    memset(stream_obj, 0, sizeof(mm_stream_t));
+    stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+    stream_obj->ch_obj = my_obj;
+    pthread_mutex_init(&stream_obj->buf_lock, NULL);
+    pthread_mutex_init(&stream_obj->cb_lock, NULL);
+    pthread_mutex_init(&stream_obj->cmd_lock, NULL);
+    stream_obj->state = MM_STREAM_STATE_INITED;
+
+    /* acquire stream */
+    rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+    if (0 == rc) {
+        s_hdl = stream_obj->my_hdl;
+    } else {
+        /* error during acquire, de-init */
+        pthread_mutex_destroy(&stream_obj->buf_lock);
+        pthread_mutex_destroy(&stream_obj->cb_lock);
+        pthread_mutex_destroy(&stream_obj->cmd_lock);
+        memset(stream_obj, 0, sizeof(mm_stream_t));
+    }
+    CDBG("%s : stream handle = %d", __func__, s_hdl);
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_del_stream
+ *
+ * DESCRIPTION: delete a stream from the channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assume the stream is stopped before it can be deleted
+ *==========================================================================*/
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                              uint32_t stream_id)
+{
+    int rc = -1;
+    mm_stream_t * stream_obj = NULL;
+    stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s :Invalid Stream Object for stream_id = %d",
+                   __func__, stream_id);
+        return rc;
+    }
+
+    if (stream_obj->ch_obj != my_obj) {
+        /* Only unlink stream */
+        pthread_mutex_lock(&stream_obj->linked_stream->buf_lock);
+        stream_obj->linked_stream->is_linked = 0;
+        stream_obj->linked_stream->linked_obj = NULL;
+        pthread_mutex_unlock(&stream_obj->linked_stream->buf_lock);
+        memset(stream_obj, 0, sizeof(mm_stream_t));
+
+        return 0;
+    }
+
+    rc = mm_stream_fsm_fn(stream_obj,
+                          MM_STREAM_EVT_RELEASE,
+                          NULL,
+                          NULL);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id,
+                                   mm_camera_stream_config_t *config)
+{
+    int rc = -1;
+    mm_stream_t * stream_obj = NULL;
+    CDBG("%s : E stream ID = %d", __func__, stream_id);
+    stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s :Invalid Stream Object for stream_id = %d", __func__, stream_id);
+        return rc;
+    }
+
+    if (stream_obj->ch_obj != my_obj) {
+        /* No op. on linked streams */
+        return 0;
+    }
+
+    /* set stream fmt */
+    rc = mm_stream_fsm_fn(stream_obj,
+                          MM_STREAM_EVT_SET_FMT,
+                          (void *)config,
+                          NULL);
+    CDBG("%s : X rc = %d",__func__,rc);
+    return rc;
+}
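+
+/* Illustrative flow (not part of this change): a hedged sketch of the usual
+ * add-then-configure sequence. ch_obj and stream_config are assumed to be
+ * provided by the caller; stream_config is a filled-in
+ * mm_camera_stream_config_t.
+ *
+ *   uint32_t stream_id = mm_channel_add_stream(ch_obj);
+ *   if (stream_id > 0) {
+ *       if (0 != mm_channel_config_stream(ch_obj, stream_id, &stream_config)) {
+ *           mm_channel_del_stream(ch_obj, stream_id);
+ *       }
+ *   }
+ */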
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel, which should include all
+ *              streams within this channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info)
+{
+    int i;
+    mm_stream_t *s_obj = NULL;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+    int32_t rc = 0;
+
+    memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    bundle_info->bundle_id = my_obj->my_hdl;
+    bundle_info->num_of_streams = 0;
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                stream_type = s_obj->stream_info->stream_type;
+                if ((CAM_STREAM_TYPE_METADATA != stream_type) &&
+                        (s_obj->ch_obj == my_obj)) {
+                    bundle_info->stream_ids[bundle_info->num_of_streams++] =
+                                                        s_obj->server_stream_id;
+                }
+            } else {
+                CDBG_ERROR("%s: cannot find stream obj (%d) by handler (%d)",
+                           __func__, i, my_obj->streams[i].my_hdl);
+                rc = -1;
+                break;
+            }
+        }
+    }
+    if (rc != 0) {
+        /* error, reset to 0 */
+        memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_start
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    int i, j;
+    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+    uint8_t num_streams_to_start = 0;
+    mm_stream_t *s_obj = NULL;
+    int meta_stream_idx = 0;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                stream_type = s_obj->stream_info->stream_type;
+                /* remember meta data stream index */
+                if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
+                        (s_obj->ch_obj == my_obj)) {
+                    meta_stream_idx = num_streams_to_start;
+                }
+                s_objs[num_streams_to_start++] = s_obj;
+            }
+        }
+    }
+
+    if (meta_stream_idx > 0 ) {
+        /* always start meta data stream first, so switch the stream object with the first one */
+        s_obj = s_objs[0];
+        s_objs[0] = s_objs[meta_stream_idx];
+        s_objs[meta_stream_idx] = s_obj;
+    }
+
+    if (NULL != my_obj->bundle.super_buf_notify_cb) {
+        /* need to send up cb, therefore launch thread */
+        /* init superbuf queue */
+        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+        my_obj->bundle.superbuf_queue.num_streams = num_streams_to_start;
+        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
+
+        for (i = 0; i < num_streams_to_start; i++) {
+            /* Only bundle streams that belong to the channel */
+            if(s_objs[i]->ch_obj == my_obj) {
+                /* set bundled flag to streams */
+                s_objs[i]->is_bundled = 1;
+            }
+            /* record the stream handle in the superbuf queue's bundled list */
+            my_obj->bundle.superbuf_queue.bundled_streams[i] = s_objs[i]->my_hdl;
+        }
+
+        /* launch cb thread for dispatching super buf through cb */
+        snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
+        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
+                                    mm_channel_dispatch_super_buf,
+                                    (void*)my_obj);
+
+        /* launch cmd thread for super buf dataCB */
+        snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
+        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+                                    mm_channel_process_stream_buf,
+                                    (void*)my_obj);
+
+        /* set flag to TRUE */
+        my_obj->bundle.is_active = TRUE;
+    }
+
+    for (i = 0; i < num_streams_to_start; i++) {
+        /* streams that are linked to this channel should not be started */
+        if (s_objs[i]->ch_obj != my_obj) {
+            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+            s_objs[i]->linked_stream->linked_obj = my_obj;
+            s_objs[i]->linked_stream->is_linked = 1;
+            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+            continue;
+        }
+
+        /* all streams within a channel should be started at the same time */
+        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
+            CDBG_ERROR("%s: stream already started idx(%d)", __func__, i);
+            rc = -1;
+            break;
+        }
+
+        /* allocate buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_GET_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: get buf failed at idx(%d)", __func__, i);
+            break;
+        }
+
+        /* reg buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_REG_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: reg buf failed at idx(%d)", __func__, i);
+            break;
+        }
+
+        /* start stream */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_START,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: start stream failed at idx(%d)", __func__, i);
+            break;
+        }
+    }
+
+    /* error handling */
+    if (0 != rc) {
+        for (j=0; j<=i; j++) {
+            if (s_objs[j]->ch_obj != my_obj) {
+                /* Only unlink stream */
+                pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
+                s_objs[j]->linked_stream->is_linked = 0;
+                s_objs[j]->linked_stream->linked_obj = NULL;
+                pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);
+
+                if (TRUE == my_obj->bundle.is_active) {
+                    mm_channel_superbuf_flush(my_obj,
+                            &my_obj->bundle.superbuf_queue,
+                            s_objs[j]->stream_info->stream_type);
+                }
+                memset(s_objs[j], 0, sizeof(mm_stream_t));
+
+                continue;
+            }
+            /* stop streams*/
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_STOP,
+                             NULL,
+                             NULL);
+
+            /* unreg buf */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_UNREG_BUF,
+                             NULL,
+                             NULL);
+
+            /* put buf back */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_PUT_BUF,
+                             NULL,
+                             NULL);
+        }
+
+        /* destroy super buf cmd thread */
+        if (TRUE == my_obj->bundle.is_active) {
+            /* first stop bundle thread */
+            mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+            mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+            /* deinit superbuf queue */
+            mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+            /* memset bundle info */
+            memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    int i;
+    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+    uint8_t num_streams_to_stop = 0;
+    mm_stream_t *s_obj = NULL;
+    int meta_stream_idx = 0;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                stream_type = s_obj->stream_info->stream_type;
+                /* remember meta data stream index */
+                if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
+                        (s_obj->ch_obj == my_obj)) {
+                    meta_stream_idx = num_streams_to_stop;
+                }
+                s_objs[num_streams_to_stop++] = s_obj;
+            }
+        }
+    }
+
+    if (meta_stream_idx < num_streams_to_stop - 1 ) {
+        /* always stop meta data stream last, so switch the stream object with the last one */
+        s_obj = s_objs[num_streams_to_stop - 1];
+        s_objs[num_streams_to_stop - 1] = s_objs[meta_stream_idx];
+        s_objs[meta_stream_idx] = s_obj;
+    }
+
+    for (i = 0; i < num_streams_to_stop; i++) {
+        /* streams that are linked to this channel should not be stopped */
+        if (s_objs[i]->ch_obj != my_obj) {
+            /* Only unlink stream */
+            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+            s_objs[i]->linked_stream->is_linked = 0;
+            s_objs[i]->linked_stream->linked_obj = NULL;
+            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+
+            if (TRUE == my_obj->bundle.is_active) {
+                mm_channel_superbuf_flush(my_obj,
+                        &my_obj->bundle.superbuf_queue,
+                        s_objs[i]->stream_info->stream_type);
+            }
+
+            memset(s_objs[i], 0, sizeof(mm_stream_t));
+            continue;
+        }
+
+        /* stream off */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_STOP,
+                         NULL,
+                         NULL);
+
+        /* unreg buf at kernel */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_UNREG_BUF,
+                         NULL,
+                         NULL);
+    }
+
+    /* destroy super buf cmd thread */
+    if (TRUE == my_obj->bundle.is_active) {
+        /* first stop bundle thread */
+        mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+        mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+        /* deinit superbuf queue */
+        mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+    }
+
+    /* since all streams are stopped, we are safe to
+     * release all buffers allocated in stream */
+    for (i = 0; i < num_streams_to_stop; i++) {
+        if (s_objs[i]->ch_obj != my_obj) {
+            continue;
+        }
+        /* put buf back */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_PUT_BUF,
+                         NULL,
+                         NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj, uint32_t num_buf_requested)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    /* set pending_cnt
+     * will trigger dispatching super frames if pending_cnt > 0 */
+    /* send cam_sem_post to wake up cmd thread to dispatch super buffer */
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+        node->u.req_buf.num_buf_requested = num_buf_requested;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
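+
+/* Illustrative usage (not part of this change): requesting a burst of three
+ * matched super buffers and later cancelling any outstanding request. In
+ * practice these calls are reached through the channel FSM while the channel
+ * is in the ACTIVE state; this is only a sketch.
+ *
+ *   mm_channel_request_super_buf(ch_obj, 3);
+ *   ...
+ *   mm_channel_cancel_super_buf_request(ch_obj);   // equivalent to requesting 0
+ */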
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    /* reset pending_cnt */
+    rc = mm_channel_request_super_buf(my_obj, 0);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @frame_idx : frame idx until which to flush all superbufs
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj, uint32_t frame_idx)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+        node->u.frame_idx = frame_idx;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_notify_mode
+ *
+ * DESCRIPTION: configure notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @notify_mode : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->u.notify_mode = notify_mode;
+        node->cmd_type = MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_START_ZSL;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_STOP_ZSL;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* Redirect to linked stream */
+            rc = mm_stream_fsm_fn(s_obj->linked_stream,
+                    MM_STREAM_EVT_QBUF,
+                    (void *)buf,
+                    NULL);
+        } else {
+            rc = mm_stream_fsm_fn(s_obj,
+                    MM_STREAM_EVT_QBUF,
+                    (void *)buf,
+                    NULL);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj, uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* Redirect to linked stream */
+            rc = mm_stream_fsm_fn(s_obj->linked_stream,
+                    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+                    NULL,
+                    NULL);
+        } else {
+            rc = mm_stream_fsm_fn(s_obj,
+                    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+                    NULL,
+                    NULL);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_set_stream_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_SET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_stream_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be retrieved from the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_GET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_do_stream_action
+ *
+ * DESCRIPTION: request server to perform a stream-based action. May be removed
+ *              later if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @actions      : ptr to an action struct buf to be performed by server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                   mm_evt_paylod_do_stream_action_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_DO_ACTION,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload for mapping
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  mm_evt_paylod_map_stream_buf_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_map_buf(s_obj,
+                               payload->buf_type,
+                               payload->buf_idx,
+                               payload->plane_idx,
+                               payload->fd,
+                               payload->size);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to unmap payload
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    mm_evt_paylod_unmap_stream_buf_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_unmap_buf(s_obj, payload->buf_type,
+                                 payload->buf_idx, payload->plane_idx);
+    }
+
+    return rc;
+}
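+
+/* Illustrative usage (not part of this change): a hedged sketch of mapping an
+ * offline input buffer to the server and unmapping it afterwards. buf_fd and
+ * buf_len are assumed to describe a buffer the caller already owns; passing
+ * plane_idx = -1 for a whole-buffer mapping is an assumption of this example.
+ *
+ *   mm_evt_paylod_map_stream_buf_t map_payload;
+ *   memset(&map_payload, 0, sizeof(map_payload));
+ *   map_payload.stream_id = stream_id;
+ *   map_payload.buf_type  = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+ *   map_payload.buf_idx   = 0;
+ *   map_payload.plane_idx = -1;
+ *   map_payload.fd        = buf_fd;
+ *   map_payload.size      = buf_len;
+ *   mm_channel_map_stream_buf(ch_obj, &map_payload);
+ *
+ *   mm_evt_paylod_unmap_stream_buf_t unmap_payload;
+ *   memset(&unmap_payload, 0, sizeof(unmap_payload));
+ *   unmap_payload.stream_id = stream_id;
+ *   unmap_payload.buf_type  = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+ *   unmap_payload.buf_idx   = 0;
+ *   unmap_payload.plane_idx = -1;
+ *   mm_channel_unmap_stream_buf(ch_obj, &unmap_payload);
+ */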
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_init
+ *
+ * DESCRIPTION: initialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be initialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+    return cam_queue_init(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_deinit
+ *
+ * DESCRIPTION: deinitialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be deinitialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+    return cam_queue_deinit(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_util_seq_comp_w_rollover
+ *
+ * DESCRIPTION: utility function to handle sequence number comparison with rollover
+ *
+ * PARAMETERS :
+ *   @v1      : first value to be compared
+ *   @v2      : second value to be compared
+ *
+ * RETURN     : int8_t type of comparison result
+ *              >0  -- v1 larger than v2
+ *              =0  -- v1 equal to v2
+ *              <0  -- v1 smaller than v2
+ *==========================================================================*/
+int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
+                                           uint32_t v2)
+{
+    int8_t ret = 0;
+
+    /* TODO: need to handle the case where v2 rolls over to 0 */
+    if (v1 > v2) {
+        ret = 1;
+    } else if (v1 < v2) {
+        ret = -1;
+    }
+
+    return ret;
+}
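+
+/* A possible rollover-aware variant (illustrative only, not used by this
+ * change): if frame indices are assumed to wrap modulo 2^32 and two compared
+ * values are never more than 2^31 apart, the signed difference of the
+ * unsigned values yields an ordering that survives the wrap:
+ *
+ *   int8_t seq_comp_w_rollover(uint32_t v1, uint32_t v2)
+ *   {
+ *       int32_t diff = (int32_t)(v1 - v2);
+ *       return (diff > 0) ? 1 : ((diff < 0) ? -1 : 0);
+ *   }
+ */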
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_handle_metadata
+ *
+ * DESCRIPTION: Handle frame matching logic change due to metadata
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_handle_metadata(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t * queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+
+    int rc = 0;
+    mm_stream_t* stream_obj = NULL;
+    uint8_t is_crop_1x_found = 0;
+    uint32_t snapshot_stream_id = 0;
+    uint32_t i;
+    /* Set expected frame id to a future frame idx, large enough to wait
+    * for good_frame_idx_range, and small enough to still capture an image */
+    const uint32_t max_future_frame_offset = 100U;
+
+    stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+                buf_info->stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s: Invalid Stream Object for stream_id = %d",
+                   __func__, buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+    if (NULL == stream_obj->stream_info) {
+        CDBG_ERROR("%s: NULL stream info for stream_id = %d",
+                    __func__, buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+
+    if ((CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) &&
+            (stream_obj->ch_obj == ch_obj)) {
+        const cam_metadata_info_t *metadata;
+        metadata = (const cam_metadata_info_t *)buf_info->buf->buffer;
+
+        if (NULL == metadata) {
+            CDBG_ERROR("%s: NULL metadata buffer for metadata stream",
+                       __func__);
+            rc = -1;
+            goto end;
+        }
+
+        for (i=0; i<ARRAY_SIZE(ch_obj->streams); i++) {
+            if ((NULL != ch_obj->streams[i].stream_info) &&
+                    (CAM_STREAM_TYPE_SNAPSHOT ==
+                     ch_obj->streams[i].stream_info->stream_type)) {
+                snapshot_stream_id = ch_obj->streams[i].server_stream_id;
+                break;
+            }
+        }
+
+        if (metadata->is_crop_valid) {
+            for (i=0; i<metadata->crop_data.num_of_streams; i++) {
+                if (snapshot_stream_id == metadata->crop_data.crop_info[i].stream_id) {
+                    if (!metadata->crop_data.crop_info[i].crop.left &&
+                            !metadata->crop_data.crop_info[i].crop.top) {
+                        is_crop_1x_found = 1;
+                        break;
+                    }
+                }
+            }
+        }
+
+        if (metadata->is_prep_snapshot_done_valid &&
+                metadata->is_good_frame_idx_range_valid) {
+            CDBG_ERROR("%s: prep_snapshot_done and good_idx_range shouldn't be valid at the same time", __func__);
+            rc = -1;
+            goto end;
+        }
+
+        if (ch_obj->isZoom1xFrameRequested) {
+            if (is_crop_1x_found) {
+                ch_obj->isZoom1xFrameRequested = 0;
+                queue->expected_frame_id = buf_info->frame_idx + 1;
+            } else {
+                queue->expected_frame_id += max_future_frame_offset;
+                /* Flush unwanted frames */
+                mm_channel_superbuf_flush_matched(ch_obj, queue);
+            }
+            goto end;
+        }
+
+        if (metadata->is_prep_snapshot_done_valid) {
+            if (metadata->prep_snapshot_done_state == NEED_FUTURE_FRAME) {
+                queue->expected_frame_id += max_future_frame_offset;
+
+                mm_channel_superbuf_flush(ch_obj,
+                        queue, CAM_STREAM_TYPE_DEFAULT);
+
+                ch_obj->needLEDFlash = TRUE;
+            } else {
+                ch_obj->needLEDFlash = FALSE;
+            }
+        } else if (metadata->is_good_frame_idx_range_valid) {
+            if (metadata->good_frame_idx_range.min_frame_idx >
+                queue->expected_frame_id) {
+                CDBG_HIGH("%s: min_frame_idx %d is greater than expected_frame_id %d",
+                    __func__, metadata->good_frame_idx_range.min_frame_idx,
+                    queue->expected_frame_id);
+            }
+            queue->expected_frame_id =
+                metadata->good_frame_idx_range.min_frame_idx;
+        } else if (ch_obj->need3ABracketing &&
+                   !metadata->is_good_frame_idx_range_valid) {
+               /* Flush unwanted frames */
+               mm_channel_superbuf_flush_matched(ch_obj, queue);
+               queue->expected_frame_id += max_future_frame_offset;
+        }
+        if (ch_obj->isFlashBracketingEnabled &&
+            metadata->is_good_frame_idx_range_valid) {
+            /* Flash bracketing needs two frames, with & without led flash.
+             * In the valid range, the min frame is with led flash and the
+             * max frame is without led flash */
+            queue->expected_frame_id =
+                metadata->good_frame_idx_range.min_frame_idx;
+            /* max frame is without led flash */
+            queue->expected_frame_id_without_led =
+                metadata->good_frame_idx_range.max_frame_idx;
+        } else if (metadata->is_good_frame_idx_range_valid) {
+             if (metadata->good_frame_idx_range.min_frame_idx >
+                 queue->expected_frame_id) {
+                 CDBG_HIGH("%s: min_frame_idx %d is greater than expected_frame_id %d",
+                     __func__, metadata->good_frame_idx_range.min_frame_idx,
+                     queue->expected_frame_id);
+             }
+             queue->expected_frame_id =
+                 metadata->good_frame_idx_range.min_frame_idx;
+             ch_obj->need3ABracketing = FALSE;
+        }
+    }
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_comp_and_enqueue
+ *
+ * DESCRIPTION: implementation for matching logic for superbuf
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_comp_and_enqueue(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t *queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+    uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
+    struct cam_list *last_buf, *insert_before_buf;
+
+    CDBG("%s: E", __func__);
+    for (buf_s_idx = 0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+        if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+            break;
+        }
+    }
+    if (buf_s_idx == queue->num_streams) {
+        CDBG_ERROR("%s: buf from stream (%d) not bundled", __func__, buf_info->stream_id);
+        return -1;
+    }
+
+    if (mm_channel_handle_metadata(ch_obj, queue, buf_info) < 0) {
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return -1;
+    }
+
+    mm_stream_t* stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+            buf_info->stream_id);
+    if (stream_obj == NULL) {
+        CDBG_ERROR("%s: stream lookup returned NULL for stream_id = %d",
+                   __func__, buf_info->stream_id);
+        return -1;
+    }
+
+    if (CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) {
+        const cam_metadata_info_t *metadata =
+            (const cam_metadata_info_t *)buf_info->buf->buffer;
+        CDBG("meta_valid: frame_id = %d meta_valid = %d\n",
+             metadata->meta_valid_params.meta_frame_id,
+             metadata->is_meta_valid);
+        if (!(metadata->is_meta_valid)) {
+            /* metadata not yet valid; return the buffer and wait for the next one */
+            mm_channel_qbuf(ch_obj, buf_info->buf);
+            return 0;
+        }
+    }
+    if (mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+                                            queue->expected_frame_id) < 0) {
+        /* incoming buf is older than expected buf id, will discard it */
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return 0;
+    }
+
+    if (MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL != queue->attr.priority) {
+        /* TODO */
+        /* need to decide if we want to queue the frame based on focus or exposure
+         * if frame not to be queued, we need to qbuf it back */
+    }
+
+    /* comp */
+    pthread_mutex_lock(&queue->que.lock);
+    head = &queue->que.head.list;
+    /* start from the oldest entry in the queue, which possibly has no match yet */
+    pos = head->next;
+
+    found_super_buf = 0;
+    unmatched_bundles = 0;
+    last_buf = NULL;
+    insert_before_buf = NULL;
+    while (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if (NULL != super_buf) {
+            if (super_buf->matched) {
+                /* find a matched super buf, move to next one */
+                pos = pos->next;
+                continue;
+            } else if ( buf_info->frame_idx == super_buf->frame_idx ) {
+                /* have an unmatched super buf that matches our frame idx,
+                 *  break the loop */
+                found_super_buf = 1;
+                break;
+            } else {
+                unmatched_bundles++;
+                if ( NULL == last_buf ) {
+                    if ( super_buf->frame_idx < buf_info->frame_idx ) {
+                        last_buf = pos;
+                    }
+                }
+                if ( NULL == insert_before_buf ) {
+                    if ( super_buf->frame_idx > buf_info->frame_idx ) {
+                        insert_before_buf = pos;
+                    }
+                }
+                pos = pos->next;
+            }
+        }
+    }
+
+    if ( found_super_buf ) {
+            if(super_buf->super_buf[buf_s_idx].frame_idx != 0) {
+               CDBG_ERROR(" %s : **** ERROR CASE Same stream is already in queue! **** ", __func__);
+            }
+            super_buf->super_buf[buf_s_idx] = *buf_info;
+            /* check if superbuf is all matched */
+            super_buf->matched = 1;
+            for (i=0; i < super_buf->num_of_bufs; i++) {
+                if (super_buf->super_buf[i].frame_idx == 0) {
+                    super_buf->matched = 0;
+                    break;
+                }
+            }
+
+            if (super_buf->matched) {
+                if(ch_obj->isFlashBracketingEnabled) {
+                    queue->expected_frame_id =
+                        queue->expected_frame_id_without_led;
+                    ch_obj->isFlashBracketingEnabled = FALSE;
+                } else {
+                   queue->expected_frame_id = buf_info->frame_idx
+                                              + queue->attr.post_frame_skip;
+                }
+                queue->match_cnt++;
+                /* Any older unmatched buffers need to be released */
+                if ( last_buf ) {
+                    while ( last_buf != pos ) {
+                        node = member_of(last_buf, cam_node_t, list);
+                        super_buf = (mm_channel_queue_node_t*)node->data;
+                        if (NULL != super_buf) {
+                            for (i=0; i<super_buf->num_of_bufs; i++) {
+                                if (super_buf->super_buf[i].frame_idx != 0) {
+                                        mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                                }
+                            }
+                            queue->que.size--;
+                            last_buf = last_buf->next;
+                            cam_list_del_node(&node->list);
+                            free(node);
+                            free(super_buf);
+                        } else {
+                            CDBG_ERROR(" %s : Invalid superbuf in queue!", __func__);
+                            break;
+                        }
+                    }
+                }
+            }
+    } else {
+        if (  ( queue->attr.max_unmatched_frames < unmatched_bundles ) &&
+              ( NULL == last_buf ) ) {
+            /* too many unmatched bundles and the incoming frame is older
+             * than all of them; return the buffer */
+            mm_channel_qbuf(ch_obj, buf_info->buf);
+        } else {
+            if ( queue->attr.max_unmatched_frames < unmatched_bundles ) {
+                /* release the oldest bundled superbuf */
+                node = member_of(last_buf, cam_node_t, list);
+                super_buf = (mm_channel_queue_node_t*)node->data;
+                for (i=0; i<super_buf->num_of_bufs; i++) {
+                    if (super_buf->super_buf[i].frame_idx != 0) {
+                            mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                    }
+                }
+                queue->que.size--;
+                node = member_of(last_buf, cam_node_t, list);
+                cam_list_del_node(&node->list);
+                free(node);
+                free(super_buf);
+            }
+            /* insert the new frame at the appropriate position. */
+
+            mm_channel_queue_node_t *new_buf = NULL;
+            cam_node_t* new_node = NULL;
+
+            new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+            new_node = (cam_node_t*)malloc(sizeof(cam_node_t));
+            if (NULL != new_buf && NULL != new_node) {
+                memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+                memset(new_node, 0, sizeof(cam_node_t));
+                new_node->data = (void *)new_buf;
+                new_buf->num_of_bufs = queue->num_streams;
+                new_buf->super_buf[buf_s_idx] = *buf_info;
+                new_buf->frame_idx = buf_info->frame_idx;
+
+                /* enqueue */
+                if ( insert_before_buf ) {
+                    cam_list_insert_before_node(&new_node->list, insert_before_buf);
+                } else {
+                    cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+                }
+                queue->que.size++;
+
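+                /* A single-stream channel has nothing to match against, so
+                 * the freshly queued bundle is complete immediately. */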
+                if(queue->num_streams == 1) {
+                    new_buf->matched = 1;
+
+                    queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+                    queue->match_cnt++;
+                }
+            } else {
+                /* No memory */
+                if (NULL != new_buf) {
+                    free(new_buf);
+                }
+                if (NULL != new_node) {
+                    free(new_node);
+                }
+                /* qbuf the new buf since we cannot enqueue */
+                mm_channel_qbuf(ch_obj, buf_info->buf);
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&queue->que.lock);
+
+    CDBG("%s: X", __func__);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *   @matched_only : if dequeued buf should be matched
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(mm_channel_queue_t * queue,
+                                                              uint8_t matched_only)
+{
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    head = &queue->que.head.list;
+    pos = head->next;
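+    /* Dequeue is strictly FIFO: only the oldest bundle at the head is
+     * considered; it is skipped (NULL returned) when the caller asks for
+     * matched bundles only and the head is still unmatched. */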
+    if (pos != head) {
+        /* get the first node */
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if ( (NULL != super_buf) &&
+             (matched_only == TRUE) &&
+             (super_buf->matched == FALSE) ) {
+            /* caller requires a matched frame only, but this super buf is not
+               matched; simply set the return ptr to NULL */
+            super_buf = NULL;
+        }
+        if (NULL != super_buf) {
+            /* remove from the queue */
+            cam_list_del_node(&node->list);
+            queue->que.size--;
+            if (super_buf->matched == TRUE) {
+                queue->match_cnt--;
+            }
+            free(node);
+        }
+    }
+
+    return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue
+ *
+ * DESCRIPTION: dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue)
+{
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    pthread_mutex_lock(&queue->que.lock);
+    super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_bufdone_overflow
+ *
+ * DESCRIPTION: keep the superbuf queue no larger than the water mark set by
+ *              the upper layer via the channel attribute
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+                                             mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        /* for continuous streaming mode, no overflow handling is needed */
+        return 0;
+    }
+
+    CDBG("%s: before match_cnt=%d, water_mark=%d",
+         __func__, queue->match_cnt, queue->attr.water_mark);
+    /* bufdone overflowed bufs */
+    pthread_mutex_lock(&queue->que.lock);
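+    /* Release the oldest matched bundles until the pending match count drops
+     * back to the configured water mark; their buffers are returned via
+     * mm_channel_qbuf. */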
+    while (queue->match_cnt > queue->attr.water_mark) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+        if (NULL != super_buf) {
+            for (i=0; i<super_buf->num_of_bufs; i++) {
+                if (NULL != super_buf->super_buf[i].buf) {
+                    mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+                }
+            }
+            free(super_buf);
+        }
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+    CDBG("%s: after match_cnt=%d, water_mark=%d",
+         __func__, queue->match_cnt, queue->attr.water_mark);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_skip
+ *
+ * DESCRIPTION: depending on the look-back configuration in the channel
+ *              attribute, unwanted superbufs will be removed from the
+ *              superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+                                 mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        /* for continuous streaming mode, no skip is needed */
+        return 0;
+    }
+
+    /* bufdone matched bufs beyond the look-back window */
+    pthread_mutex_lock(&queue->que.lock);
+    while (queue->match_cnt > queue->attr.look_back) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+        if (NULL != super_buf) {
+            for (i=0; i<super_buf->num_of_bufs; i++) {
+                if (NULL != super_buf->super_buf[i].buf) {
+                    mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+                }
+            }
+            free(super_buf);
+        }
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_flush
+ *
+ * DESCRIPTION: flush the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @cam_type: flush only this stream type (CAM_STREAM_TYPE_DEFAULT flushes all)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+        mm_channel_queue_t * queue, cam_stream_type_t cam_type)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+    /* bufdone bufs */
+    pthread_mutex_lock(&queue->que.lock);
+    super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE);
+    while (super_buf != NULL) {
+        for (i=0; i<super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                stream_type = super_buf->super_buf[i].buf->stream_type;
+                if ((CAM_STREAM_TYPE_DEFAULT == cam_type) ||
+                        (cam_type == stream_type)) {
+                    mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+                }
+            }
+        }
+        free(super_buf);
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_proc_general_cmd
+ *
+ * DESCRIPTION: process general command
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @notify_mode : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+                                      mm_camera_generic_cmd_t *p_gen_cmd)
+{
+    CDBG("%s: E",__func__);
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->u.gen_cmd = *p_gen_cmd;
+        node->cmd_type = MM_CAMERA_CMD_TYPE_GENERAL;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+    CDBG("%s: X",__func__);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_flush_matched
+ *
+ * DESCRIPTION: flush matched buffers from the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+                                  mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    /* bufdone bufs */
+    pthread_mutex_lock(&queue->que.lock);
+    super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+    while (super_buf != NULL) {
+        for (i=0; i<super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..fca4c9f
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,1789 @@
+/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/media.h>
+#include <signal.h>
+#include <media/msm_cam_sensor.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera_sock.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl = {0, {{0}}, {0}, {{0, 0, 0, 0, 0, 0, 0}}};
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+volatile uint32_t gMmCameraIntfLogLevel = 0;
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_generate_handler
+ *
+ * DESCRIPTION: utility function to generate handler for camera/channel/stream
+ *
+ * PARAMETERS :
+ *   @index: index of the object to have handler
+ *
+ * RETURN     : uint32_t type of handle that uniquely identify the object
+ *==========================================================================*/
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+    uint32_t handler = 0;
+    pthread_mutex_lock(&g_handler_lock);
+    g_handler_history_count++;
+    if (0 == g_handler_history_count) {
+        g_handler_history_count++;
+    }
+    handler = g_handler_history_count;
+    handler = (handler<<8) | index;
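+    /* Resulting layout: bits [23:8] hold the 16-bit rolling history count
+     * (never zero), bits [7:0] hold the object index, e.g. count 0x0003 and
+     * index 0x01 yield handle 0x00000301. */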
+    pthread_mutex_unlock(&g_handler_lock);
+    return handler;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_index_by_handler
+ *
+ * DESCRIPTION: utility function to get index from handle
+ *
+ * PARAMETERS :
+ *   @handler: object handle
+ *
+ * RETURN     : uint8_t type of index derived from handle
+ *==========================================================================*/
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
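+    /* The object index lives in the low byte of the handle
+     * (see mm_camera_util_generate_handler). */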
+    return (handler&0x000000ff);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_dev_name
+ *
+ * DESCRIPTION: utility function to get device name from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : char ptr to the device name stored in global variable
+ * NOTE       : caller should not free the char ptr
+ *==========================================================================*/
+const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
+{
+    char *dev_name = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+    if(cam_idx < MM_CAMERA_MAX_NUM_SENSORS) {
+        dev_name = g_cam_ctrl.video_dev_name[cam_idx];
+    }
+    return dev_name;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_camera_by_handler
+ *
+ * DESCRIPTION: utility function to get camera object from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : ptr to the camera object stored in global variable
+ * NOTE       : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
+{
+    mm_camera_obj_t *cam_obj = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+
+    if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
+        (NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+        (cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+        cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+    }
+    return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_query_capability(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s E: camera_handler = %d ", __func__, camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
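+        /* Acquire the per-camera lock before releasing the global interface
+         * lock so a concurrent close cannot free my_obj while this call is
+         * in flight; every interface entry point below follows the same
+         * lock hand-over pattern. */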
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_query_capability(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_parms(uint32_t camera_handle, void *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_parms(my_obj, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. The parameters to be retrieved are already
+ *              indicated by the upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_parms(uint32_t camera_handle, void *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_parms(my_obj, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we always assume an auto_focus
+ *              event will follow.
+ *==========================================================================*/
+static int32_t mm_camera_intf_do_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_do_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_prepare_snapshot(uint32_t camera_handle,
+                                               int32_t do_af_flag)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_prepare_snapshot(my_obj, do_af_flag);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_close
+ *
+ * DESCRIPTION: close a camera by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_close(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    uint8_t cam_idx = camera_handle & 0x00ff;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s E: camera_handler = %d ", __func__, camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if (my_obj){
+        my_obj->ref_count--;
+
+        if(my_obj->ref_count > 0) {
+            /* still have reference to obj, return here */
+            CDBG("%s: ref_count=%d\n", __func__, my_obj->ref_count);
+            pthread_mutex_unlock(&g_intf_lock);
+            rc = 0;
+        } else {
+            /* no other reference remains, so close the camera here;
+             * first clear g_cam_ctrl's reference to cam_obj */
+            g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+            pthread_mutex_lock(&my_obj->cam_lock);
+            pthread_mutex_unlock(&g_intf_lock);
+
+            rc = mm_camera_close(my_obj);
+
+            pthread_mutex_destroy(&my_obj->cam_lock);
+            free(my_obj);
+        }
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in the channel for bundling.
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
+                                           mm_camera_channel_attr_t *attr,
+                                           mm_camera_buf_notify_t channel_cb,
+                                           void *userdata)
+{
+    uint32_t ch_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d", __func__, camera_handle);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X ch_id = %d", __func__, ch_id);
+    return ch_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_channel(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ch_id = %d", __func__, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_bundle_info(uint32_t camera_handle,
+                                              uint32_t ch_id,
+                                              cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ch_id = %d", __func__, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_bundle_info(my_obj, ch_id, bundle_info);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_register_event_notify
+ *
+ * DESCRIPTION: register for event notify
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @evt_cb       : callback for event notify
+ *   @user_data    : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_event_notify(uint32_t camera_handle,
+                                                    mm_camera_event_notify_t evt_cb,
+                                                    void * user_data)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ", __func__);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :E rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handle,
+                                    uint32_t ch_id,
+                                    mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_qbuf(my_obj, ch_id, buf);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X evt_type = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_queued_buf_count
+ *
+ * DESCRIPTION: returns the queued buffer count
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id : stream id
+ *
+ * RETURN     : int32_t - queued buffer count (-1 on failure)
+ *
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_queued_buf_count(uint32_t camera_handle,
+        uint32_t ch_id, uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_queued_buf_count(my_obj, ch_id, stream_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X queued buffer count = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_link_stream
+ *
+ * DESCRIPTION: link a stream into a new channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream id
+ *   @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN     : int32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+static int32_t mm_camera_intf_link_stream(uint32_t camera_handle,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id)
+{
+    int32_t id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s : E handle = %d ch_id = %d",
+         __func__, camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        id = (int32_t)mm_camera_link_stream(my_obj, ch_id, stream_id, linked_ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X stream_id = %d", __func__, stream_id);
+    return id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    uint32_t stream_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s : E handle = %d ch_id = %d",
+         __func__, camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        stream_id = mm_camera_add_stream(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X stream_id = %d", __func__, stream_id);
+    return stream_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_stream(uint32_t camera_handle,
+                                         uint32_t ch_id,
+                                         uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s : E handle = %d ch_id = %d stream_id = %d",
+         __func__, camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_config_stream(uint32_t camera_handle,
+                                            uint32_t ch_id,
+                                            uint32_t stream_id,
+                                            mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E handle = %d, ch_id = %d,stream_id = %d",
+         __func__, camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :mm_camera_intf_config_stream stream_id = %d",__func__,stream_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
+                                            uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_channel(uint32_t camera_handle,
+                                           uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain amount of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_request_super_buf(uint32_t camera_handle,
+                                                uint32_t ch_id,
+                                                uint32_t num_buf_requested)
+{
+    int32_t rc = -1;
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_request_super_buf(my_obj, ch_id, num_buf_requested);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain amount
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_super_buf_request(uint32_t camera_handle,
+                                                       uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @frame_idx    : frame index
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush_super_buf_queue(uint32_t camera_handle,
+                                                    uint32_t ch_id, uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_flush_super_buf_queue(my_obj, ch_id, frame_idx);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_zsl_snapshot
+ *
+ * DESCRIPTION: Starts zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_zsl_snapshot(uint32_t camera_handle,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_zsl_snapshot_ch(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_zsl_snapshot
+ *
+ * DESCRIPTION: Stops zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_zsl_snapshot(uint32_t camera_handle,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_zsl_snapshot_ch(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_configure_notify_mode
+ *
+ * DESCRIPTION: Configures channel notification mode
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_configure_notify_mode(uint32_t camera_handle,
+                                                    uint32_t ch_id,
+                                                    mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_channel_notify(my_obj, ch_id, notify_mode);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be mapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_buf(uint32_t camera_handle,
+                                      uint8_t buf_type,
+                                      int fd,
+                                      size_t size)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_buf(my_obj, buf_type, fd, size);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be unmapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_buf(uint32_t camera_handle,
+                                        uint8_t buf_type)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_buf(my_obj, buf_type);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d",
+         __func__, camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_stream_parms(my_obj, ch_id, s_id, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. The parameters to be retrieved are already
+ *              indicated by the upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d",
+         __func__, camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_stream_parms(my_obj, ch_id, s_id, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_buf(uint32_t camera_handle,
+                                             uint32_t ch_id,
+                                             uint32_t stream_id,
+                                             uint8_t buf_type,
+                                             uint32_t buf_idx,
+                                             int32_t plane_idx,
+                                             int fd,
+                                             size_t size)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+         __func__, camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+                                      buf_type, buf_idx, plane_idx,
+                                      fd, size);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_stream_buf(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t stream_id,
+                                               uint8_t buf_type,
+                                               uint32_t buf_idx,
+                                               int32_t plane_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+         __func__, camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_stream_buf(my_obj, ch_id, stream_id,
+                                        buf_type, buf_idx, plane_idx);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_sensor_info
+ *
+ * DESCRIPTION: get sensor info like facing(back/front) and mount angle
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *==========================================================================*/
+void get_sensor_info()
+{
+    int rc = 0;
+    int dev_fd = 0;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    size_t num_cameras = 0;
+
+    CDBG("%s : E", __func__);
+    /* g_intf_lock is already held by the caller (get_num_of_cameras) */
+    while (1) {
+        char dev_name[32];
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd <= 0) {
+            CDBG("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = 0;
+            num_cameras = 0;
+            break;
+        }
+
+        if(strncmp(mdev_info.model,  MSM_CONFIGURATION_NAME, sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = 0;
+            continue;
+        }
+
+        unsigned int num_entities = 1;
+        while (1) {
+            struct media_entity_desc entity;
+            uint32_t temp;
+            uint32_t mount_angle;
+            uint32_t facing;
+
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                CDBG("Done enumerating media entities\n");
+                rc = 0;
+                break;
+            }
+            if(entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+                entity.group_id == MSM_CAMERA_SUBDEV_SENSOR) {
+                temp = entity.flags >> 8;
+                mount_angle = (temp & 0xFF) * 90;
+                facing = (temp >> 8);
+                ALOGD("index = %u flag = %x mount_angle = %u facing = %u\n",
+                    (unsigned int)num_cameras, (unsigned int)temp,
+                    (unsigned int)mount_angle, (unsigned int)facing);
+                g_cam_ctrl.info[num_cameras].facing = (int)facing;
+                g_cam_ctrl.info[num_cameras].orientation = (int)mount_angle;
+                num_cameras++;
+                continue;
+            }
+        }
+
+        CDBG("%s: dev_info[id=%zu,name='%s']\n",
+            __func__, num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+
+        close(dev_fd);
+        dev_fd = 0;
+    }
+
+    /* g_intf_lock is released by the caller */
+    CDBG("%s: num_cameras=%d\n", __func__, g_cam_ctrl.num_cam);
+    return;
+}
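+
+/* A small worked example of the entity.flags decoding above (the value is
+ * hypothetical; it only assumes the packing already decoded in
+ * get_sensor_info()):
+ *
+ *   entity.flags = 0x10300
+ *   temp         = entity.flags >> 8  = 0x103
+ *   mount_angle  = (temp & 0xFF) * 90 = 3 * 90 = 270
+ *   facing       = temp >> 8          = 0x1
+ *
+ * facing is stored as-is in g_cam_ctrl.info[].facing and later compared
+ * against CAMERA_FACING_BACK/FRONT in sort_camera_info() below.
+ */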
+
+/*===========================================================================
+ * FUNCTION   : sort_camera_info
+ *
+ * DESCRIPTION: sort camera info so that back-camera indices are smaller than front-camera indices
+ *
+ * PARAMETERS : number of cameras
+ *
+ * RETURN     :
+ *==========================================================================*/
+void sort_camera_info(int num_cam)
+{
+    int idx = 0, i;
+    struct camera_info temp_info[MM_CAMERA_MAX_NUM_SENSORS];
+    char temp_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+    memset(temp_info, 0, sizeof(temp_info));
+    memset(temp_dev_name, 0, sizeof(temp_dev_name));
+
+    /* firstly save the back cameras info*/
+    for (i = 0; i < num_cam; i++) {
+        if (g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    /* then save the front cameras info*/
+    for (i = 0; i < num_cam; i++) {
+        if (g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    memcpy(g_cam_ctrl.info, temp_info, sizeof(temp_info));
+    memcpy(g_cam_ctrl.video_dev_name, temp_dev_name, sizeof(temp_dev_name));
+    return;
+}
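+
+/* A small worked example of sort_camera_info() (hypothetical probe order):
+ * if the sensors are discovered as { FRONT, BACK }, then after sorting
+ * g_cam_ctrl.info holds { BACK, FRONT } and video_dev_name is reordered the
+ * same way, so camera index 0 refers to a back camera whenever one exists.
+ */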
+
+/*===========================================================================
+ * FUNCTION   : get_num_of_cameras
+ *
+ * DESCRIPTION: get number of cameras
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : number of cameras supported
+ *==========================================================================*/
+uint8_t get_num_of_cameras()
+{
+    int rc = 0;
+    int dev_fd = 0;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    int8_t num_cameras = 0;
+    char subdev_name[32];
+    int32_t sd_fd = 0;
+    struct sensor_init_cfg_data cfg;
+    char prop[PROPERTY_VALUE_MAX];
+    uint32_t temp;
+    uint32_t log_level;
+    uint32_t debug_mask;
+    int8_t fixed_num_cameras = 0;
+
+    /*  Upper 4 bits  : debug log level (default level is 1, which prints all CDBG_HIGH)
+        Lower 28 bits : control mask for sub-module logging (only 3 sub-modules in HAL)
+                        0x1 for HAL
+                        0x10 for mm-camera-interface
+                        0x100 for mm-jpeg-interface  */
+    property_get("persist.camera.hal.debug.mask", prop, "268435463"); // 0x10000007=268435463
+    temp = (uint32_t) atoi(prop);
+    log_level = ((temp >> 28) & 0xF);
+    debug_mask = (temp & HAL_DEBUG_MASK_MM_CAMERA_INTERFACE);
+    if (debug_mask > 0)
+        gMmCameraIntfLogLevel = log_level;
+    else
+        gMmCameraIntfLogLevel = 0; // Debug logs are not required if debug_mask is zero
+
+    CDBG_HIGH("%s gMmCameraIntfLogLevel=%d",__func__, gMmCameraIntfLogLevel);
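+    /* Worked example for the default property value above:
+     *   "268435463"  ->  temp = 0x10000007
+     *   log_level    = (temp >> 28) & 0xF = 1
+     *   debug_mask   = temp & HAL_DEBUG_MASK_MM_CAMERA_INTERFACE
+     * i.e. the interface log level defaults to 1 whenever the
+     * mm-camera-interface bit is present in the lower 28 bits. */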
+
+    property_get("vold.decrypt", prop, "0");
+    int decrypt = atoi(prop);
+    if (decrypt == 1)
+     return 0;
+
+    /* lock the mutex */
+    pthread_mutex_lock(&g_intf_lock);
+
+    while (1) {
+        uint32_t num_entities = 1U;
+        char dev_name[32];
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd < 0) {
+            CDBG("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = 0;
+            break;
+        }
+
+        if (strncmp(mdev_info.model, "msm_config", sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = 0;
+            continue;
+        }
+
+        while (1) {
+            struct media_entity_desc entity;
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            CDBG_ERROR("entity id %d", entity.id);
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                CDBG_ERROR("Done enumerating media entities");
+                rc = 0;
+                break;
+            }
+            CDBG_ERROR("entity name %s type %d group id %d",
+                entity.name, entity.type, entity.group_id);
+            if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+                entity.group_id == MSM_CAMERA_SUBDEV_SENSOR_INIT) {
+                snprintf(subdev_name, sizeof(subdev_name), "/dev/%s", entity.name);
+                break;
+            }
+        }
+        close(dev_fd);
+        dev_fd = 0;
+    }
+
+    /* Open sensor_init subdev */
+    sd_fd = open(subdev_name, O_RDWR);
+    if (sd_fd < 0) {
+        CDBG_ERROR("Open sensor_init subdev failed");
+        /* release g_intf_lock taken above before bailing out */
+        pthread_mutex_unlock(&g_intf_lock);
+        return FALSE;
+    }
+
+    cfg.cfgtype = CFG_SINIT_PROBE_WAIT_DONE;
+    cfg.cfg.setting = NULL;
+    if (ioctl(sd_fd, VIDIOC_MSM_SENSOR_INIT_CFG, &cfg) < 0) {
+        CDBG_ERROR("failed");
+    }
+    close(sd_fd);
+    dev_fd = 0;
+
+
+    num_media_devices = 0;
+    while (1) {
+        uint32_t num_entities = 1U;
+        char dev_name[32];
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd <= 0) {
+            CDBG("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = 0;
+            num_cameras = 0;
+            break;
+        }
+
+        if(strncmp(mdev_info.model, MSM_CAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = 0;
+            continue;
+        }
+
+        while (1) {
+            struct media_entity_desc entity;
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                CDBG("Done enumerating media entities\n");
+                rc = 0;
+                break;
+            }
+            if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+                strncpy(g_cam_ctrl.video_dev_name[num_cameras],
+                     entity.name, sizeof(entity.name));
+                break;
+            }
+        }
+
+        CDBG("%s: dev_info[id=%d,name='%s']\n",
+            __func__, (int)num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+
+        num_cameras++;
+        close(dev_fd);
+        dev_fd = 0;
+    }
+
+    /* In L AOSP the maximum number of cameras is defined as 4, compared to 2
+     * in KK. Because of that and the multimode architecture, camera switching
+     * on 8x26 targets can fail with a KK-based camera stack, since non-zero
+     * camera-specific profiles exist in the dtsi file. Those profiles are
+     * needed for the JB-based camera stack. To support JB and KK camera
+     * projects with a common kernel, a setprop is provided so the user can
+     * cap the number of cameras, which avoids accessing an incorrect camera
+     * profile.
+    */
+    property_get("persist.camera.num_cameras",prop,"0");
+    fixed_num_cameras = atoi(prop);
+    if ( (fixed_num_cameras > 0)&& (num_cameras > fixed_num_cameras)) {
+        num_cameras = fixed_num_cameras;
+        CDBG_HIGH("%s: restricted num of cameras to: %d\n",__func__, num_cameras);
+    }
+
+    g_cam_ctrl.num_cam = num_cameras;
+
+    get_sensor_info();
+    sort_camera_info(g_cam_ctrl.num_cam);
+    /* unlock the mutex */
+    pthread_mutex_unlock(&g_intf_lock);
+    CDBG("%s: num_cameras=%d\n", __func__, (int)g_cam_ctrl.num_cam);
+    return(uint8_t)g_cam_ctrl.num_cam;
+}
+
+struct camera_info *get_cam_info(uint32_t camera_id)
+{
+    return &g_cam_ctrl.info[camera_id];
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_process_advanced_capture
+ *
+ * DESCRIPTION: Configures channel advanced capture mode
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @advanced_capture_type : advanced capture type
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_process_advanced_capture(uint32_t camera_handle,
+    mm_camera_advanced_capture_t advanced_capture_type,
+    uint32_t ch_id,
+    int8_t start_flag)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s: E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_channel_advanced_capture(my_obj, advanced_capture_type,
+            ch_id, (uint32_t)start_flag);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s: X ", __func__);
+    return rc;
+}
+
+/* camera ops v-table */
+static mm_camera_ops_t mm_camera_ops = {
+    .query_capability = mm_camera_intf_query_capability,
+    .register_event_notify = mm_camera_intf_register_event_notify,
+    .close_camera = mm_camera_intf_close,
+    .set_parms = mm_camera_intf_set_parms,
+    .get_parms = mm_camera_intf_get_parms,
+    .do_auto_focus = mm_camera_intf_do_auto_focus,
+    .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
+    .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+    .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
+    .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
+    .map_buf = mm_camera_intf_map_buf,
+    .unmap_buf = mm_camera_intf_unmap_buf,
+    .add_channel = mm_camera_intf_add_channel,
+    .delete_channel = mm_camera_intf_del_channel,
+    .get_bundle_info = mm_camera_intf_get_bundle_info,
+    .add_stream = mm_camera_intf_add_stream,
+    .link_stream = mm_camera_intf_link_stream,
+    .delete_stream = mm_camera_intf_del_stream,
+    .config_stream = mm_camera_intf_config_stream,
+    .qbuf = mm_camera_intf_qbuf,
+    .get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
+    .map_stream_buf = mm_camera_intf_map_stream_buf,
+    .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
+    .set_stream_parms = mm_camera_intf_set_stream_parms,
+    .get_stream_parms = mm_camera_intf_get_stream_parms,
+    .start_channel = mm_camera_intf_start_channel,
+    .stop_channel = mm_camera_intf_stop_channel,
+    .request_super_buf = mm_camera_intf_request_super_buf,
+    .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+    .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
+    .configure_notify_mode = mm_camera_intf_configure_notify_mode,
+    .process_advanced_capture = mm_camera_intf_process_advanced_capture
+};
+
+/*===========================================================================
+ * FUNCTION   : camera_open
+ *
+ * DESCRIPTION: open a camera by camera index
+ *
+ * PARAMETERS :
+ *   @camera_idx : camera index. should within range of 0 to num_of_cameras
+ *
+ * RETURN     : ptr to a virtual table containing camera handle and operation table.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx)
+{
+    int32_t rc = 0;
+    mm_camera_obj_t* cam_obj = NULL;
+
+    CDBG("%s: E camera_idx = %d\n", __func__, camera_idx);
+    if (camera_idx >= g_cam_ctrl.num_cam) {
+        CDBG_ERROR("%s: Invalid camera_idx (%d)", __func__, camera_idx);
+        return NULL;
+    }
+
+    pthread_mutex_lock(&g_intf_lock);
+    /* opened already */
+    if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+        /* Add reference */
+        g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+        pthread_mutex_unlock(&g_intf_lock);
+        CDBG("%s: opened already\n", __func__);
+        return &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
+    }
+
+    cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+    if(NULL == cam_obj) {
+        pthread_mutex_unlock(&g_intf_lock);
+        CDBG("%s:  no mem", __func__);
+        return NULL;
+    }
+
+    /* initialize camera obj */
+    memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+    cam_obj->ref_count++;
+    cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+    cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+    cam_obj->vtbl.ops = &mm_camera_ops;
+    pthread_mutex_init(&cam_obj->cam_lock, NULL);
+    /* unlock the global interface lock; otherwise, in the dual-camera use
+     * case, the current open would block operations on another already-opened
+     * camera obj */
+    pthread_mutex_lock(&cam_obj->cam_lock);
+    pthread_mutex_unlock(&g_intf_lock);
+
+    rc = mm_camera_open(cam_obj);
+
+    pthread_mutex_lock(&g_intf_lock);
+    if(rc != 0) {
+        CDBG_ERROR("%s: mm_camera_open err = %d", __func__, rc);
+        pthread_mutex_destroy(&cam_obj->cam_lock);
+        g_cam_ctrl.cam_obj[camera_idx] = NULL;
+        free(cam_obj);
+        cam_obj = NULL;
+        pthread_mutex_unlock(&g_intf_lock);
+        return NULL;
+    }else{
+        CDBG("%s: Open succeeded\n", __func__);
+        g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+        pthread_mutex_unlock(&g_intf_lock);
+        return &cam_obj->vtbl;
+    }
+}
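+
+/* A minimal usage sketch (illustration only, not part of this file's API
+ * surface): enumerate cameras, open one, dispatch through the returned
+ * v-table and close it again. Error handling and capability parsing are
+ * omitted.
+ *
+ *   uint8_t num = get_num_of_cameras();
+ *   if (num > 0) {
+ *       mm_camera_vtbl_t *cam = camera_open(0);
+ *       if (NULL != cam) {
+ *           cam->ops->query_capability(cam->camera_handle);
+ *           cam->ops->close_camera(cam->camera_handle);
+ *       }
+ *   }
+ */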
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100644
index 0000000..6fda893
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,229 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_create
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ *  @cam_id   : camera ID
+ *  @sock_type: socket type (stream or datagram)
+ *
+ * RETURN     : fd related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+    int socket_fd;
+    mm_camera_sock_addr_t sock_addr;
+    int sktype;
+    int rc;
+
+    switch (sock_type)
+    {
+      case MM_CAMERA_SOCK_TYPE_UDP:
+        sktype = SOCK_DGRAM;
+        break;
+      case MM_CAMERA_SOCK_TYPE_TCP:
+        sktype = SOCK_STREAM;
+        break;
+      default:
+        CDBG_ERROR("%s: unknown socket type =%d", __func__, sock_type);
+        return -1;
+    }
+    socket_fd = socket(AF_UNIX, sktype, 0);
+    if (socket_fd < 0) {
+        CDBG_ERROR("%s: failed to create socket, fd =%d", __func__, socket_fd);
+        return socket_fd;
+    }
+
+    memset(&sock_addr, 0, sizeof(sock_addr));
+    sock_addr.addr_un.sun_family = AF_UNIX;
+    snprintf(sock_addr.addr_un.sun_path, UNIX_PATH_MAX, "/data/misc/camera/cam_socket%d", cam_id);
+    rc = connect(socket_fd, &sock_addr.addr, sizeof(sock_addr.addr_un));
+    if (0 != rc) {
+      close(socket_fd);
+      socket_fd = -1;
+      CDBG_ERROR("%s: socket_fd=%d %s ", __func__, socket_fd, strerror(errno));
+    }
+
+    CDBG("%s: socket_fd=%d %s", __func__, socket_fd,
+        sock_addr.addr_un.sun_path);
+    return socket_fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_close
+ *
+ * DESCRIPTION:  close domain socket by its fd
+ *   @fd      : file descriptor for the domain socket to be closed
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+    if (fd > 0) {
+      close(fd);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_sendmsg
+ *
+ * DESCRIPTION:  send msg through domain socket
+ *   @fd      : socket fd
+ *   @msg     : pointer to msg to be sent over domain socket
+ *   @sendfd  : file descriptors to be sent
+ *
+ * RETURN     : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr * cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+
+    if (msg == NULL) {
+      CDBG("%s: msg is NULL", __func__);
+      return -1;
+    }
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+    CDBG("%s: iov_len=%llu", __func__,
+            (unsigned long long int)iov[0].iov_len);
+
+    msgh.msg_control = NULL;
+    msgh.msg_controllen = 0;
+
+    /* if sendfd is valid, we need to pass it through control msg */
+    if( sendfd > 0) {
+      msgh.msg_control = control;
+      msgh.msg_controllen = sizeof(control);
+      cmsghp = CMSG_FIRSTHDR(&msgh);
+      if (cmsghp != NULL) {
+        CDBG("%s: Got ctrl msg pointer", __func__);
+        cmsghp->cmsg_level = SOL_SOCKET;
+        cmsghp->cmsg_type = SCM_RIGHTS;
+        cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+        *((int *)CMSG_DATA(cmsghp)) = sendfd;
+        CDBG("%s: cmsg data=%d", __func__, *((int *) CMSG_DATA(cmsghp)));
+      } else {
+        CDBG("%s: ctrl msg NULL", __func__);
+        return -1;
+      }
+    }
+
+    return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_recvmsg
+ *
+ * DESCRIPTION:  receive msg from domain socket.
+ *   @fd      : socket fd
+ *   @msg     : pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ *              need be allocated by the caller
+ *   @buf_size: the size of the buf that holds incoming msg
+ *   @rcvdfd  : pointer to hold recvd file descriptor if not NULL.
+ *
+ * RETURN     : the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr *cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+    int rcvd_fd = -1;
+    int rcvd_len = 0;
+
+    if ( (msg == NULL) || (buf_size <= 0) ) {
+      CDBG_ERROR(" %s: msg buf is NULL", __func__);
+      return -1;
+    }
+
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+    msgh.msg_control = control;
+    msgh.msg_controllen = sizeof(control);
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+
+    if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+      CDBG_ERROR(" %s: recvmsg failed", __func__);
+      return rcvd_len;
+    }
+
+    CDBG("%s:  msg_ctrl %p len %zu", __func__, msgh.msg_control,
+        msgh.msg_controllen);
+
+    if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+        (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+      if (cmsghp->cmsg_level == SOL_SOCKET &&
+        cmsghp->cmsg_type == SCM_RIGHTS) {
+        CDBG("%s:  CtrlMsg is valid", __func__);
+        rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+        CDBG("%s:  Received fd=%d", __func__, rcvd_fd);
+      } else {
+        CDBG_ERROR("%s:  Unexpected Control Msg. Line=%d", __func__, __LINE__);
+      }
+    }
+
+    if (rcvdfd) {
+      *rcvdfd = rcvd_fd;
+    }
+
+    return rcvd_len;
+}
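+
+/* A minimal sketch of how these helpers pair up for passing a buffer fd to
+ * the daemon over the per-camera domain socket (cam_id, packet contents and
+ * buf_fd are illustrative; the packet type is defined in mm_camera_sock.h):
+ *
+ *   int sock = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
+ *   mm_camera_sock_msg_packet_t packet;   // filled in by the caller
+ *   mm_camera_socket_sendmsg(sock, &packet, sizeof(packet), buf_fd);
+ *   int rcvd_fd = -1;
+ *   mm_camera_socket_recvmsg(sock, &packet, sizeof(packet), &rcvd_fd);
+ *   mm_camera_socket_close(sock);
+ */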
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..3459efb
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,3056 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <time.h>
+#include <cam_semaphore.h>
+#ifdef VENUS_PRESENT
+#include <media/msm_media_info.h>
+#endif
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* internal function declarations */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+                       mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_sync_info(mm_stream_t *my_obj);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes);
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+                           mm_camera_buf_def_t *frame);
+int32_t mm_stream_get_queued_buf_count(mm_stream_t * my_obj);
+
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj);
+int32_t mm_stream_calc_offset_preview(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_stream_info_t *stream_info,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *plns);
+
+
+/* state machine function declare */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
+
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_notify_channel
+ *
+ * DESCRIPTION: function to notify channel object on received buffer
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              < 0 -- failure
+ *==========================================================================*/
+int32_t mm_stream_notify_channel(struct mm_channel* ch_obj,
+        mm_camera_buf_info_t *buf_info)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    if ((NULL == ch_obj) || (NULL == buf_info)) {
+        CDBG_ERROR("%s : Invalid channel/buffer", __func__);
+        return -ENODEV;
+    }
+
+    /* send cam_sem_post to wake up channel cmd thread to enqueue
+     * to super buffer */
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+        node->u.buf = *buf_info;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(ch_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(ch_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -ENOMEM;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_handle_rcvd_buf
+ *
+ * DESCRIPTION: function to handle newly received stream buffer
+ *
+ * PARAMETERS :
+ *   @cam_obj : stream object
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+                               mm_camera_buf_info_t *buf_info,
+                               uint8_t has_cb)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* enqueue to super buf thread */
+    if (my_obj->is_bundled) {
+        rc = mm_stream_notify_channel(my_obj->ch_obj, buf_info);
+        if (rc < 0) {
+            CDBG_ERROR("%s: Unable to notify channel", __func__);
+        }
+    }
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if(my_obj->is_linked) {
+        /* need to add into super buf for linking, add ref count */
+        my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+
+        rc = mm_stream_notify_channel(my_obj->linked_obj, buf_info);
+        if (rc < 0) {
+            CDBG_ERROR("%s: Unable to notify channel", __func__);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    if (my_obj->ch_obj->previewSkipCnt &&
+            my_obj->stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+        my_obj->ch_obj->previewSkipCnt--;
+        CDBG_HIGH("%s: Skipping preview frame, pending skip count %d", __func__,
+                my_obj->ch_obj->previewSkipCnt);
+        mm_stream_buf_done(my_obj, buf_info->buf);
+        return;
+    }
+
+    pthread_mutex_lock(&my_obj->cmd_lock);
+    if(has_cb && my_obj->cmd_thread.is_active) {
+        mm_camera_cmdcb_t* node = NULL;
+
+        /* send cam_sem_post to wake up cmd thread to dispatch dataCB */
+        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(mm_camera_cmdcb_t));
+            node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+            node->u.buf = *buf_info;
+
+            /* enqueue to cmd thread */
+            cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+            /* wake up cmd thread */
+            cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+        } else {
+            CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cmd_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_data_notify
+ *
+ * DESCRIPTION: callback to handle data notify from kernel
+ *
+ * PARAMETERS :
+ *   @user_data : user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_data_notify(void* user_data)
+{
+    mm_stream_t *my_obj = (mm_stream_t*)user_data;
+    int32_t i, rc;
+    uint8_t has_cb = 0;
+    mm_camera_buf_info_t buf_info;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    if (MM_STREAM_STATE_ACTIVE != my_obj->state) {
+        /* this CB should only be received in the active (stream-on) state;
+         * if not, return here */
+        CDBG_ERROR("%s: ERROR!! Wrong state (%d) to receive data notify!",
+                   __func__, my_obj->state);
+        return;
+    }
+
+    memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+    rc = mm_stream_read_msm_frame(my_obj, &buf_info,
+        (uint8_t)my_obj->frame_offset.num_planes);
+    if (rc != 0) {
+        return;
+    }
+    uint32_t idx = buf_info.buf->buf_idx;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb) {
+            /* for every CB, add ref count */
+            has_cb = 1;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    /* update buffer location */
+    my_obj->buf_status[idx].in_kernel = 0;
+
+    /* update buf ref count */
+    if (my_obj->is_bundled) {
+        /* need to add into super buf since bundled, add ref count */
+        my_obj->buf_status[idx].buf_refcnt++;
+    }
+    my_obj->buf_status[idx].buf_refcnt =
+        (uint8_t)(my_obj->buf_status[idx].buf_refcnt + has_cb);
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_dispatch_app_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing stream buffer information
+ *   @userdata: user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+                                        void* user_data)
+{
+    int i;
+    mm_stream_t * my_obj = (mm_stream_t *)user_data;
+    mm_camera_buf_info_t* buf_info = NULL;
+    mm_camera_super_buf_t super_buf;
+    mm_camera_cmd_thread_name("mm_cam_stream");
+
+    if (NULL == my_obj) {
+        return;
+    }
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+        CDBG_ERROR("%s: Wrong cmd_type (%d) for dataCB",
+                   __func__, cmd_cb->cmd_type);
+        return;
+    }
+
+    buf_info = &cmd_cb->u.buf;
+    memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+    super_buf.num_bufs = 1;
+    super_buf.bufs[0] = buf_info->buf;
+    super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+    super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb) {
+            if (my_obj->buf_cb[i].cb_count != 0) {
+                /* if <0, means infinite CB
+                 * if >0, means CB for certain times
+                 * both case we need to call CB */
+
+                /* increase buf ref cnt */
+                pthread_mutex_lock(&my_obj->buf_lock);
+                my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+                pthread_mutex_unlock(&my_obj->buf_lock);
+
+                /* callback */
+                my_obj->buf_cb[i].cb(&super_buf,
+                                     my_obj->buf_cb[i].user_data);
+            }
+
+            /* if >0, reduce count by 1 every time we called CB until reaches 0
+             * when count reach 0, reset the buf_cb to have no CB */
+            if (my_obj->buf_cb[i].cb_count > 0) {
+                my_obj->buf_cb[i].cb_count--;
+                if (0 == my_obj->buf_cb[i].cb_count) {
+                    my_obj->buf_cb[i].cb = NULL;
+                    my_obj->buf_cb[i].user_data = NULL;
+                }
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    /* do buf_done since we increased refcnt by one when has_cb */
+    mm_stream_buf_done(my_obj, buf_info->buf);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_fn
+ *
+ * DESCRIPTION: stream finite state machine entry function. Depends on stream
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                         mm_stream_evt_type_t evt,
+                         void * in_val,
+                         void * out_val)
+{
+    int32_t rc = -1;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch (my_obj->state) {
+    case MM_STREAM_STATE_NOTUSED:
+        CDBG("%s: Not handling evt in unused state", __func__);
+        break;
+    case MM_STREAM_STATE_INITED:
+        rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACQUIRED:
+        rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_CFG:
+        rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_BUFFED:
+        rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_REG:
+        rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACTIVE:
+        rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
+        break;
+    default:
+        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+        break;
+    }
+    CDBG("%s : X rc =%d",__func__,rc);
+    return rc;
+}
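+
+/* Typical state progression driven through mm_stream_fsm_fn(), as implemented
+ * by the per-state handlers below (teardown mirrors the setup path):
+ *
+ *   INITED --ACQUIRE--> ACQUIRED --SET_FMT--> CFG --GET_BUF--> BUFFED
+ *   BUFFED --REG_BUF--> REG --START--> ACTIVE
+ *   ACTIVE --STOP--> REG --UNREG_BUF--> BUFFED --PUT_BUF--> CFG --RELEASE--> NOTUSED
+ */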
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_inited
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in INITED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_ACQUIRE:
+        if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+            CDBG_ERROR("%s: NULL channel or camera obj\n", __func__);
+            rc = -1;
+            break;
+        }
+        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+                 mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl));
+
+        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (my_obj->fd <= 0) {
+            CDBG_ERROR("%s: open dev returned %d\n", __func__, my_obj->fd);
+            rc = -1;
+            break;
+        }
+        CDBG("%s: open dev fd = %d\n", __func__, my_obj->fd);
+        rc = mm_stream_set_ext_mode(my_obj);
+        if (0 == rc) {
+            my_obj->state = MM_STREAM_STATE_ACQUIRED;
+        } else {
+            /* failed setting ext_mode
+             * close fd */
+            close(my_obj->fd);
+            my_obj->fd = 0;
+            break;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_acquired
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACQUIRED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val)
+{
+    int32_t rc = 0;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configed */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        /* change state to not used */
+         my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_cfg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in CONFIGURED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configured */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_BUF:
+        rc = mm_stream_init_bufs(my_obj);
+        /* change state to buffers allocated */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_BUFFED;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_buffed
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in BUFFED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_PUT_BUF:
+        rc = mm_stream_deinit_bufs(my_obj);
+        /* change state to configured */
+        my_obj->state = MM_STREAM_STATE_CFG;
+        break;
+    case MM_STREAM_EVT_REG_BUF:
+        rc = mm_stream_reg_buf(my_obj);
+        /* change state to regged */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_reg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in REGGED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    switch(evt) {
+    case MM_STREAM_EVT_UNREG_BUF:
+        rc = mm_stream_unreg_buf(my_obj);
+
+        /* change state to buffed */
+        my_obj->state = MM_STREAM_STATE_BUFFED;
+        break;
+    case MM_STREAM_EVT_START:
+        {
+            uint8_t has_cb = 0;
+            uint8_t i;
+            /* launch cmd thread if CB is not null */
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+                if(NULL != my_obj->buf_cb[i].cb) {
+                    has_cb = 1;
+                    break;
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+
+            pthread_mutex_lock(&my_obj->cmd_lock);
+            if (has_cb) {
+                snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
+                mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+                                            mm_stream_dispatch_app_data,
+                                            (void *)my_obj);
+            }
+            pthread_mutex_unlock(&my_obj->cmd_lock);
+
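+            /* State is set to ACTIVE before STREAMON; if STREAMON fails, the
+             * cmd thread is released and the state is rolled back to REG
+             * below. */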
+            my_obj->state = MM_STREAM_STATE_ACTIVE;
+            rc = mm_stream_streamon(my_obj);
+            if (0 != rc) {
+                /* failed stream on, need to release cmd thread if it's launched */
+                pthread_mutex_lock(&my_obj->cmd_lock);
+                if (has_cb) {
+                    mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+                }
+                pthread_mutex_unlock(&my_obj->cmd_lock);
+                my_obj->state = MM_STREAM_STATE_REG;
+                break;
+            }
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_active
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACTIVE
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_QBUF:
+        rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+        break;
+    case MM_STREAM_EVT_GET_QUEUED_BUF_COUNT:
+        rc = mm_stream_get_queued_buf_count(my_obj);
+        break;
+    case MM_STREAM_EVT_STOP:
+        {
+            uint8_t has_cb = 0;
+            uint8_t i;
+            rc = mm_stream_streamoff(my_obj);
+
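+            /* Mirror MM_STREAM_EVT_START: the app data cmd thread was only
+             * launched when a buffer callback exists, so it is released under
+             * the same condition here. */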
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+                if(NULL != my_obj->buf_cb[i].cb) {
+                    has_cb = 1;
+                    break;
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+
+            pthread_mutex_lock(&my_obj->cmd_lock);
+            if (has_cb) {
+                mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+            }
+            pthread_mutex_unlock(&my_obj->cmd_lock);
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_DO_ACTION:
+        rc = mm_stream_do_action(my_obj, in_val);
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_config
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    my_obj->stream_info = config->stream_info;
+    my_obj->buf_num = (uint8_t) config->stream_info->num_bufs;
+    my_obj->mem_vtbl = config->mem_vtbl;
+    my_obj->padding_info = config->padding_info;
+    /* cb registered through the interface is always placed at idx 0 of buf_cb */
+    my_obj->buf_cb[0].cb = config->stream_cb;
+    my_obj->buf_cb[0].user_data = config->userdata;
+    my_obj->buf_cb[0].cb_count = -1; /* infinite by default */
+
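+    /* Sync the stream info with the backend first, then program the v4l2
+     * format for this stream. */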
+    rc = mm_stream_sync_info(my_obj);
+    if (rc == 0) {
+        rc = mm_stream_set_fmt(my_obj);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_release
+ *
+ * DESCRIPTION: release a stream resource
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_release(mm_stream_t *my_obj)
+{
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* close fd */
+    if(my_obj->fd > 0)
+    {
+        close(my_obj->fd);
+    }
+
+    /* destroy mutex */
+    pthread_mutex_destroy(&my_obj->buf_lock);
+    pthread_mutex_destroy(&my_obj->cb_lock);
+    pthread_mutex_destroy(&my_obj->cmd_lock);
+
+    /* reset stream obj */
+    memset(my_obj, 0, sizeof(mm_stream_t));
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamon
+ *
+ * DESCRIPTION: start streaming by sending a v4l2 STREAMON request to the kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+    int32_t rc;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+    if (rc < 0) {
+        CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
+                   __func__, rc);
+        /* remove fd from data poll thread in case of failure */
+        mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, mm_camera_sync_call);
+    }
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamoff
+ *
+ * DESCRIPTION: stop streaming by sending a v4l2 STREAMOFF request to the kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+    int32_t rc;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* step1: remove fd from data poll thread */
+    rc = mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, mm_camera_sync_call);
+    if (rc < 0) {
+        /* The error might be due to async update. In this case
+         * wait for all updates to complete before proceeding. */
+        rc = mm_camera_poll_thread_commit_updates(&my_obj->ch_obj->poll_thread[0]);
+        if (rc < 0) {
+            CDBG_ERROR("%s: Poll sync failed %d",
+                 __func__, rc);
+        }
+    }
+
+    /* step2: stream off */
+    rc = ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+    if (rc < 0) {
+        CDBG_ERROR("%s: STREAMOFF failed: %s\n",
+                __func__, strerror(errno));
+    }
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_read_msm_frame
+ *
+ * DESCRIPTION: dequeue a stream buffer from kernel queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_info     : ptr to a struct storing buffer information
+ *   @num_planes   : number of planes in the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes)
+{
+    int32_t rc = 0;
+    struct v4l2_buffer vb;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memset(&vb,  0,  sizeof(vb));
+    vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    vb.memory = V4L2_MEMORY_USERPTR;
+    vb.m.planes = &planes[0];
+    vb.length = num_planes;
+
+    rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+    if (0 > rc) {
+        CDBG_ERROR("%s: VIDIOC_DQBUF ioctl call failed on stream type %d (rc=%d): %s",
+            __func__, my_obj->stream_info->stream_type, rc, strerror(errno));
+    } else {
+        pthread_mutex_lock(&my_obj->buf_lock);
+        my_obj->queued_buffer_count--;
+        if (0 == my_obj->queued_buffer_count) {
+            CDBG_HIGH("%s: Stopping poll on stream %p type: %d", __func__,
+                my_obj, my_obj->stream_info->stream_type);
+            mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl, mm_camera_async_call);
+            CDBG_HIGH("%s: Stopped poll on stream %p type: %d", __func__,
+                my_obj, my_obj->stream_info->stream_type);
+        }
+        uint32_t idx = vb.index;
+        buf_info->buf = &my_obj->buf[idx];
+        buf_info->frame_idx = vb.sequence;
+        buf_info->stream_id = my_obj->my_hdl;
+
+        buf_info->buf->stream_id = my_obj->my_hdl;
+        buf_info->buf->buf_idx = idx;
+        buf_info->buf->frame_idx = vb.sequence;
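+        /* v4l2 reports the timestamp as a struct timeval; convert the usec
+         * part to nsec for the HAL buffer timestamp. */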
+        buf_info->buf->ts.tv_sec  = vb.timestamp.tv_sec;
+        buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+#if 0
+        /* If YUV format, check chroma size to see if extra subsampling
+                is applied */
+        if (my_obj->stream_info->fmt >= CAM_FORMAT_YUV_420_NV12 &&
+            my_obj->stream_info->fmt <= CAM_FORMAT_YUV_422_NV61 &&
+            my_obj->stream_info->buf_planes.plane_info.mp[1].len / 4 ==
+            planes[1].bytesused)
+          buf_info->buf->is_uv_subsampled = 1;
+        else
+          buf_info->buf->is_uv_subsampled = 0;
+#else
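+        /* The driver is expected to flag extra chroma subsampling by placing
+         * an NV14/NV41 fourcc in vb.reserved. */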
+        buf_info->buf->is_uv_subsampled =
+            (vb.reserved == V4L2_PIX_FMT_NV14 || vb.reserved == V4L2_PIX_FMT_NV41);
+#endif
+
+        CDBG_HIGH("%s: VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d, queued cnt %d\n",
+                   __func__, vb.index, buf_info->buf->frame_idx,
+                   my_obj->stream_info->stream_type,my_obj->queued_buffer_count);
+        pthread_mutex_unlock(&my_obj->buf_lock);
+        if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
+            rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
+                my_obj->mem_vtbl.user_data);
+            if (0 > rc) {
+                CDBG_ERROR("%s: Clean invalidate cache failed on buffer index: %d",
+                    __func__, idx);
+            }
+        } else {
+            CDBG_ERROR("%s: Clean invalidate cache op not supported", __func__);
+        }
+    }
+
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
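+    /* The parameter buffer itself is already mapped to the server (see NOTE
+     * above); the CAM_PRIV_STREAM_PARM control call only signals the server
+     * to process it. */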
+    if (in_value != NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be retrieved from the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be retrieved
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (in_value != NULL) {
+        rc = mm_camera_util_g_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_do_action
+ *
+ * DESCRIPTION: request server to perform stream based actions
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a struct of actions to be performed by the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of actions to be performed
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (in_value != NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_ext_mode
+ *
+ * DESCRIPTION: set stream extended mode to server via v4l2 ioctl
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Server will return a server stream id that uniquely identifies
+ *              this stream on the server side. Later per-stream communication
+ *              with the server should use this server stream id.
+ *==========================================================================*/
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_streamparm s_parm;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memset(&s_parm, 0, sizeof(s_parm));
+    s_parm.type =  V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+    CDBG("%s:stream fd=%d, rc=%d, extended_mode=%d\n",
+         __func__, my_obj->fd, rc, s_parm.parm.capture.extendedmode);
+    if (rc == 0) {
+        /* get server stream id */
+        my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_qbuf
+ *
+ * DESCRIPTION: enqueue a buffer back to the kernel queue for future use
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf          : ptr to a struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+    int32_t rc = 0;
+    struct v4l2_buffer buffer;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d, stream type = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state,
+         my_obj->stream_info->stream_type);
+
+    memcpy(planes, buf->planes, sizeof(planes));
+    memset(&buffer, 0, sizeof(buffer));
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    buffer.memory = V4L2_MEMORY_USERPTR;
+    buffer.index = (__u32)buf->buf_idx;
+    buffer.m.planes = &planes[0];
+    buffer.length = (__u32)buf->num_planes;
+
+    CDBG("%s:plane 0: stream_hdl=%u,fd=%d,frame idx=%d,num_planes = %u, "
+         "offset = %d, data_offset = %d\n", __func__,
+                 buf->stream_id, buf->fd, buffer.index, buffer.length,
+                 buf->planes[0].reserved[0], buf->planes[0].data_offset);
+    CDBG("%s:plane 1: stream_hdl=%u,fd=%d,frame idx=%d,num_planes = %u, "
+         "offset = %d, data_offset = %d\n", __func__,
+                 buf->stream_id, buf->fd, buffer.index, buffer.length,
+                 buf->planes[1].reserved[0], buf->planes[1].data_offset);
+
+    if ( NULL != my_obj->mem_vtbl.invalidate_buf ) {
+        rc = my_obj->mem_vtbl.invalidate_buf(buffer.index,
+                                             my_obj->mem_vtbl.user_data);
+        if ( 0 > rc ) {
+            CDBG_ERROR("%s: Cache invalidate failed on buffer index: %d",
+                       __func__,
+                       buffer.index);
+            return rc;
+        }
+    } else {
+        CDBG_ERROR("%s: Cache invalidate op not added", __func__);
+    }
+
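+    /* The first queued buffer adds the stream fd to the data poll thread;
+     * mm_stream_read_msm_frame() removes it again once the queued count
+     * drops back to zero. */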
+    my_obj->queued_buffer_count++;
+    if (1 == my_obj->queued_buffer_count) {
+        /* Add fd to data poll thread */
+        CDBG_HIGH("%s: Starting poll on stream %p type: %d", __func__,
+            my_obj,my_obj->stream_info->stream_type);
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, my_obj->fd, mm_stream_data_notify, (void*)my_obj,
+            mm_camera_async_call);
+        if (0 > rc) {
+            CDBG_ERROR("%s: Add poll on stream %p type: %d fd error (rc=%d)",
+                __func__, my_obj, my_obj->stream_info->stream_type, rc);
+        } else {
+            CDBG_HIGH("%s: Started poll on stream %p type: %d", __func__,
+                my_obj, my_obj->stream_info->stream_type);
+        }
+    }
+    CDBG("%s: VIDIOC_QBUF:fd = %d, state = %d, stream type=%d, qbuf_index %d,frame_idx %d",
+               __func__, my_obj->fd, my_obj->state, my_obj->stream_info->stream_type,
+               buffer.index,buf->frame_idx);
+
+    rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+    if (0 > rc) {
+        CDBG_ERROR("%s: VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
+            __func__, my_obj->stream_info->stream_type, rc, strerror(errno));
+        my_obj->queued_buffer_count--;
+        if (0 == my_obj->queued_buffer_count) {
+            /* Remove fd from data poll in case of failing
+             * first buffer queuing attempt */
+            CDBG_HIGH("%s: Stopping poll on stream %p type: %d", __func__,
+                my_obj, my_obj->stream_info->stream_type);
+            mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl, mm_camera_async_call);
+            CDBG_HIGH("%s: Stopped poll on stream %p type: %d", __func__,
+                my_obj, my_obj->stream_info->stream_type);
+        }
+    } else {
+        CDBG("%s: VIDIOC_QBUF buf_index %d,stream type %d,frame_idx %d,queued cnt %d",
+                   __func__,buffer.index,
+                   my_obj->stream_info->stream_type,
+                   buf->frame_idx, my_obj->queued_buffer_count);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_request_buf
+ *
+ * DESCRIPTION: This function lets the kernel know the number of buffers that
+ *              need to be registered via v4l2 ioctl.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_requestbuffers bufreq;
+    uint8_t buf_num = my_obj->buf_num;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    CDBG_ERROR("%s: buf_num = %d, stream type = %d",
+         __func__, buf_num, my_obj->stream_info->stream_type);
+
+    if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+        CDBG_ERROR("%s: buf num %d > max limit %d\n",
+                   __func__, buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+        return -1;
+    }
+
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count = buf_num;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+      CDBG_ERROR("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+           __func__, my_obj->fd, rc);
+    }
+
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be mapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_map_buf(mm_stream_t * my_obj,
+                          uint8_t buf_type,
+                          uint32_t frame_idx,
+                          int32_t plane_idx,
+                          int fd,
+                          size_t size)
+{
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        CDBG_ERROR("%s: NULL obj of stream/channel/camera", __func__);
+        return -1;
+    }
+
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_map.frame_idx = frame_idx;
+    packet.payload.buf_map.plane_idx = plane_idx;
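+    /* The buffer fd is handed to mm_camera_util_sendmsg() so it can be
+     * shared with the server process over the domain socket. */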
+    return mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+                                  &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  fd);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be unmapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unmap_buf(mm_stream_t * my_obj,
+                            uint8_t buf_type,
+                            uint32_t frame_idx,
+                            int32_t plane_idx)
+{
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        CDBG_ERROR("%s: NULL obj of stream/channel/camera", __func__);
+        return -1;
+    }
+
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+    packet.payload.buf_unmap.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_unmap.frame_idx = frame_idx;
+    packet.payload.buf_unmap.plane_idx = plane_idx;
+    return mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+                                  &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping a stream buffer to the server via domain socket.
+ *              This function is passed to the upper layer as part of the ops
+ *              table, to be used when allocating stream buffers and mapping
+ *              them to the server via domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_map_buf_ops(uint32_t frame_idx,
+                                     int32_t plane_idx,
+                                     int fd,
+                                     size_t size,
+                                     void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_map_buf(my_obj,
+                             CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                             frame_idx, plane_idx, fd, size);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf_ops
+ *
+ * DESCRIPTION: ops for unmapping a stream buffer from the server via domain socket.
+ *              This function is passed to the upper layer as part of the ops
+ *              table, to be used when releasing stream buffers and unmapping
+ *              them from the server via domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_unmap_buf_ops(uint32_t frame_idx,
+                                       int32_t plane_idx,
+                                       void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_unmap_buf(my_obj,
+                               CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                               frame_idx,
+                               plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_init_bufs
+ *
+ * DESCRIPTION: initialize the stream buffers needed. This function requests
+ *              the buffers from the upper layer through the mem ops table
+ *              passed during the configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+    int32_t i, rc = 0;
+    uint8_t *reg_flags = NULL;
+    mm_camera_map_unmap_ops_tbl_t ops_tbl;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* deinit buf if it's not NULL*/
+    if (NULL != my_obj->buf) {
+        mm_stream_deinit_bufs(my_obj);
+    }
+
+    ops_tbl.map_ops = mm_stream_map_buf_ops;
+    ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+    ops_tbl.userdata = my_obj;
+
+    rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
+                                   &my_obj->buf_num,
+                                   &reg_flags,
+                                   &my_obj->buf,
+                                   &ops_tbl,
+                                   my_obj->mem_vtbl.user_data);
+
+    if (0 != rc) {
+        CDBG_ERROR("%s: Error get buf, rc = %d\n", __func__, rc);
+        return rc;
+    }
+
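+    /* reg_flags is allocated by the get_bufs() callback; its per-buffer
+     * initial registration flags are copied into buf_status below and the
+     * array is freed before returning. */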
+    my_obj->buf_status =
+        (mm_stream_buf_status_t *)malloc(sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+
+    if (NULL == my_obj->buf_status) {
+        CDBG_ERROR("%s: No memory for buf_status", __func__);
+        mm_stream_deinit_bufs(my_obj);
+        free(reg_flags);
+        return -1;
+    }
+
+    memset(my_obj->buf_status, 0, sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+    for (i = 0; i < my_obj->buf_num; i++) {
+        my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+        my_obj->buf[i].stream_id = my_obj->my_hdl;
+        my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
+    }
+
+    free(reg_flags);
+    reg_flags = NULL;
+
+    /* update in stream info about number of stream buffers */
+    my_obj->stream_info->num_bufs = my_obj->buf_num;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_deinit_bufs
+ *
+ * DESCRIPTION: return stream buffers to upper layer through the mem ops table
+ *              passed during configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_map_unmap_ops_tbl_t ops_tbl;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (NULL == my_obj->buf) {
+        CDBG("%s: Buf is NULL, no need to deinit", __func__);
+        return rc;
+    }
+
+    /* release bufs */
+    ops_tbl.map_ops = mm_stream_map_buf_ops;
+    ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+    ops_tbl.userdata = my_obj;
+
+    rc = my_obj->mem_vtbl.put_bufs(&ops_tbl,
+                                   my_obj->mem_vtbl.user_data);
+
+    free(my_obj->buf);
+    my_obj->buf = NULL;
+    if (my_obj->buf_status != NULL) {
+        free(my_obj->buf_status);
+        my_obj->buf_status = NULL;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf
+ *
+ * DESCRIPTION: register buffers with kernel by calling v4l2 ioctl QBUF for
+ *              each buffer in the stream
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    uint8_t i;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    rc = mm_stream_request_buf(my_obj);
+    if (rc != 0) {
+        return rc;
+    }
+
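+    /* Buffers flagged for initial registration are queued to the kernel now;
+     * the rest stay with the upper layer and start with a refcount of 1. */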
+    pthread_mutex_lock(&my_obj->buf_lock);
+    my_obj->queued_buffer_count = 0;
+    for(i = 0; i < my_obj->buf_num; i++){
+        /* check if need to qbuf initially */
+        if (my_obj->buf_status[i].initial_reg_flag) {
+            rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+            if (rc != 0) {
+                CDBG_ERROR("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+                break;
+            }
+            my_obj->buf_status[i].buf_refcnt = 0;
+            my_obj->buf_status[i].in_kernel = 1;
+        } else {
+            /* the buf is held by upper layer, will not queue into kernel.
+             * add buf reference count */
+            my_obj->buf_status[i].buf_refcnt = 1;
+            my_obj->buf_status[i].in_kernel = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unreg_buf
+ *
+ * DESCRIPTION: unregister all stream buffers from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t i, rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* unreg buf to kernel */
+    bufreq.count = 0;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG_ERROR("%s: fd=%d, VIDIOC_REQBUFS failed, rc=%d\n",
+              __func__, my_obj->fd, rc);
+    }
+
+    /* reset buf reference count */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if (NULL != my_obj->buf_status) {
+        for(i = 0; i < my_obj->buf_num; i++){
+            my_obj->buf_status[i].buf_refcnt = 0;
+            my_obj->buf_status[i].in_kernel = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_v4l2_fmt
+ *
+ * DESCRIPTION: translate camera image format into FOURCC code
+ *
+ * PARAMETERS :
+ *   @fmt     : camera image format
+ *
+ * RETURN     : FOURCC code for image format
+ *==========================================================================*/
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt)
+{
+    uint32_t val;
+    switch(fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+        val = V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        val = V4L2_PIX_FMT_NV21;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+        val= V4L2_PIX_FMT_SBGGR10;
+        break;
+    case CAM_FORMAT_YUV_422_NV61:
+        val= V4L2_PIX_FMT_NV61;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+        val= V4L2_PIX_FMT_YUYV;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+        val= V4L2_PIX_FMT_YVYU;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+        val= V4L2_PIX_FMT_UYVY;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+        val= V4L2_PIX_FMT_VYUY;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        val= V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+        val= V4L2_PIX_FMT_NV16;
+        break;
+    default:
+        val = 0;
+        CDBG_ERROR("%s: Unknown fmt=%d", __func__, fmt);
+        break;
+    }
+    CDBG("%s: fmt=%d, val =%d", __func__, fmt, val);
+    return val;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_preview
+ *
+ * DESCRIPTION: calculate preview frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_preview(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
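+    /* Each non-Venus case below fills per-plane stride/scanline/len and pads
+     * the combined frame_len up to 4K; the Venus case takes its sizes from
+     * the VENUS_* helpers. */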
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset = 0;
+        buf_planes->plane_info.mp[2].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[2].offset_x = 0;
+        buf_planes->plane_info.mp[2].offset_y = 0;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = dim->height;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+        buf_planes->plane_info.frame_len =
+            VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        buf_planes->plane_info.mp[1].len =
+            buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+#else
+        CDBG_ERROR("%s: Venus hardware not avail, cannot use this format", __func__);
+        rc = -1;
+#endif
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format for preview %d",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_post_view
+ *
+ * DESCRIPTION: calculate postview frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_64);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_64);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset = 0;
+        buf_planes->plane_info.mp[2].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[2].offset_x = 0;
+        buf_planes->plane_info.mp[2].offset_y = 0;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = dim->height;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+        buf_planes->plane_info.frame_len =
+            VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        buf_planes->plane_info.mp[1].len =
+            buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+#else
+        CDBG_ERROR("%s: Venus hardware not avail, cannot use this format", __func__);
+        rc = -1;
+#endif
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format for preview %d",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
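+/*
+ * Worked example for the CAM_FORMAT_YUV_422_NV16 preview case above, using
+ * illustrative dimensions (not taken from this change): for a 1280x720
+ * stream, stride = PAD_TO_SIZE(1280, CAM_PAD_TO_16) = 1280 and scanline =
+ * 720, so each of the two planes (Y, plus interleaved CbCr at full vertical
+ * resolution) is 1280 * 720 = 921600 bytes, and frame_len =
+ * PAD_TO_SIZE(1843200, CAM_PAD_TO_4K) = 1843200, assuming CAM_PAD_TO_4K
+ * rounds up to a multiple of 4096 bytes.
+ */
+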
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_snapshot
+ *
+ * DESCRIPTION: calculate snapshot/postproc frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @stream_info : stream info
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_snapshot(cam_stream_info_t *stream_info,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    cam_format_t fmt = stream_info->fmt;
+    int32_t rc = 0;
+    uint8_t isAFamily = mm_camera_util_chip_is_a_family();
+    int offset_x = 0, offset_y = 0;
+    int stride = 0, scanline = 0;
+
+    if (isAFamily) {
+        stride = dim->width;
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_16);
+        offset_x = 0;
+        offset_y = scanline - dim->height;
+        scanline += offset_y; /* double padding */
+    } else {
+        stride = PAD_TO_SIZE(dim->width,
+                             padding->width_padding);
+        scanline = PAD_TO_SIZE(dim->height,
+                               padding->height_padding);
+        offset_x = 0;
+        offset_y = 0;
+    }
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+
+        if (stream_info->reprocess_config.pp_feature_config.feature_mask &
+                CAM_QCOM_FEATURE_TRUEPORTRAIT) {
+            /* allocate extra mem for TP to get meta from backend */
+            buf_planes->plane_info.frame_len =
+                    PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                    buf_planes->plane_info.mp[1].len +
+                    stream_info->reprocess_config.pp_feature_config.tp_param.meta_max_size,
+                    CAM_PAD_TO_4K);
+            CDBG_HIGH("%s: Allocating extra %d memory for TruePortrait", __func__,
+                    stream_info->reprocess_config.pp_feature_config.tp_param.meta_max_size);
+        }
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].offset_x = offset_x;
+        buf_planes->plane_info.mp[2].offset_y = offset_y;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+            buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+            CAM_PAD_TO_4K);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format for snapshot %d",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
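+/*
+ * Note on the "double padding" branch above (illustrative numbers, not from
+ * this change): on an A-family chip with a 1000x750 snapshot, scanline is
+ * first padded to PAD_TO_SIZE(750, CAM_PAD_TO_16) = 752, giving offset_y = 2,
+ * and then grows by offset_y again to 754.  The active image therefore starts
+ * two rows into the Y plane, with matching padding rows above and below it;
+ * plane lengths and offsets are then rounded up to padding->plane_padding as
+ * shown in each case.
+ */
+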
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_raw
+ *
+ * DESCRIPTION: calculate raw frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int32_t stride = 0;
+    int32_t scanline = dim->height;
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+    case CAM_FORMAT_JPEG_RAW_8BIT:
+    case CAM_FORMAT_META_RAW_8BIT:
+        /* 1 plane */
+        /* 2 bytes per pixel: every 16 pixels occupy 32 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline * 2),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width =
+                (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR:
+        /* 1 plane */
+        /* Every 16 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+        /* Every 12 pixels occupy 16 bytes */
+        stride = (dim->width + 11)/12 * 12;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline * 8 / 6),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+        /* Every 10 pixels occupy 16 bytes */
+        stride = (dim->width + 9)/10 * 10;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline * 8 / 5),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+        /* Every 64 pixels occupy 80 bytes */
+        stride = PAD_TO_SIZE(dim->width * 5 / 4, CAM_PAD_TO_8);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+        /* Every 32 pixels occupy 48 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline * 3 / 2),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR:
+        /* Every 8 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_8);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline * 2),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format %d for raw stream",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
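+/*
+ * Worked example for the MIPI 10-bit Bayer case above (illustrative 4208x3120
+ * sensor, and assuming CAM_PAD_TO_8 / CAM_PAD_TO_4K mean rounding up to 8 and
+ * 4096 bytes): 10 bits per pixel pack into 4208 * 5 / 4 = 5260 bytes per row,
+ * padded to stride = 5264; mp[0].len = 5264 * 3120 = 16423680 bytes (already
+ * a multiple of a typical plane_padding such as 32), and frame_len =
+ * PAD_TO_SIZE(16423680, CAM_PAD_TO_4K) = 16424960.
+ */
+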
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_video
+ *
+ * DESCRIPTION: calculate video frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+#ifdef VENUS_PRESENT
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes)
+{
+    int stride = 0, scanline = 0;
+
+    // using Venus
+    stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+    scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+    buf_planes->plane_info.frame_len =
+        VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+    buf_planes->plane_info.num_planes = 2;
+    buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = stride;
+    buf_planes->plane_info.mp[0].scanline = scanline;
+    buf_planes->plane_info.mp[0].width = dim->width;
+    buf_planes->plane_info.mp[0].height = dim->height;
+    stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+    scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+    buf_planes->plane_info.mp[1].len =
+        buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+    buf_planes->plane_info.mp[1].offset = 0;
+    buf_planes->plane_info.mp[1].offset_x =0;
+    buf_planes->plane_info.mp[1].offset_y = 0;
+    buf_planes->plane_info.mp[1].stride = stride;
+    buf_planes->plane_info.mp[1].scanline = scanline;
+    buf_planes->plane_info.mp[1].width = dim->width;
+    buf_planes->plane_info.mp[1].height = dim->height;
+
+    return 0;
+}
+#else
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes)
+{
+    int stride = 0, scanline = 0;
+
+    buf_planes->plane_info.num_planes = 2;
+
+    stride = dim->width;
+    scanline = dim->height;
+    buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_2K);
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = stride;
+    buf_planes->plane_info.mp[0].scanline = scanline;
+    buf_planes->plane_info.mp[0].width = dim->width;
+    buf_planes->plane_info.mp[0].height = dim->height;
+
+    stride = dim->width;
+    scanline = dim->height / 2;
+    buf_planes->plane_info.mp[1].len =
+            PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_2K);
+    buf_planes->plane_info.mp[1].offset = 0;
+    buf_planes->plane_info.mp[1].offset_x =0;
+    buf_planes->plane_info.mp[1].offset_y = 0;
+    buf_planes->plane_info.mp[1].stride = stride;
+    buf_planes->plane_info.mp[1].scanline = scanline;
+    buf_planes->plane_info.mp[1].width = dim->width;
+    buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+    buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                    buf_planes->plane_info.mp[1].len,
+                    CAM_PAD_TO_4K);
+
+    return 0;
+}
+#endif
+
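+/*
+ * Worked example for the non-Venus video path above (illustrative 1920x1080
+ * stream, assuming CAM_PAD_TO_2K and CAM_PAD_TO_4K round up to 2048 and 4096
+ * bytes): the Y plane is PAD_TO_SIZE(1920 * 1080, 2048) = 2074624 bytes, the
+ * CbCr plane is PAD_TO_SIZE(1920 * 540, 2048) = 1038336 bytes, and frame_len
+ * = PAD_TO_SIZE(2074624 + 1038336, 4096) = 3112960 bytes.  The Venus path
+ * instead takes all strides, scanlines and the total buffer size from the
+ * VENUS_* helper macros.
+ */
+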
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_metadata
+ *
+ * DESCRIPTION: calculate metadata frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    buf_planes->plane_info.num_planes = 1;
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE((uint32_t)(dim->width * dim->height),
+                    padding->plane_padding);
+    buf_planes->plane_info.frame_len =
+        buf_planes->plane_info.mp[0].len;
+
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = dim->width;
+    buf_planes->plane_info.mp[0].scanline = dim->height;
+    buf_planes->plane_info.mp[0].width = dim->width;
+    buf_planes->plane_info.mp[0].height = dim->height;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_postproc
+ *
+ * DESCRIPTION: calculate postprocess frame offset
+ *
+ * PARAMETERS :
+ *   @stream_info: ptr to stream info
+ *   @padding : padding information
+ *   @plns : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *plns)
+{
+    int32_t rc = 0;
+    cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+    if (stream_info->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+        type = stream_info->reprocess_config.offline.input_type;
+        if (CAM_STREAM_TYPE_DEFAULT == type) {
+            if (plns->plane_info.frame_len == 0) {
+                // take offset from input source
+                *plns = stream_info->reprocess_config.offline.input_buf_planes;
+            }
+            return rc;
+        }
+    } else {
+        type = stream_info->reprocess_config.online.input_stream_type;
+    }
+
+    switch (type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        rc = mm_stream_calc_offset_preview(stream_info->fmt,
+                                           &stream_info->dim,
+                                           plns);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_post_view(stream_info->fmt,
+                                           &stream_info->dim,
+                                           plns);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(stream_info,
+                                            &stream_info->dim,
+                                            padding,
+                                            plns);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(&stream_info->dim,
+                        plns);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(stream_info->fmt,
+                                       &stream_info->dim,
+                                       padding,
+                                       plns);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&stream_info->dim,
+                                            padding,
+                                            plns);
+        break;
+    default:
+        CDBG_ERROR("%s: not supported for stream type %d",
+                   __func__, type);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
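+/*
+ * Reprocess sizing summary for the dispatcher above: an offline reprocess
+ * stream whose input_type is CAM_STREAM_TYPE_DEFAULT simply inherits the
+ * plane layout from reprocess_config.offline.input_buf_planes (and only when
+ * the caller has not already filled plns, i.e. frame_len == 0).  For any
+ * other input type, online or offline, the layout is recomputed with the
+ * matching per-type helper using stream_info->fmt and stream_info->dim.
+ */
+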
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+
+    cam_dimension_t dim = my_obj->stream_info->dim;
+    if (my_obj->stream_info->pp_config.feature_mask & CAM_QCOM_FEATURE_CPP &&
+        my_obj->stream_info->stream_type != CAM_STREAM_TYPE_VIDEO) {
+        if (my_obj->stream_info->pp_config.rotation == ROTATE_90 ||
+            my_obj->stream_info->pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            dim.width = my_obj->stream_info->dim.height;
+            dim.height = my_obj->stream_info->dim.width;
+        }
+    }
+
+    switch (my_obj->stream_info->stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        rc = mm_stream_calc_offset_preview(my_obj->stream_info->fmt,
+                                           &dim,
+                                           &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+      rc = mm_stream_calc_offset_post_view(my_obj->stream_info->fmt,
+                                         &dim,
+                                         &my_obj->stream_info->buf_planes);
+      break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(my_obj->stream_info,
+                                            &dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_postproc(my_obj->stream_info,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(&dim,
+                                         &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(my_obj->stream_info->fmt,
+                                       &dim,
+                                       &my_obj->padding_info,
+                                       &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    default:
+        CDBG_ERROR("%s: not supported for stream type %d",
+                   __func__, my_obj->stream_info->stream_type);
+        rc = -1;
+        break;
+    }
+
+    my_obj->frame_offset = my_obj->stream_info->buf_planes.plane_info;
+    return rc;
+}
+
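+/*
+ * Illustrative note on the rotation handling above: with
+ * CAM_QCOM_FEATURE_CPP set and pp_config.rotation = ROTATE_90 or ROTATE_270,
+ * a 1920x1080 snapshot is sized as 1080x1920 before
+ * mm_stream_calc_offset_snapshot() runs, so the buffers fit the rotated
+ * output; CAM_STREAM_TYPE_VIDEO streams keep their original dimensions in
+ * this block.
+ */
+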
+/*===========================================================================
+ * FUNCTION   : mm_stream_sync_info
+ *
+ * DESCRIPTION: synchronize stream information with server
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assumes the stream info buffer is mapped to the server and
+ *              filled in with stream information by the upper layer. This
+ *              call lets the server synchronize the stream information with
+ *              the HAL. If the server finds any fields that need to be
+ *              changed according to the hardware configuration, it modifies
+ *              those fields so that the HAL knows about them.
+ *==========================================================================*/
+int32_t mm_stream_sync_info(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    int32_t value = 0;
+    my_obj->stream_info->stream_svr_id = my_obj->server_stream_id;
+    rc = mm_stream_calc_offset(my_obj);
+
+    if (rc == 0) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd,
+                                   CAM_PRIV_STREAM_INFO_SYNC,
+                                   &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_fmt
+ *
+ * DESCRIPTION: set stream format to kernel via v4l2 ioctl
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_format fmt;
+    struct msm_v4l2_format_data msm_fmt;
+    int i;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (my_obj->stream_info->dim.width == 0 ||
+        my_obj->stream_info->dim.height == 0) {
+        CDBG_ERROR("%s:invalid input[w=%d,h=%d,fmt=%d]\n",
+                   __func__,
+                   my_obj->stream_info->dim.width,
+                   my_obj->stream_info->dim.height,
+                   my_obj->stream_info->fmt);
+        return -1;
+    }
+
+    memset(&fmt, 0, sizeof(fmt));
+    memset(&msm_fmt, 0, sizeof(msm_fmt));
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    msm_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;
+    msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;
+    msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);
+    msm_fmt.num_planes = (unsigned char)my_obj->frame_offset.num_planes;
+    for (i = 0; i < msm_fmt.num_planes; i++) {
+        msm_fmt.plane_sizes[i] = my_obj->frame_offset.mp[i].len;
+    }
+
+    memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
+    rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
+    return rc;
+}
+
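+/*
+ * Continuing the illustrative 1920x1080 non-Venus video example from
+ * mm_stream_calc_offset_video(): my_obj->frame_offset then reports
+ * num_planes = 2 with plane lengths 2074624 and 1038336, which
+ * mm_stream_set_fmt() copies into msm_fmt.plane_sizes[] and passes to the
+ * kernel packed inside fmt.fmt.raw_data via VIDIOC_S_FMT.
+ */
+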
+/*===========================================================================
+ * FUNCTION   : mm_stream_buf_done
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @frame        : frame to be enqueued back to kernel
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+                           mm_camera_buf_def_t *frame)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+        CDBG("%s: Error Trying to free second time?(idx=%d) count=%d\n",
+                   __func__, frame->buf_idx,
+                   my_obj->buf_status[frame->buf_idx].buf_refcnt);
+        rc = -1;
+    }else{
+        my_obj->buf_status[frame->buf_idx].buf_refcnt--;
+        if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+            CDBG("<DEBUG> : Buf done for buffer:%d", frame->buf_idx);
+            rc = mm_stream_qbuf(my_obj, frame);
+            if(rc < 0) {
+                CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                           __func__, frame->buf_idx, rc);
+            } else {
+                my_obj->buf_status[frame->buf_idx].in_kernel = 1;
+            }
+        }else{
+            CDBG("<DEBUG> : Still ref count pending count :%d",
+                 my_obj->buf_status[frame->buf_idx].buf_refcnt);
+            CDBG("<DEBUG> : for buffer:%p:%d",
+                 my_obj, frame->buf_idx);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    return rc;
+}
+
+
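+/*
+ * Reference-count behaviour of mm_stream_buf_done(), sketched for a
+ * hypothetical frame shared by two consumers (buf_refcnt == 2): the first
+ * call only drops the count to 1; the second call drops it to 0, re-queues
+ * the buffer to the kernel with mm_stream_qbuf() and marks it in_kernel; any
+ * further call finds buf_refcnt == 0 and is rejected with -1.
+ */
+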
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t mm_stream_get_queued_buf_count(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+            __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    pthread_mutex_lock(&my_obj->buf_lock);
+    rc = my_obj->queued_buffer_count;
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf_cb
+ *
+ * DESCRIPTION: Allow another stream to register a data callback on this stream.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @val          : ptr to info about the callback to be registered
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+                             mm_stream_data_cb_t *val)
+{
+    int32_t rc = -1;
+    uint8_t i;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL == my_obj->buf_cb[i].cb) {
+            my_obj->buf_cb[i] = *val;
+            rc = 0;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..e60f54c
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,677 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+typedef enum {
+    /* poll entries updated */
+    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
+    /* poll entries updated asynchronous */
+    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC,
+    /* commit updates */
+    MM_CAMERA_PIPE_CMD_COMMIT,
+    /* exit */
+    MM_CAMERA_PIPE_CMD_EXIT,
+    /* max count */
+    MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TASK_STATE_STOPPED,
+    MM_CAMERA_POLL_TASK_STATE_POLL,     /* polling pid in polling state. */
+    MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+    uint32_t cmd;
+    mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig_async
+ *
+ * DESCRIPTION: Asynchronous call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig_async(mm_camera_poll_thread_t *poll_cb,
+                                        uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+
+    CDBG("%s: E cmd = %d", __func__,cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the status to false */
+    poll_cb->status = FALSE;
+
+    /* send cmd to worker */
+    ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if (len < 1) {
+        CDBG_ERROR("%s: len = %lld, errno = %d", __func__,
+                (long long int)len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    CDBG("%s: begin IN mutex write done, len = %lld", __func__,
+            (long long int)len);
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: X", __func__);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig
+ *
+ * DESCRIPTION: Synchronous call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+
+    CDBG("%s: E cmd = %d", __func__,cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the status to false */
+    poll_cb->status = FALSE;
+    /* send cmd to worker */
+
+    ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if(len < 1) {
+        CDBG_ERROR("%s: len = %lld, errno = %d", __func__,
+                (long long int)len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    CDBG("%s: begin IN mutex write done, len = %lld", __func__,
+            (long long int)len);
+    /* wait till worker task gives positive signal */
+    if (FALSE == poll_cb->status) {
+        CDBG("%s: wait", __func__);
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    /* done */
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: X", __func__);
+    return 0;
+}
+
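+/*
+ * Pipe command flow for the two helpers above: both write a single
+ * mm_camera_sig_evt_t to the pipe write end pfds[1].  The poll loop wakes on
+ * pfds[0], rebuilds its fd table in mm_camera_poll_proc_pipe(), and for a
+ * synchronous command calls mm_camera_poll_sig_done(), which sets status =
+ * TRUE and signals cond_v to release the caller blocked in
+ * mm_camera_poll_sig().  mm_camera_poll_sig_async() returns as soon as the
+ * write completes, without waiting for that acknowledgement.
+ */
+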
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig_done
+ *
+ * DESCRIPTION: signal that the posted command has been processed
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = TRUE;
+    pthread_cond_signal(&poll_cb->cond_v);
+    CDBG("%s: done, in mutex", __func__);
+    pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_set_state
+ *
+ * DESCRIPTION: set a polling state
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *   @state   : polling state (stopped/polling)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+                                     mm_camera_poll_task_state_type_t state)
+{
+    poll_cb->state = state;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_proc_pipe
+ *
+ * DESCRIPTION: polling thread routine to process pipe
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+    ssize_t read_len;
+    int i;
+    mm_camera_sig_evt_t cmd_evt;
+    read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+    CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+         __func__, poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+    switch (cmd_evt.cmd) {
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
+        /* we always have index 0 for pipe read */
+        poll_cb->num_fds = 0;
+        poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+        poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+        poll_cb->num_fds++;
+
+        if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type) {
+            if (poll_cb->poll_entries[0].fd > 0) {
+                /* fd is valid, we update poll_fds */
+                poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+                poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                poll_cb->num_fds++;
+            }
+        } else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+            for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+                if(poll_cb->poll_entries[i].fd > 0) {
+                    /* fd is valid, we update poll_fds to this fd */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                    poll_cb->num_fds++;
+                } else {
+                    /* fd is invalid, so set the entry to -1 to prevent polling.
+                     * Per the poll() spec, entries with fd = -1 are ignored.
+                     * If that were not the case, these invalid fds would have
+                     * to be skipped when updating this array.
+                     * Keeping fd = -1 entries in place preserves the mapping
+                     * from a ready fd back to its callback via index - 1
+                     * (index 0 is reserved for the pipe read end). */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+                    poll_cb->num_fds++;
+                }
+            }
+        }
+        if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
+            mm_camera_poll_sig_done(poll_cb);
+        break;
+
+    case MM_CAMERA_PIPE_CMD_COMMIT:
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    case MM_CAMERA_PIPE_CMD_EXIT:
+    default:
+        mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    }
+}
+
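+/*
+ * Resulting poll_fds layout after mm_camera_poll_proc_pipe() rebuilds it:
+ *
+ *   poll_fds[0] -> pfds[0]                (pipe read end, always present)
+ *   poll_fds[i] -> poll_entries[i - 1].fd (stream/event fd, or -1 if unused)
+ *
+ * Entries left at fd = -1 are ignored by poll(), so the i <-> i - 1 mapping
+ * used by mm_camera_poll_fn() to find the right notify_cb stays stable.
+ */
+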
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_fn
+ *
+ * DESCRIPTION: polling thread routine
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+    int rc = 0, i;
+    if (poll_cb == NULL) {
+        CDBG_ERROR("%s: error: poll_cb=%p",__func__,poll_cb);
+        return NULL;
+    }
+    CDBG("%s: poll type = %d, num_fd = %d poll_cb = %p\n",
+         __func__, poll_cb->poll_type, poll_cb->num_fds,poll_cb);
+    do {
+         for(i = 0; i < poll_cb->num_fds; i++) {
+            poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+         }
+
+         rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
+         if(rc > 0) {
+            if ((poll_cb->poll_fds[0].revents & POLLIN) &&
+                (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
+                /* if we have data on pipe, we only process pipe in this iteration */
+                CDBG("%s: cmd received on pipe\n", __func__);
+                mm_camera_poll_proc_pipe(poll_cb);
+            } else {
+                for(i=1; i<poll_cb->num_fds; i++) {
+                    /* Checking for ctrl events */
+                    if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+                        (poll_cb->poll_fds[i].revents & POLLPRI)) {
+                        CDBG("%s: mm_camera_evt_notify\n", __func__);
+                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+                        }
+                    }
+
+                    if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
+                        (poll_cb->poll_fds[i].revents & POLLIN) &&
+                        (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
+                        CDBG("%s: mm_stream_data_notify\n", __func__);
+                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+                        }
+                    }
+                }
+            }
+        } else {
+            /* on poll error, sleep 10 us (hard-coded) and retry */
+            usleep(10);
+            continue;
+        }
+    } while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread
+ *
+ * DESCRIPTION: polling thread entry function
+ *
+ * PARAMETERS :
+ *   @data    : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void *mm_camera_poll_thread(void *data)
+{
+    prctl(PR_SET_NAME, (unsigned long)"mm_cam_poll_th", 0, 0, 0);
+    mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
+
+    /* add pipe read fd into poll first */
+    poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
+
+    mm_camera_poll_sig_done(poll_cb);
+    mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
+    return mm_camera_poll_fn(poll_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_notify_entries_updated
+ *
+ * DESCRIPTION: notify the polling thread that entries for polling fd have
+ *              been updated
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
+{
+    /* send poll entries updated signal to poll thread */
+    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_commit_updates
+ *
+ * DESCRIPTION: sync with all previously pending async updates
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_commit_updates(mm_camera_poll_thread_t * poll_cb)
+{
+    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_add_poll_fd
+ *
+ * DESCRIPTION: add a new fd into polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *   @fd        : file descriptor need to be added into polling thread
+ *   @notify_cb : callback function to handle if any notify from fd
+ *   @userdata  : user data ptr
+ *   @call_type : Whether its Synchronous or Asynchronous call
+ *
+ * RETURN     : none
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          int32_t fd,
+                                          mm_camera_poll_notify_t notify_cb,
+                                          void* userdata,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if (MAX_STREAM_NUM_IN_BUNDLE > idx) {
+        poll_cb->poll_entries[idx].fd = fd;
+        poll_cb->poll_entries[idx].handler = handler;
+        poll_cb->poll_entries[idx].notify_cb = notify_cb;
+        poll_cb->poll_entries[idx].user_data = userdata;
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call) {
+            rc = mm_camera_poll_sig(poll_cb,
+                MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb,
+                MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC);
+        }
+    } else {
+        CDBG_ERROR("%s: invalid handler %d (%d)",
+                   __func__, handler, idx);
+    }
+    return rc;
+}
+
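+/*
+ * Illustrative registration call (hypothetical object and callback names,
+ * not from this file): a channel's data poll thread starts watching a
+ * stream fd with something like
+ *
+ *   mm_camera_poll_thread_add_poll_fd(&ch_obj->poll_thread[0],
+ *                                     my_obj->my_hdl,        // stream handle
+ *                                     my_obj->fd,            // stream fd
+ *                                     mm_stream_data_notify, // assumed cb
+ *                                     (void *)my_obj,
+ *                                     mm_camera_sync_call);
+ *
+ * With the asynchronous call type the entry update is only queued through
+ * the pipe and the caller does not wait for the poll loop to pick it up.
+ */
+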
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_del_poll_fd
+ *
+ * DESCRIPTION: delete a fd from polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if ((MAX_STREAM_NUM_IN_BUNDLE > idx) &&
+        (handler == poll_cb->poll_entries[idx].handler)) {
+        /* reset poll entry */
+        poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+        poll_cb->poll_entries[idx].handler = 0;
+        poll_cb->poll_entries[idx].notify_cb = NULL;
+
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call ) {
+            rc = mm_camera_poll_sig(poll_cb,
+                MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb,
+                MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC);
+        }
+    } else {
+        CDBG_ERROR("%s: invalid handler %d (%d)",
+                   __func__, handler, idx);
+        return -1;
+    }
+
+    return rc;
+}
+
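+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_launch
+ *
+ * DESCRIPTION: create the control pipe and launch the polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @poll_type : type of polling thread (data or event)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/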
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+                                     mm_camera_poll_thread_type_t poll_type)
+{
+    int32_t rc = 0;
+    poll_cb->poll_type = poll_type;
+
+    poll_cb->pfds[0] = 0;
+    poll_cb->pfds[1] = 0;
+    rc = pipe(poll_cb->pfds);
+    if(rc < 0) {
+        CDBG_ERROR("%s: pipe open rc=%d\n", __func__, rc);
+        return -1;
+    }
+
+    poll_cb->timeoutms = -1;  /* wait indefinitely */
+
+    CDBG("%s: poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+        __func__, poll_cb->poll_type,
+        poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+    pthread_mutex_init(&poll_cb->mutex, NULL);
+    pthread_cond_init(&poll_cb->cond_v, NULL);
+
+    /* launch the thread */
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = 0;
+    pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+    if(!poll_cb->status) {
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    pthread_setname_np(poll_cb->pid, "CAM_poll");
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: End",__func__);
+    return rc;
+}
+
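+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_release
+ *
+ * DESCRIPTION: signal the polling thread to exit, join it and release
+ *              the pipe and sync objects
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *==========================================================================*/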
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+    int32_t rc = 0;
+    if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+        CDBG_ERROR("%s: err, poll thread is not running.\n", __func__);
+        return rc;
+    }
+
+    /* send exit signal to poll thread */
+    mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+    /* wait until poll thread exits */
+    if (pthread_join(poll_cb->pid, NULL) != 0) {
+        CDBG_ERROR("%s: pthread dead already\n", __func__);
+    }
+
+    /* close pipe */
+    if(poll_cb->pfds[0]) {
+        close(poll_cb->pfds[0]);
+    }
+    if(poll_cb->pfds[1]) {
+        close(poll_cb->pfds[1]);
+    }
+
+    pthread_mutex_destroy(&poll_cb->mutex);
+    pthread_cond_destroy(&poll_cb->cond_v);
+    memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+    return rc;
+}
+
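+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread
+ *
+ * DESCRIPTION: worker loop of the cmd thread; waits on the cmd semaphore,
+ *              dequeues pending commands and dispatches them to the
+ *              registered callback until an EXIT cmd is received
+ *
+ * PARAMETERS :
+ *   @data : ptr to the cmd thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/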
+static void *mm_camera_cmd_thread(void *data)
+{
+    int running = 1;
+    int ret;
+    mm_camera_cmd_thread_t *cmd_thread =
+                (mm_camera_cmd_thread_t *)data;
+    mm_camera_cmdcb_t* node = NULL;
+
+    do {
+        do {
+            ret = cam_sem_wait(&cmd_thread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                CDBG_ERROR("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        /* we got notified about new cmd avail in cmd queue */
+        node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        while (node != NULL) {
+            switch (node->cmd_type) {
+            case MM_CAMERA_CMD_TYPE_EVT_CB:
+            case MM_CAMERA_CMD_TYPE_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
+            case MM_CAMERA_CMD_TYPE_START_ZSL:
+            case MM_CAMERA_CMD_TYPE_STOP_ZSL:
+            case MM_CAMERA_CMD_TYPE_GENERAL:
+            case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
+                if (NULL != cmd_thread->cb) {
+                    cmd_thread->cb(node, cmd_thread->user_data);
+                }
+                break;
+            case MM_CAMERA_CMD_TYPE_EXIT:
+            default:
+                running = 0;
+                break;
+            }
+            free(node);
+            node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        } /* (node != NULL) */
+    } while (running);
+    return NULL;
+}
+
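+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread_launch
+ *
+ * DESCRIPTION: initialize the cmd queue and semaphore and launch the
+ *              cmd thread
+ *
+ * PARAMETERS :
+ *   @cmd_thread : ptr to the cmd thread object
+ *   @cb         : callback to handle dequeued commands
+ *   @user_data  : user data passed back to the callback
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *==========================================================================*/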
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+                                    mm_camera_cmd_cb_t cb,
+                                    void* user_data)
+{
+    int32_t rc = 0;
+
+    cam_sem_init(&cmd_thread->cmd_sem, 0);
+    cam_queue_init(&cmd_thread->cmd_queue);
+    cmd_thread->cb = cb;
+    cmd_thread->user_data = user_data;
+    cmd_thread->is_active = TRUE;
+
+    /* launch the thread */
+    pthread_create(&cmd_thread->cmd_pid,
+                   NULL,
+                   mm_camera_cmd_thread,
+                   (void *)cmd_thread);
+
+    pthread_setname_np(cmd_thread->cmd_pid, "CAM_launch");
+    return rc;
+}
+
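+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread_name
+ *
+ * DESCRIPTION: set the name of the calling thread via prctl
+ *
+ * PARAMETERS :
+ *   @name : thread name to set
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *==========================================================================*/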
+int32_t mm_camera_cmd_thread_name(const char* name)
+{
+    int32_t rc = 0;
+    /* name the thread */
+    prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+    return rc;
+}
+
+
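+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread_stop
+ *
+ * DESCRIPTION: enqueue an EXIT cmd and wait until the cmd thread exits
+ *
+ * PARAMETERS :
+ *   @cmd_thread : ptr to the cmd thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/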
+int32_t mm_camera_cmd_thread_stop(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL == node) {
+        CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_camera_cmdcb_t));
+    node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+    cam_queue_enq(&cmd_thread->cmd_queue, node);
+    cam_sem_post(&cmd_thread->cmd_sem);
+
+    /* wait until cmd thread exits */
+    if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+        CDBG("%s: pthread dead already\n", __func__);
+    }
+    return rc;
+}
+
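+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread_destroy
+ *
+ * DESCRIPTION: deinit the cmd queue and semaphore and reset the cmd
+ *              thread object
+ *
+ * PARAMETERS :
+ *   @cmd_thread : ptr to the cmd thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *==========================================================================*/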
+int32_t mm_camera_cmd_thread_destroy(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    cam_queue_deinit(&cmd_thread->cmd_queue);
+    cam_sem_destroy(&cmd_thread->cmd_sem);
+    memset(cmd_thread, 0, sizeof(mm_camera_cmd_thread_t));
+    return rc;
+}
+
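+/*===========================================================================
+ * FUNCTION   : mm_camera_cmd_thread_release
+ *
+ * DESCRIPTION: stop and then destroy the cmd thread
+ *
+ * PARAMETERS :
+ *   @cmd_thread : ptr to the cmd thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/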
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    rc = mm_camera_cmd_thread_stop(cmd_thread);
+    if (0 == rc) {
+        rc = mm_camera_cmd_thread_destroy(cmd_thread);
+    }
+    return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/Android.mk b/msm8974/QCamera2/stack/mm-jpeg-interface/Android.mk
new file mode 100644
index 0000000..b4ad67c
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -0,0 +1,40 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS += -D_ANDROID_
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_C_INCLUDES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_C_INCLUDES += \
+    frameworks/native/include/media/openmax \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common \
+    $(LOCAL_PATH)/../../../ \
+    $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+    $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+ifeq ($(call is-board-platform-in-list, msm8974),true)
+    LOCAL_CFLAGS += -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=2
+else
+    LOCAL_CFLAGS += -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=1
+endif
+
+LOCAL_SRC_FILES := \
+    src/mm_jpeg_queue.c \
+    src/mm_jpeg_exif.c \
+    src/mm_jpeg.c \
+    src/mm_jpeg_interface.c \
+    src/mm_jpeg_ionbuf.c \
+    src/mm_jpegdec_interface.c \
+    src/mm_jpegdec.c
+
+LOCAL_MODULE := libmmjpeg_interface
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libqomx_core
+LOCAL_MODULE_TAGS := optional
+LOCAL_VENDOR_MODULE := true
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
new file mode 100644
index 0000000..106a5a7
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -0,0 +1,490 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_H_
+#define MM_JPEG_H_
+
+#include <cam_semaphore.h>
+#include "mm_jpeg_interface.h"
+#include "cam_list.h"
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "QOMX_JpegExtensions.h"
+#include "mm_jpeg_ionbuf.h"
+
+#define MM_JPEG_MAX_THREADS 30
+#define MM_JPEG_CIRQ_SIZE 30
+#define MM_JPEG_MAX_SESSION 10
+#define MAX_EXIF_TABLE_ENTRIES 50
+#define MAX_JPEG_SIZE 20000000
+#define MAX_OMX_HANDLES (5)
+
+
+/** mm_jpeg_abort_state_t:
+ *  @MM_JPEG_ABORT_NONE: Abort is not issued
+ *  @MM_JPEG_ABORT_INIT: Abort is issued from the client
+ *  @MM_JPEG_ABORT_DONE: Abort is completed
+ *
+ *  State representing the abort state
+ **/
+typedef enum {
+  MM_JPEG_ABORT_NONE,
+  MM_JPEG_ABORT_INIT,
+  MM_JPEG_ABORT_DONE,
+} mm_jpeg_abort_state_t;
+
+
+/* define max num of concurrent jpeg jobs supported by the OMX engine */
+#define NUM_MAX_JPEG_CNCURRENT_JOBS 2
+
+#define JOB_ID_MAGICVAL 0x1
+#define JOB_HIST_MAX 10000
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    CDBG_ERROR("%s:%d] written size %zu", __func__, __LINE__, len); \
+    fclose(fp); \
+  } else { \
+    CDBG_ERROR("%s:%d] open %s failed", __func__, __LINE__, filename); \
+  } \
+})
+
+/** DUMP_TO_FILE2:
+ *  @filename: file name
+ *  @p_addr1: address of the first buffer
+ *  @len1: length of the first buffer
+ *  @p_addr2: address of the second buffer
+ *  @len2: length of the second buffer
+ *
+ *  dump the image to the file if the memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr1, 1, len1, fp); \
+    rc = fwrite(p_addr2, 1, len2, fp); \
+    CDBG_ERROR("%s:%d] written %zu %zu", __func__, __LINE__, len1, len2); \
+    fclose(fp); \
+  } else { \
+    CDBG_ERROR("%s:%d] open %s failed", __func__, __LINE__, filename); \
+  } \
+})
+
+/** MM_JPEG_CHK_ABORT:
+ *  @p: client pointer
+ *  @ret: return value
+ *  @label: label to jump to
+ *
+ *  check the abort failure
+ **/
+#define MM_JPEG_CHK_ABORT(p, ret, label) ({ \
+  if (MM_JPEG_ABORT_INIT == p->abort_state) { \
+    CDBG_ERROR("%s:%d] jpeg abort", __func__, __LINE__); \
+    ret = OMX_ErrorNone; \
+    goto label; \
+  } \
+})
+
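+/* A job/session id packs three indices into one 32-bit value:
+ *   bits  0-7  : client index  (GET_CLIENT_IDX)
+ *   bits  8-15 : session index (GET_SESSION_IDX)
+ *   bits 16-23 : job index     (GET_JOB_IDX)
+ */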
+#define GET_CLIENT_IDX(x) ((x) & 0xff)
+#define GET_SESSION_IDX(x) (((x) >> 8) & 0xff)
+#define GET_JOB_IDX(x) (((x) >> 16) & 0xff)
+
+typedef struct {
+  union {
+    int i_data[MM_JPEG_CIRQ_SIZE];
+    void *p_data[MM_JPEG_CIRQ_SIZE];
+  };
+  int front;
+  int rear;
+  int count;
+  pthread_mutex_t lock;
+} mm_jpeg_cirq_t;
+
+/** cirq_reset:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Resets the circular queue
+ *
+ **/
+static inline void cirq_reset(mm_jpeg_cirq_t *q)
+{
+  q->front = 0;
+  q->rear = 0;
+  q->count = 0;
+  pthread_mutex_init(&q->lock, NULL);
+}
+
+/** cirq_empty:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       true if the queue is empty, false otherwise
+ *
+ *  Description:
+ *       check if the circular queue is empty
+ *
+ **/
+#define cirq_empty(q) (q->count == 0)
+
+/** cirq_full:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       true if the queue is full, false otherwise
+ *
+ *  Description:
+ *       check if the circular queue is full
+ *
+ **/
+#define cirq_full(q) (q->count == MM_JPEG_CIRQ_SIZE)
+
+/** cirq_enqueue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be inserted
+ *
+ *  Return:
+ *       0 on success, -1 if the queue is full
+ *
+ *  Description:
+ *       enqueue an element into circular queue
+ *
+ **/
+#define cirq_enqueue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_full(q)) { \
+    rc = -1; \
+  } else { \
+    q->type[q->rear] = data; \
+    q->rear = (q->rear + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count++; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
+
+/** cirq_dequeue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be popped
+ *
+ *  Return:
+ *       0 on success, -1 if the queue is empty
+ *
+ *  Description:
+ *       dequeue an element from the circular queue
+ *
+ **/
+#define cirq_dequeue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_empty(q)) { \
+    rc = -1; \
+  } else { \
+    data = q->type[q->front]; \
+    q->front = (q->front + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count--; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
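+
+/* The "type" argument of cirq_enqueue/cirq_dequeue selects which member
+ * of the mm_jpeg_cirq_t union is used for storage, e.g.
+ * cirq_enqueue(q, p_data, ptr) stores into p_data[] and
+ * cirq_dequeue(q, p_data, ptr) reads it back. */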
+
+
+typedef union {
+  uint32_t u32;
+  void* p;
+} mm_jpeg_q_data_t;
+
+typedef struct {
+  struct cam_list list;
+  mm_jpeg_q_data_t data;
+} mm_jpeg_q_node_t;
+
+typedef struct {
+  mm_jpeg_q_node_t head; /* dummy head */
+  uint32_t size;
+  pthread_mutex_t lock;
+} mm_jpeg_queue_t;
+
+typedef enum {
+  MM_JPEG_CMD_TYPE_JOB,          /* job cmd */
+  MM_JPEG_CMD_TYPE_EXIT,         /* EXIT cmd for exiting jobMgr thread */
+  MM_JPEG_CMD_TYPE_DECODE_JOB,
+  MM_JPEG_CMD_TYPE_MAX
+} mm_jpeg_cmd_type_t;
+
+typedef struct mm_jpeg_job_session {
+  uint32_t client_hdl;           /* client handler */
+  uint32_t jobId;                /* job ID */
+  uint32_t sessionId;            /* session ID */
+  mm_jpeg_encode_params_t params; /* encode params */
+  mm_jpeg_decode_params_t dec_params; /* decode params */
+  mm_jpeg_encode_job_t encode_job;             /* job description */
+  mm_jpeg_decode_job_t decode_job;
+  pthread_t encode_pid;          /* encode thread handle */
+
+  void *jpeg_obj;                /* ptr to mm_jpeg_obj */
+  jpeg_job_status_t job_status;  /* job status */
+
+  int state_change_pending;      /* flag to indicate if state change is pending */
+  OMX_ERRORTYPE error_flag;      /* variable to indicate error during encoding */
+  mm_jpeg_abort_state_t abort_state; /* variable to indicate abort during encoding */
+
+  /* OMX related */
+  OMX_HANDLETYPE omx_handle;                      /* handle to omx engine */
+  OMX_CALLBACKTYPE omx_callbacks;                 /* callbacks to omx engine */
+
+  /* buffer headers */
+  OMX_BUFFERHEADERTYPE *p_in_omx_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_in_omx_thumb_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_out_omx_buf[MM_JPEG_MAX_BUF];
+
+  OMX_PARAM_PORTDEFINITIONTYPE inputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE outputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE inputTmbPort;
+
+  /* event locks */
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+
+  QEXIF_INFO_DATA exif_info_local[MAX_EXIF_TABLE_ENTRIES];  //all exif tags for JPEG encoder
+  int exif_count_local;
+
+  mm_jpeg_cirq_t cb_q;
+  int32_t ebd_count;
+  int32_t fbd_count;
+
+  /* this flag represents whether the job is active */
+  OMX_BOOL active;
+
+  /* this flag indicates if the configuration is complete */
+  OMX_BOOL config;
+
+  /* job history count to generate unique id */
+  unsigned int job_hist;
+
+  OMX_BOOL encoding;
+
+  buffer_t work_buffer;
+
+  OMX_EVENTTYPE omxEvent;
+  int event_pending;
+
+  uint8_t *meta_enc_key;
+  size_t meta_enc_keylen;
+
+  struct mm_jpeg_job_session *next_session;
+
+  uint32_t curr_out_buf_idx;
+
+  uint32_t num_omx_sessions;
+  OMX_BOOL auto_out_buf;
+
+  mm_jpeg_queue_t *session_handle_q;
+  mm_jpeg_queue_t *out_buf_q;
+} mm_jpeg_job_session_t;
+
+typedef struct {
+  mm_jpeg_encode_job_t encode_job;
+  uint32_t job_id;
+  uint32_t client_handle;
+} mm_jpeg_encode_job_info_t;
+
+typedef struct {
+  mm_jpeg_decode_job_t decode_job;
+  uint32_t job_id;
+  uint32_t client_handle;
+} mm_jpeg_decode_job_info_t;
+
+typedef struct {
+  mm_jpeg_cmd_type_t type;
+  union {
+    mm_jpeg_encode_job_info_t enc_info;
+    mm_jpeg_decode_job_info_t dec_info;
+  };
+} mm_jpeg_job_q_node_t;
+
+typedef struct {
+  uint8_t is_used;                /* flag: if is a valid client */
+  uint32_t client_handle;         /* client handle */
+  mm_jpeg_job_session_t session[MM_JPEG_MAX_SESSION];
+  pthread_mutex_t lock;           /* job lock */
+} mm_jpeg_client_t;
+
+typedef struct {
+  pthread_t pid;                  /* job cmd thread ID */
+  cam_semaphore_t job_sem;        /* semaphore for job cmd thread */
+  mm_jpeg_queue_t job_queue;      /* queue for job to do */
+} mm_jpeg_job_cmd_thread_t;
+
+#define MAX_JPEG_CLIENT_NUM 8
+typedef struct mm_jpeg_obj_t {
+  /* ClientMgr */
+  int num_clients;                                /* num of clients */
+  mm_jpeg_client_t clnt_mgr[MAX_JPEG_CLIENT_NUM]; /* client manager */
+
+  /* JobMgr */
+  pthread_mutex_t job_lock;                       /* job lock */
+  mm_jpeg_job_cmd_thread_t job_mgr;               /* job mgr thread including todo_q*/
+  mm_jpeg_queue_t ongoing_job_q;                  /* queue for ongoing jobs */
+  buffer_t ionBuffer[MM_JPEG_CONCURRENT_SESSIONS_COUNT];
+
+
+  /* Max pic dimension for work buf calc*/
+  uint32_t max_pic_w;
+  uint32_t max_pic_h;
+  uint32_t work_buf_cnt;
+
+  uint32_t num_sessions;
+
+} mm_jpeg_obj;
+
+/** mm_jpeg_transition_func_t:
+ *
+ * Intermediate function for state transition
+ **/
+typedef OMX_ERRORTYPE (*mm_jpeg_transition_func_t)(void *);
+
+extern int32_t mm_jpeg_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj);
+extern uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t* job,
+  uint32_t* jobId);
+extern int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId);
+extern int32_t mm_jpeg_close(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl);
+extern int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id);
+extern int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj,
+  uint32_t session_id);
+
+extern int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t* job,
+  uint32_t* jobId);
+
+extern int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_decode_params_t *p_params,
+  uint32_t* p_session_id);
+
+extern int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj,
+  uint32_t session_id);
+
+extern int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId);
+
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj,
+    mm_jpeg_job_q_node_t* job_node);
+
+/* utility functions declared in mm-camera-interface2.c
+ * and needed by mm-camera and below */
+uint32_t mm_jpeg_util_generate_handler(uint8_t index);
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler);
+
+/* basic queue functions */
+extern int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue,
+    mm_jpeg_q_data_t data);
+extern int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue,
+    mm_jpeg_q_data_t data);
+extern mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue);
+extern uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue);
+extern mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue);
+extern int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data);
+extern int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data);
+extern int process_meta_data(cam_metadata_info_t *p_meta,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam3a_params);
+
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+  OMX_STATETYPE new_state,
+  mm_jpeg_transition_func_t p_exec);
+
+int map_jpeg_format(mm_jpeg_color_format color_fmt);
+
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session);
+/**
+ *
+ * special queue functions for job queue
+ **/
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+
+
+/** mm_jpeg_queue_func_t:
+ *
+ * Intermediate function for queue operation
+ **/
+typedef void (*mm_jpeg_queue_func_t)(void *);
+
+
+#endif /* MM_JPEG_H_ */
+
+
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
new file mode 100644
index 0000000..4e55296
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
@@ -0,0 +1,76 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_DBG_H__
+#define __MM_JPEG_DBG_H__
+
+#include "cam_types.h"
+
+/* Choose the debug log level. This does not affect the error logs.
+   0: turns off CDBG and CDBG_HIGH logs
+   1: turns on CDBG_HIGH logs
+   2: turns on CDBG_HIGH and CDBG logs */
+extern volatile uint32_t gMmCameraJpegLogLevel;
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-jpeg-intf"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-jpeg-intf"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGD_IF(gMmCameraJpegLogLevel >= 2, fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...)   ALOGD_IF(gMmCameraJpegLogLevel >= 1, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_JPEG_DBG_H__ */
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
new file mode 100644
index 0000000..39fec8d
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
@@ -0,0 +1,126 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INLINES_H_
+#define MM_JPEG_INLINES_H_
+
+#include "mm_jpeg.h"
+
+/** mm_jpeg_get_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       pointer to the job session, NULL if the job id is invalid
+ *
+ *  Description:
+ *       Get the job session from the job id
+ *
+ **/
+static inline mm_jpeg_job_session_t *mm_jpeg_get_session(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  mm_jpeg_job_session_t *p_session = NULL;
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+
+  CDBG("%s:%d] client_idx %d session_idx %d", __func__, __LINE__,
+    client_idx, session_idx);
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    CDBG_ERROR("%s:%d] invalid job id %x", __func__, __LINE__,
+      job_id);
+    return NULL;
+  }
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  return p_session;
+}
+
+/** mm_jpeg_get_new_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_idx: client index
+ *    @pp_session: output ptr to the newly reserved session
+ *
+ *  Return:
+ *       session index, -1 if no free session is available
+ *
+ *  Description:
+ *       Reserve a free session slot for the client and mark it active
+ *
+ **/
+static inline int mm_jpeg_get_new_session_idx(mm_jpeg_obj *my_obj, int client_idx,
+  mm_jpeg_job_session_t **pp_session)
+{
+  int i = 0;
+  int index = -1;
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+    if (!my_obj->clnt_mgr[client_idx].session[i].active) {
+      *pp_session = &my_obj->clnt_mgr[client_idx].session[i];
+      my_obj->clnt_mgr[client_idx].session[i].active = OMX_TRUE;
+      index = i;
+      pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+      break;
+    }
+    pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  }
+  return index;
+}
+
+/** mm_jpeg_remove_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Mark the session identified by the job id as inactive
+ *
+ **/
+static inline void mm_jpeg_remove_session_idx(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+  CDBG("%s:%d] client_idx %d session_idx %d", __func__, __LINE__,
+    client_idx, session_idx);
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  my_obj->clnt_mgr[client_idx].session[session_idx].active = OMX_FALSE;
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+}
+
+
+
+#endif /* MM_JPEG_INLINES_H_ */
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
new file mode 100644
index 0000000..0a1b0ae
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
@@ -0,0 +1,95 @@
+/* Copyright (c) 2013-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_IONBUF_H__
+#define __MM_JPEG_IONBUF_H__
+
+
+#include <stdio.h>
+#include <linux/msm_ion.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <errno.h>
+#include <linux/android_pmem.h>
+#include <fcntl.h>
+#include "mm_jpeg_dbg.h"
+
+typedef struct  {
+  struct ion_fd_data ion_info_fd;
+  struct ion_allocation_data alloc;
+  int p_pmem_fd;
+  size_t size;
+  int ion_fd;
+  uint8_t *addr;
+} buffer_t;
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *     @cached: cached allocation flag
+ *
+ *  Return:
+ *     buffer address
+ *
+ *  Description:
+ *      allocates ION buffer
+ *
+ **/
+void* buffer_allocate(buffer_t *p_buffer, int cached);
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer);
+
+/** buffer_invalidate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      Invalidates the cached buffer
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer);
+
+#endif
+
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
new file mode 100644
index 0000000..9c4664d
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -0,0 +1,2757 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+
+#define ENCODING_MODE_PARALLEL 1
+
+#define META_KEYFILE "/data/metadata.key"
+
+#define MM_JPG_USE_TURBO_CLOCK (0)
+
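+/* OMX callbacks registered with the encoder component in
+ * mm_jpeg_session_create() */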
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_EVENTTYPE eEvent,
+    OMX_U32 nData1,
+    OMX_U32 nData2,
+    OMX_PTR pEventData);
+
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session);
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_dst_ptr(
+  mm_jpeg_queue_t* queue, void * dst_ptr);
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session);
+
+/** mm_jpeg_session_send_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Send the buffers to OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_send_buffers(void *data)
+{
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_BUFFER_INFO lbuffer_info;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_in_omx_buf[i]), 0,
+      &lbuffer_info, p_params->src_main_buf[i].buf_size,
+      p_params->src_main_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_tmb_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    lbuffer_info.fd = (OMX_U32)p_params->src_thumb_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_omx_thumb_buf[i]), 2,
+        &lbuffer_info, p_params->src_thumb_buf[i].buf_size,
+        p_params->src_thumb_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, i);
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+      1, NULL, p_params->dest_buf[i].buf_size,
+      p_params->dest_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+
+/** mm_jpeg_session_free_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Free the buffers from OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_free_buffers(void *data)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 0, p_session->p_in_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_tmb_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 2, p_session->p_in_omx_thumb_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+
+
+
+/** mm_jpeg_session_change_state:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *    @new_state: new state to be transitioned to
+ *    @p_exec: transition function
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       This method is used for state transition
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+  OMX_STATETYPE new_state,
+  mm_jpeg_transition_func_t p_exec)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_STATETYPE current_state;
+  CDBG("%s:%d] new_state %d p_exec %p", __func__, __LINE__,
+    new_state, p_exec);
+
+
+  pthread_mutex_lock(&p_session->lock);
+
+  ret = OMX_GetState(p_session->omx_handle, &current_state);
+
+  if (ret) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  if (current_state == new_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  p_session->state_change_pending = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+    new_state, NULL);
+  pthread_mutex_lock(&p_session->lock);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorIncorrectStateTransition;
+  }
+  CDBG("%s:%d] ", __func__, __LINE__);
+  if ((OMX_ErrorNone != p_session->error_flag) &&
+      (OMX_ErrorOverflow != p_session->error_flag)) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, p_session->error_flag);
+    pthread_mutex_unlock(&p_session->lock);
+    return p_session->error_flag;
+  }
+  if (p_exec) {
+    ret = p_exec(p_session);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      pthread_mutex_unlock(&p_session->lock);
+      return ret;
+    }
+  }
+  CDBG("%s:%d] ", __func__, __LINE__);
+  if (p_session->state_change_pending) {
+    CDBG("%s:%d] before wait", __func__, __LINE__);
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    CDBG("%s:%d] after wait", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpeg_session_create:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error types
+ *
+ *  Description:
+ *       Create a jpeg encode session
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_create(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+  pthread_mutex_init(&p_session->lock, NULL);
+  pthread_cond_init(&p_session->cond, NULL);
+  cirq_reset(&p_session->cb_q);
+  p_session->state_change_pending = OMX_FALSE;
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->error_flag = OMX_ErrorNone;
+  p_session->ebd_count = 0;
+  p_session->fbd_count = 0;
+  p_session->encode_pid = -1;
+  p_session->config = OMX_FALSE;
+  p_session->exif_count_local = 0;
+  p_session->auto_out_buf = OMX_FALSE;
+
+  p_session->omx_callbacks.EmptyBufferDone = mm_jpeg_ebd;
+  p_session->omx_callbacks.FillBufferDone = mm_jpeg_fbd;
+  p_session->omx_callbacks.EventHandler = mm_jpeg_event_handler;
+
+
+  rc = OMX_GetHandle(&p_session->omx_handle,
+      "OMX.qcom.image.jpeg.encoder",
+      (void *)p_session,
+      &p_session->omx_callbacks);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] OMX_GetHandle failed (%d)", __func__, __LINE__, rc);
+    return rc;
+  }
+
+  my_obj->num_sessions++;
+
+  return rc;
+}
+
+
+
+/** mm_jpeg_session_destroy:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Destroy a jpeg encode session
+ *
+ **/
+void mm_jpeg_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_STATETYPE state;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+  if (NULL == p_session->omx_handle) {
+    CDBG_ERROR("%s:%d] invalid handle", __func__, __LINE__);
+    return;
+  }
+
+  rc = OMX_GetState(p_session->omx_handle, &state);
+
+  //Check state before state transition
+  if ((state == OMX_StateExecuting) || (state == OMX_StatePause)) {
+    rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+    if (rc) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    }
+  }
+
+  rc = OMX_GetState(p_session->omx_handle, &state);
+
+  if (state == OMX_StateIdle) {
+    rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+      mm_jpeg_session_free_buffers);
+    if (rc) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    }
+  }
+
+  rc = OMX_FreeHandle(p_session->omx_handle);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] OMX_FreeHandle failed (%d)", __func__, __LINE__, rc);
+  }
+  p_session->omx_handle = NULL;
+
+  pthread_mutex_destroy(&p_session->lock);
+  pthread_cond_destroy(&p_session->cond);
+
+  if (NULL != p_session->meta_enc_key) {
+    free(p_session->meta_enc_key);
+    p_session->meta_enc_key = NULL;
+  }
+
+  my_obj->num_sessions--;
+
+  // Destroy next session
+  if (p_session->next_session) {
+    mm_jpeg_session_destroy(p_session->next_session);
+  }
+
+  CDBG("%s:%d] X", __func__, __LINE__);
+}
+
+
+
+/** mm_jpeg_session_config_main_buffer_offset:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the buffer offsets
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_buffer_offset(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE buffer_index;
+  QOMX_YUV_FRAME_INFO frame_info;
+  size_t totalSize = 0;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[0];
+
+  memset(&frame_info, 0x0, sizeof(QOMX_YUV_FRAME_INFO));
+
+  frame_info.cbcrStartOffset[0] = p_src_buf->offset.mp[0].len;
+  frame_info.cbcrStartOffset[1] = p_src_buf->offset.mp[1].len;
+  frame_info.yOffset = p_src_buf->offset.mp[0].offset;
+  frame_info.cbcrOffset[0] = p_src_buf->offset.mp[1].offset;
+  frame_info.cbcrOffset[1] = p_src_buf->offset.mp[2].offset;
+  totalSize = p_src_buf->buf_size;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME, &buffer_index);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  CDBG_HIGH("%s:%d] yOffset = %d, cbcrOffset = (%d %d), totalSize = %zu,"
+    "cbcrStartOffset = (%d %d)", __func__, __LINE__,
+    (int)frame_info.yOffset,
+    (int)frame_info.cbcrOffset[0],
+    (int)frame_info.cbcrOffset[1],
+    totalSize,
+    (int)frame_info.cbcrStartOffset[0],
+    (int)frame_info.cbcrStartOffset[1]);
+
+  rc = OMX_SetParameter(p_session->omx_handle, buffer_index, &frame_info);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_encoding_mode:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the serial or parallel encoding
+ *       mode
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_encoding_mode(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_ENCODING_MODE encoding_mode;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_ENCODING_MODE_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  if (ENCODING_MODE_PARALLEL) {
+    encoding_mode = OMX_Parallel_Encoding;
+  } else {
+    encoding_mode = OMX_Serial_Encoding;
+  }
+  CDBG_HIGH("%s:%d] encoding mode = %d ", __func__, __LINE__,
+    (int)encoding_mode);
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &encoding_mode);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_speed_mode:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *      Configure normal or high speed jpeg
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_speed_mode(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_JPEG_SPEED jpeg_speed;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_JPEG_SPEED_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  if (MM_JPG_USE_TURBO_CLOCK) {
+    jpeg_speed.speedMode = QOMX_JPEG_SPEED_MODE_HIGH;
+  } else {
+    jpeg_speed.speedMode = QOMX_JPEG_SPEED_MODE_NORMAL;
+  }
+
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &jpeg_speed);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+
+/** mm_jpeg_mem_ops:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Pass the memory operation (get_memory) callback
+ *       to the OMX component
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_mem_ops(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_MEM_OPS mem_ops;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  mem_ops.get_memory = p_params->get_memory;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_MEM_OPS_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &mem_ops);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_metadata:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Pass meta data
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_metadata(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE indexType;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  QOMX_METADATA lMeta;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+      QOMX_IMAGE_EXT_METADATA_NAME, &indexType);
+
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  lMeta.metadata = (OMX_U8 *)p_jobparams->p_metadata;
+  lMeta.metaPayloadSize = sizeof(*p_jobparams->p_metadata);
+  lMeta.mobicat_mask = p_jobparams->mobicat_mask;
+
+  rc = OMX_SetConfig(p_session->omx_handle, indexType, &lMeta);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return OMX_ErrorNone;
+}
+
+/** mm_jpeg_meta_enc_key:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Pass metadata encrypt key
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_meta_enc_key(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE indexType;
+  QOMX_META_ENC_KEY lKey;
+
+  lKey.metaKey = p_session->meta_enc_key;
+  lKey.keyLen = p_session->meta_enc_keylen;
+
+  if ((!lKey.metaKey) || (!lKey.keyLen)){
+    CDBG_ERROR("%s:%d] Key is invalid", __func__, __LINE__);
+    return OMX_ErrorNone;
+  }
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+      QOMX_IMAGE_EXT_META_ENC_KEY_NAME, &indexType);
+
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  rc = OMX_SetConfig(p_session->omx_handle, indexType, &lKey);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return OMX_ErrorNone;
+}
+
+/** map_jpeg_format:
+ *
+ *  Arguments:
+ *    @color_fmt: color format
+ *
+ *  Return:
+ *       OMX color format
+ *
+ *  Description:
+ *       Map mmjpeg color format to OMX color format
+ *
+ **/
+int map_jpeg_format(mm_jpeg_color_format color_fmt)
+{
+  switch (color_fmt) {
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+    return (int)OMX_COLOR_FormatYUV420SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+    return (int)OMX_COLOR_FormatYUV422SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+     return (int)OMX_COLOR_FormatMonochrome;
+  default:
+    CDBG_ERROR("%s:%d] invalid format %d", __func__, __LINE__, color_fmt);
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+  }
+}
+
+/** mm_jpeg_session_config_ports:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[0];
+
+  p_session->inputPort.nPortIndex = 0;
+  p_session->outputPort.nPortIndex = 1;
+  p_session->inputTmbPort.nPortIndex = 2;
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputTmbPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  p_session->inputPort.format.image.nFrameWidth =
+    (OMX_U32)p_params->main_dim.src_dim.width;
+  p_session->inputPort.format.image.nFrameHeight =
+    (OMX_U32)p_params->main_dim.src_dim.height;
+  p_session->inputPort.format.image.nStride =
+    p_src_buf->offset.mp[0].stride;
+  p_session->inputPort.format.image.nSliceHeight =
+    (OMX_U32)p_src_buf->offset.mp[0].scanline;
+  p_session->inputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+  p_session->inputPort.nBufferSize =
+    p_params->src_main_buf[0/*p_jobparams->src_index*/].buf_size;
+  p_session->inputPort.nBufferCountActual = (OMX_U32)p_params->num_src_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+    mm_jpeg_buf_t *p_tmb_buf =
+      &p_params->src_thumb_buf[0];
+    p_session->inputTmbPort.format.image.nFrameWidth =
+      (OMX_U32)p_params->thumb_dim.src_dim.width;
+    p_session->inputTmbPort.format.image.nFrameHeight =
+      (OMX_U32)p_params->thumb_dim.src_dim.height;
+    p_session->inputTmbPort.format.image.nStride =
+      p_tmb_buf->offset.mp[0].stride;
+    p_session->inputTmbPort.format.image.nSliceHeight =
+      (OMX_U32)p_tmb_buf->offset.mp[0].scanline;
+    p_session->inputTmbPort.format.image.eColorFormat =
+      map_jpeg_format(p_params->thumb_color_format);
+    p_session->inputTmbPort.nBufferSize =
+      p_params->src_thumb_buf[0].buf_size;
+    p_session->inputTmbPort.nBufferCountActual = (OMX_U32)p_params->num_tmb_bufs;
+    ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+      &p_session->inputTmbPort);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+
+    // Enable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+  } else {
+    // Disable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+  }
+
+  p_session->outputPort.nBufferSize =
+    p_params->dest_buf[0].buf_size;
+  p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = 1;
+  rotate.nRotation = (OMX_S32)p_params->rotation;
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+      &rotate);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+    return ret;
+  }
+  CDBG("%s:%d] Set rotation to %d at port_idx = %d", __func__, __LINE__,
+      (int)p_params->rotation, (int)rotate.nPortIndex);
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_thumbnail:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the thumbnail settings
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_thumbnail(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_THUMBNAIL_INFO thumbnail_info;
+  OMX_INDEXTYPE thumb_indextype;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *p_thumb_dim = &p_jobparams->thumb_dim;
+  QOMX_YUV_FRAME_INFO *p_frame_info = &thumbnail_info.tmbOffset;
+  mm_jpeg_buf_t *p_tmb_buf = &p_params->src_thumb_buf[p_jobparams->thumb_index];
+
+  CDBG_HIGH("%s:%d] encode_thumbnail %u", __func__, __LINE__,
+    p_params->encode_thumbnail);
+  if (OMX_FALSE == p_params->encode_thumbnail) {
+    return ret;
+  }
+
+  if ((p_thumb_dim->dst_dim.width == 0) || (p_thumb_dim->dst_dim.height == 0)) {
+    CDBG_ERROR("%s:%d] Error invalid output dim for thumbnail",
+      __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  if ((p_thumb_dim->src_dim.width == 0) || (p_thumb_dim->src_dim.height == 0)) {
+    CDBG_ERROR("%s:%d] Error invalid input dim for thumbnail",
+      __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  if ((p_thumb_dim->crop.width == 0) || (p_thumb_dim->crop.height == 0)) {
+    p_thumb_dim->crop.width = p_thumb_dim->src_dim.width;
+    p_thumb_dim->crop.height = p_thumb_dim->src_dim.height;
+  }
+
+  /* check crop boundary */
+  if ((p_thumb_dim->crop.width + p_thumb_dim->crop.left > p_thumb_dim->src_dim.width) ||
+    (p_thumb_dim->crop.height + p_thumb_dim->crop.top > p_thumb_dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] invalid crop boundary (%d, %d) offset (%d, %d) out of (%d, %d)",
+      __func__, __LINE__,
+      p_thumb_dim->crop.width,
+      p_thumb_dim->crop.height,
+      p_thumb_dim->crop.left,
+      p_thumb_dim->crop.top,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&thumbnail_info, 0x0, sizeof(QOMX_THUMBNAIL_INFO));
+  ret = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_THUMBNAIL_NAME,
+    &thumb_indextype);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+    return ret;
+  }
+
+  /* fill thumbnail info */
+  thumbnail_info.scaling_enabled = 1;
+  thumbnail_info.input_width = (OMX_U32)p_thumb_dim->src_dim.width;
+  thumbnail_info.input_height = (OMX_U32)p_thumb_dim->src_dim.height;
+  thumbnail_info.crop_info.nWidth = (OMX_U32)p_thumb_dim->crop.width;
+  thumbnail_info.crop_info.nHeight = (OMX_U32)p_thumb_dim->crop.height;
+  thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
+  thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
+  thumbnail_info.rotation = (OMX_U32)p_params->thumb_rotation;
+
+  if ((p_thumb_dim->dst_dim.width > p_thumb_dim->src_dim.width)
+    || (p_thumb_dim->dst_dim.height > p_thumb_dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] Incorrect thumbnail dim %dx%d resetting to %dx%d",
+      __func__, __LINE__,
+      p_thumb_dim->dst_dim.width,
+      p_thumb_dim->dst_dim.height,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    thumbnail_info.output_width = (OMX_U32)p_thumb_dim->src_dim.width;
+    thumbnail_info.output_height = (OMX_U32)p_thumb_dim->src_dim.height;
+  } else {
+    thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.width;
+    thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.height;
+  }
+
+  memset(p_frame_info, 0x0, sizeof(*p_frame_info));
+
+  p_frame_info->cbcrStartOffset[0] = p_tmb_buf->offset.mp[0].len;
+  p_frame_info->cbcrStartOffset[1] = p_tmb_buf->offset.mp[1].len;
+  p_frame_info->yOffset = p_tmb_buf->offset.mp[0].offset;
+  p_frame_info->cbcrOffset[0] = p_tmb_buf->offset.mp[1].offset;
+  p_frame_info->cbcrOffset[1] = p_tmb_buf->offset.mp[2].offset;
+
+  ret = OMX_SetConfig(p_session->omx_handle, thumb_indextype,
+    &thumbnail_info);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+
+  return ret;
+}
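+
+/*
+ * Editor's note (worked example with assumed numbers): if the caller leaves
+ * the thumbnail crop at zero, the code above widens it to the full source,
+ * e.g. a 640x480 source with crop {0,0,0,0} becomes
+ *
+ *   crop = { left = 0, top = 0, width = 640, height = 480 };
+ *
+ * while a request such as { left = 600, width = 100 } is rejected because
+ * 600 + 100 > 640. Upscaled thumbnails are clamped back to the source size.
+ */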
+
+/** mm_jpeg_session_config_main_crop:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image crop
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_crop(mm_jpeg_job_session_t *p_session)
+{
+  OMX_CONFIG_RECTTYPE rect_type_in, rect_type_out;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *dim = &p_jobparams->main_dim;
+
+  if ((dim->crop.width == 0) || (dim->crop.height == 0)) {
+    dim->crop.width = dim->src_dim.width;
+    dim->crop.height = dim->src_dim.height;
+  }
+  /* error check first */
+  if ((dim->crop.width + dim->crop.left > dim->src_dim.width) ||
+    (dim->crop.height + dim->crop.top > dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] invalid crop boundary (%d, %d) out of (%d, %d)",
+      __func__, __LINE__,
+      dim->crop.width + dim->crop.left,
+      dim->crop.height + dim->crop.top,
+      dim->src_dim.width,
+      dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&rect_type_in, 0, sizeof(rect_type_in));
+  memset(&rect_type_out, 0, sizeof(rect_type_out));
+  rect_type_in.nPortIndex = 0;
+  rect_type_out.nPortIndex = 0;
+
+  if ((dim->src_dim.width != dim->crop.width) ||
+    (dim->src_dim.height != dim->crop.height) ||
+    (dim->src_dim.width != dim->dst_dim.width) ||
+    (dim->src_dim.height != dim->dst_dim.height)) {
+    /* Scaler information */
+    rect_type_in.nWidth = CEILING2(dim->crop.width);
+    rect_type_in.nHeight = CEILING2(dim->crop.height);
+    rect_type_in.nLeft = dim->crop.left;
+    rect_type_in.nTop = dim->crop.top;
+
+    if (dim->dst_dim.width && dim->dst_dim.height) {
+      rect_type_out.nWidth = (OMX_U32)dim->dst_dim.width;
+      rect_type_out.nHeight = (OMX_U32)dim->dst_dim.height;
+    }
+  }
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonInputCrop,
+    &rect_type_in);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+
+  CDBG("%s:%d] OMX_IndexConfigCommonInputCrop w = %d, h = %d, l = %d, t = %d,"
+    " port_idx = %d", __func__, __LINE__,
+    (int)rect_type_in.nWidth, (int)rect_type_in.nHeight,
+    (int)rect_type_in.nLeft, (int)rect_type_in.nTop,
+    (int)rect_type_in.nPortIndex);
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonOutputCrop,
+    &rect_type_out);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+  CDBG("%s:%d] OMX_IndexConfigCommonOutputCrop w = %d, h = %d,"
+    " port_idx = %d", __func__, __LINE__,
+    (int)rect_type_out.nWidth, (int)rect_type_out.nHeight,
+    (int)rect_type_out.nPortIndex);
+
+  return ret;
+}
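+
+/*
+ * Editor's note (illustrative sketch with assumed values): when the main
+ * image is cropped and scaled, e.g. a 4000x3000 source center-cropped to
+ * 3840x2160 and encoded at 1920x1080, the two configs set above carry
+ * approximately:
+ *
+ *   rect_type_in  = { nLeft = 80, nTop = 420,
+ *                     nWidth = CEILING2(3840), nHeight = CEILING2(2160) };
+ *   rect_type_out = { nWidth = 1920, nHeight = 1080 };
+ *
+ * With no crop and no scaling both rectangles stay zeroed, leaving the
+ * encoder input/output crop untouched.
+ */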
+
+/** mm_jpeg_session_config_main:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  /* config port */
+  CDBG("%s:%d] config port", __func__, __LINE__);
+  rc = mm_jpeg_session_config_ports(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config port failed", __func__);
+    return rc;
+  }
+
+  /* config buffer offset */
+  CDBG("%s:%d] config main buf offset", __func__, __LINE__);
+  rc = mm_jpeg_session_config_main_buffer_offset(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config buffer offset failed", __func__);
+    return rc;
+  }
+
+  /* set the encoding mode */
+  rc = mm_jpeg_encoding_mode(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config encoding mode failed", __func__);
+    return rc;
+  }
+
+  /* set the metadata encrypt key */
+  rc = mm_jpeg_meta_enc_key(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config session failed", __func__);
+    return rc;
+  }
+
+  /* set the mem ops */
+  rc = mm_jpeg_mem_ops(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config mem ops failed", __func__);
+    return rc;
+  }
+  /* set the jpeg speed mode */
+  rc = mm_jpeg_speed_mode(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config speed mode failed", __func__);
+    return rc;
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_session_config_common:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure common parameters
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_common(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE exif_idx;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  QOMX_EXIF_INFO exif_info;
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = 1;
+  rotate.nRotation = (OMX_S32)p_jobparams->rotation;
+  rc = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+    &rotate);
+  if (OMX_ErrorNone != rc) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+      return rc;
+  }
+  CDBG("%s:%d] Set rotation to %d at port_idx = %d", __func__, __LINE__,
+    (int)p_jobparams->rotation, (int)rotate.nPortIndex);
+
+  /* Set Exif data*/
+  memset(&p_session->exif_info_local[0], 0, sizeof(p_session->exif_info_local));
+  rc = OMX_GetExtensionIndex(p_session->omx_handle, QOMX_IMAGE_EXT_EXIF_NAME,
+    &exif_idx);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+    return rc;
+  }
+
+  CDBG("%s:%d] Num of exif entries passed from HAL: %d", __func__, __LINE__,
+      (int)p_jobparams->exif_info.numOfEntries);
+  if (p_jobparams->exif_info.numOfEntries > 0) {
+    rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+        &p_jobparams->exif_info);
+    if (OMX_ErrorNone != rc) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+      return rc;
+    }
+  }
+  /* parse additional exif data from the metadata */
+  if (NULL != p_jobparams->p_metadata) {
+    exif_info.numOfEntries = 0;
+    exif_info.exif_data = &p_session->exif_info_local[0];
+    process_meta_data(p_jobparams->p_metadata, &exif_info, &p_jobparams->cam_exif_params);
+    /* After Parse metadata */
+    p_session->exif_count_local = (int)exif_info.numOfEntries;
+
+    if (exif_info.numOfEntries > 0) {
+      /* set exif tags */
+      CDBG("%s:%d] exif tags from metadata count %d", __func__, __LINE__,
+        (int)exif_info.numOfEntries);
+
+      rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+        &exif_info);
+      if (OMX_ErrorNone != rc) {
+        CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+        return rc;
+      }
+    }
+  }
+
+  return rc;
+}
+
+
+
+
+/** mm_jpeg_session_abort:
+ *
+ *  Arguments:
+ *    @p_session: jpeg session
+ *
+ *  Return:
+ *       OMX_BOOL
+ *
+ *  Description:
+ *       Abort ongoing job
+ *
+ **/
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  int rc = 0;
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+  pthread_mutex_lock(&p_session->lock);
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    CDBG("%s:%d] **** ALREADY ABORTED", __func__, __LINE__);
+    return 0;
+  }
+  p_session->abort_state = MM_JPEG_ABORT_INIT;
+  if (OMX_TRUE == p_session->encoding) {
+    p_session->state_change_pending = OMX_TRUE;
+
+    CDBG("%s:%d] **** ABORTING", __func__, __LINE__);
+    pthread_mutex_unlock(&p_session->lock);
+
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+    OMX_StateIdle, NULL);
+
+    if (ret != OMX_ErrorNone) {
+      CDBG("%s:%d] OMX_SendCommand returned error %d", __func__, __LINE__, ret);
+      return 1;
+    }
+    rc = mm_jpegenc_destroy_job(p_session);
+    if (rc != 0) {
+      CDBG("%s:%d] Destroy job returned error %d", __func__, __LINE__, rc);
+    }
+
+    pthread_mutex_lock(&p_session->lock);
+    if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+      CDBG("%s:%d] before wait", __func__, __LINE__);
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+    }
+    CDBG("%s:%d] after wait", __func__, __LINE__);
+  }
+  p_session->abort_state = MM_JPEG_ABORT_DONE;
+  pthread_mutex_unlock(&p_session->lock);
+
+
+  // Abort next session
+  if (p_session->next_session) {
+    mm_jpeg_session_abort(p_session->next_session);
+  }
+
+  CDBG("%s:%d] X", __func__, __LINE__);
+  return 0;
+}
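+
+/*
+ * Editor's note (flow sketch): the abort handshake above relies on
+ * mm_jpeg_event_handler() — while abort_state is MM_JPEG_ABORT_INIT, the
+ * next OMX event moves the state to MM_JPEG_ABORT_DONE and signals
+ * p_session->cond, which releases the pthread_cond_wait() in this function.
+ */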
+
+
+/** mm_jpeg_configure_job_params:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the job specific params
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_configure_job_params(
+  mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_IMAGE_PARAM_QFACTORTYPE q_factor;
+  QOMX_WORK_BUFFER work_buffer;
+  OMX_INDEXTYPE work_buffer_index;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  /* common config */
+  ret = mm_jpeg_session_config_common(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config common failed", __func__, __LINE__);
+
+  }
+
+  /* config Main Image crop */
+  CDBG("%s:%d] config main crop", __func__, __LINE__);
+  ret = mm_jpeg_session_config_main_crop(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s: config crop failed", __func__);
+    return ret;
+  }
+
+  /* set quality */
+  memset(&q_factor, 0, sizeof(q_factor));
+  q_factor.nPortIndex = 0;
+  q_factor.nQFactor = p_params->quality;
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexParamQFactor, &q_factor);
+  CDBG("%s:%d] config QFactor: %d", __func__, __LINE__, (int)q_factor.nQFactor);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error setting Q factor %d", __func__, __LINE__, ret);
+    return ret;
+  }
+
+  /* config thumbnail */
+  ret = mm_jpeg_session_config_thumbnail(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config thumbnail img failed", __func__, __LINE__);
+    return ret;
+  }
+
+  //Pass the ION buffer to be used as o/p for HW
+  memset(&work_buffer, 0x0, sizeof(QOMX_WORK_BUFFER));
+  ret = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_WORK_BUFFER_NAME,
+    &work_buffer_index);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error getting work buffer index %d",
+      __func__, __LINE__, ret);
+    return ret;
+  }
+  work_buffer.fd = p_session->work_buffer.p_pmem_fd;
+  work_buffer.vaddr = p_session->work_buffer.addr;
+  work_buffer.length = (uint32_t)p_session->work_buffer.size;
+  CDBG_ERROR("%s:%d] Work buffer %d %p WorkBufSize: %d", __func__, __LINE__,
+    work_buffer.fd, work_buffer.vaddr, work_buffer.length);
+
+  buffer_invalidate(&p_session->work_buffer);
+
+  ret = OMX_SetConfig(p_session->omx_handle, work_buffer_index,
+    &work_buffer);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+
+  /* set metadata */
+  ret = mm_jpeg_metadata(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s: config makernote data failed", __func__);
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_session_configure:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+  CDBG("%s:%d] E ", __func__, __LINE__);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* config main img */
+  ret = mm_jpeg_session_config_main(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config main img failed", __func__, __LINE__);
+    goto error;
+  }
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+    mm_jpeg_session_send_buffers);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to idle failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+    NULL);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to executing failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+error:
+  CDBG("%s:%d] X ret %d", __func__, __LINE__, ret);
+  return ret;
+}
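+
+/*
+ * Editor's note (state sketch): a successful configure walks the component
+ * through OMX_StateLoaded -> OMX_StateIdle (buffers are handed over via
+ * mm_jpeg_session_send_buffers while that transition is pending) ->
+ * OMX_StateExecuting, after which jobs can be submitted with
+ * OMX_EmptyThisBuffer()/OMX_FillThisBuffer().
+ */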
+
+
+
+
+
+
+/** mm_jpeg_session_encode:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Start the encoding
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_encode(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->encoding = OMX_FALSE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (OMX_FALSE == p_session->config) {
+    ret = mm_jpeg_session_configure(p_session);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+    }
+    p_session->config = OMX_TRUE;
+  }
+
+  ret = mm_jpeg_configure_job_params(p_session);
+  if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+  }
+  pthread_mutex_lock(&p_session->lock);
+  p_session->encoding = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+#ifdef MM_JPEG_DUMP_INPUT
+  char filename[256];
+  snprintf(filename, 255, "/data/misc/camera/jpeg/mm_jpeg_int%d.yuv", p_session->ebd_count);
+  DUMP_TO_FILE(filename,
+    p_session->p_in_omx_buf[p_jobparams->src_index]->pBuffer,
+    (size_t)p_session->p_in_omx_buf[p_jobparams->src_index]->nAllocLen);
+#endif
+
+  ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+    p_session->p_in_omx_buf[p_jobparams->src_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+#ifdef MM_JPEG_DUMP_INPUT
+  char thumb_filename[256];
+  snprintf(thumb_filename, 255, "/data/misc/camera/jpeg/mm_jpeg_int_t%d.yuv",
+    p_session->ebd_count);
+  DUMP_TO_FILE(thumb_filename,
+    p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index]->pBuffer,
+    (size_t)p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index]->nAllocLen);
+#endif
+    ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+        p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+    }
+  }
+
+  ret = OMX_FillThisBuffer(p_session->omx_handle,
+    p_session->p_out_omx_buf[p_jobparams->dst_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+  CDBG("%s:%d] X ", __func__, __LINE__);
+  return ret;
+}
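+
+/*
+ * Editor's note (illustrative call order): a single encode pass issues, in
+ * order, OMX_EmptyThisBuffer() for the main input, optionally
+ * OMX_EmptyThisBuffer() for the thumbnail input, and OMX_FillThisBuffer()
+ * for the output; completion is reported asynchronously through
+ * mm_jpeg_ebd()/mm_jpeg_fbd().
+ */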
+
+/** mm_jpeg_process_encoding_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg client
+ *    @job_node: job node
+ *
+ *  Return:
+ *       0 for success -1 otherwise
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_process_encoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = NULL;
+  uint32_t buf_idx;
+
+  /* check if valid session */
+  p_session = mm_jpeg_get_session(my_obj, job_node->enc_info.job_id);
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid job id %x", __func__, __LINE__,
+        job_node->enc_info.job_id);
+    return -1;
+  }
+
+  CDBG_HIGH("%s:%d] before dequeue session %d",
+                __func__, __LINE__, ret);
+
+  /* dequeue available omx handle */
+  qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+  p_session = qdata.p;
+
+  if (NULL == p_session) {
+    CDBG_HIGH("%s:%d] No available sessions %d",
+          __func__, __LINE__, ret);
+    /* No available handles */
+    qdata.p = job_node;
+    mm_jpeg_queue_enq_head(&my_obj->job_mgr.job_queue, qdata);
+
+    CDBG_HIGH("%s:%d]end enqueue %d",
+              __func__, __LINE__, ret);
+    return rc;
+
+  }
+
+  p_session->auto_out_buf = OMX_FALSE;
+  if (job_node->enc_info.encode_job.dst_index < 0) {
+    /* dequeue available output buffer idx */
+    qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+    buf_idx = qdata.u32;
+
+    if (0U == buf_idx) {
+      CDBG_ERROR("%s:%d] No available output buffers %d",
+          __func__, __LINE__, ret);
+      return OMX_ErrorUndefined;
+    }
+
+    buf_idx--;
+
+    job_node->enc_info.encode_job.dst_index = (int32_t)buf_idx;
+    p_session->auto_out_buf = OMX_TRUE;
+  }
+
+  /* send encode cmd to OMX, queue job into ongoing queue */
+  qdata.p = job_node;
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+  if (rc) {
+    CDBG_ERROR("%s:%d] jpeg enqueue failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+  p_session->encode_job = job_node->enc_info.encode_job;
+  p_session->jobId = job_node->enc_info.job_id;
+  ret = mm_jpeg_session_encode(p_session);
+  if (ret) {
+    CDBG_ERROR("%s:%d] encode session failed", __func__, __LINE__);
+    goto error;
+  }
+
+  CDBG("%s:%d] Success X ", __func__, __LINE__);
+  return rc;
+
+error:
+
+  if ((OMX_ErrorNone != ret) &&
+    (NULL != p_session->params.jpeg_cb)) {
+    p_session->job_status = JPEG_JOB_STATUS_ERROR;
+    CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+      p_session->job_status);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      NULL,
+      p_session->params.userdata);
+  }
+
+  /*remove the job*/
+  mm_jpegenc_job_done(p_session);
+  CDBG("%s:%d] Error X ", __func__, __LINE__);
+
+  return rc;
+}
+
+
+
+/** mm_jpeg_jobmgr_thread:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       job manager thread main function
+ *
+ **/
+static void *mm_jpeg_jobmgr_thread(void *data)
+{
+  mm_jpeg_q_data_t qdata;
+  int rc = 0;
+  int running = 1;
+  uint32_t num_ongoing_jobs = 0;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj*)data;
+  mm_jpeg_job_cmd_thread_t *cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node = NULL;
+  prctl(PR_SET_NAME, (unsigned long)"mm_jpeg_thread", 0, 0, 0);
+
+  do {
+    do {
+      rc = cam_sem_wait(&cmd_thread->job_sem);
+      if (rc != 0 && errno != EINVAL) {
+        CDBG_ERROR("%s: cam_sem_wait error (%s)",
+          __func__, strerror(errno));
+        return NULL;
+      }
+    } while (rc != 0);
+
+    /* check ongoing q size */
+    num_ongoing_jobs = mm_jpeg_queue_get_size(&my_obj->ongoing_job_q);
+    if (num_ongoing_jobs >= NUM_MAX_JPEG_CNCURRENT_JOBS) {
+      CDBG("%s:%d] ongoing job already reach max %d", __func__,
+        __LINE__, num_ongoing_jobs);
+      continue;
+    }
+
+    pthread_mutex_lock(&my_obj->job_lock);
+    /* can go ahead with new work */
+    qdata = mm_jpeg_queue_deq(&cmd_thread->job_queue);
+    node = (mm_jpeg_job_q_node_t*)qdata.p;
+    if (node != NULL) {
+      switch (node->type) {
+      case MM_JPEG_CMD_TYPE_JOB:
+        rc = mm_jpeg_process_encoding_job(my_obj, node);
+        break;
+      case MM_JPEG_CMD_TYPE_DECODE_JOB:
+        rc = mm_jpegdec_process_decoding_job(my_obj, node);
+        break;
+      case MM_JPEG_CMD_TYPE_EXIT:
+      default:
+        /* free node */
+        free(node);
+        /* set running flag to false */
+        running = 0;
+        break;
+      }
+    }
+    pthread_mutex_unlock(&my_obj->job_lock);
+
+  } while (running);
+  return NULL;
+}
+
+/** mm_jpeg_jobmgr_thread_launch:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       launches the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t *job_mgr = &my_obj->job_mgr;
+
+  cam_sem_init(&job_mgr->job_sem, 0);
+  mm_jpeg_queue_init(&job_mgr->job_queue);
+
+  /* launch the thread */
+  pthread_create(&job_mgr->pid,
+    NULL,
+    mm_jpeg_jobmgr_thread,
+    (void *)my_obj);
+  return rc;
+}
+
+/** mm_jpeg_jobmgr_thread_release:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Releases the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t * cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node =
+    (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->type = MM_JPEG_CMD_TYPE_EXIT;
+
+  qdata.p = node;
+  mm_jpeg_queue_enq(&cmd_thread->job_queue, qdata);
+  cam_sem_post(&cmd_thread->job_sem);
+
+  /* wait until cmd thread exits */
+  if (pthread_join(cmd_thread->pid, NULL) != 0) {
+    CDBG("%s: pthread dead already", __func__);
+  }
+  mm_jpeg_queue_deinit(&cmd_thread->job_queue);
+
+  cam_sem_destroy(&cmd_thread->job_sem);
+  memset(cmd_thread, 0, sizeof(mm_jpeg_job_cmd_thread_t));
+  return rc;
+}
+
+/** mm_jpeg_init:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Initializes the jpeg client
+ *
+ **/
+int32_t mm_jpeg_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  uint32_t work_buf_size;
+  unsigned int i = 0;
+  unsigned int initial_workbufs_cnt = 1;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+
+  /* init job semaphore and launch jobmgr thread */
+  CDBG("%s:%d] Launch jobmgr thread rc %d", __func__, __LINE__, rc);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  /* set work buf size from max picture size */
+  if (my_obj->max_pic_w <= 0 || my_obj->max_pic_h <= 0) {
+    CDBG_ERROR("%s:%d] Width and height are not valid "
+      "dimensions, cannot calc work buf size",__func__, __LINE__);
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+  work_buf_size = CEILING64((uint32_t)my_obj->max_pic_w) *
+    CEILING64((uint32_t)my_obj->max_pic_h) * 3U / 2U;
+  for (i = 0; i < initial_workbufs_cnt; i++) {
+    my_obj->ionBuffer[i].size = CEILING32(work_buf_size);
+    CDBG_HIGH("Max picture size %d x %d, WorkBufSize = %zu",
+        my_obj->max_pic_w, my_obj->max_pic_h, my_obj->ionBuffer[i].size);
+
+    my_obj->ionBuffer[i].addr = (uint8_t *)buffer_allocate(&my_obj->ionBuffer[i], 1);
+    if (NULL == my_obj->ionBuffer[i].addr) {
+      while (i--) {
+        buffer_deallocate(&my_obj->ionBuffer[i]);
+      }
+      mm_jpeg_jobmgr_thread_release(my_obj);
+      mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+      pthread_mutex_destroy(&my_obj->job_lock);
+      CDBG_ERROR("%s:%d] Ion allocation failed",__func__, __LINE__);
+      return -1;
+    }
+  }
+
+  my_obj->work_buf_cnt = i;
+
+  /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    CDBG_ERROR("%s:%d] OMX_Init failed (%d)", __func__, __LINE__, rc);
+    for (i = 0; i < initial_workbufs_cnt; i++) {
+      buffer_deallocate(&my_obj->ionBuffer[i]);
+    }
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  return rc;
+}
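+
+/*
+ * Editor's note (worked example, assumed dimensions): for a hypothetical
+ * max picture size of 4208x3120 the work buffer comes out as
+ *
+ *   CEILING64(4208) * CEILING64(3120) * 3 / 2
+ *     = 4224 * 3136 * 3 / 2 = 19,869,696 bytes (~19 MB),
+ *
+ * i.e. one padded NV12 frame at the maximum resolution.
+ */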
+
+/** mm_jpeg_deinit:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Deinits the jpeg client
+ *
+ **/
+int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  uint32_t i = 0;
+
+  /* release jobmgr thread */
+  rc = mm_jpeg_jobmgr_thread_release(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  /* unload OMX engine */
+  OMX_Deinit();
+
+  /* deinit ongoing job and cb queue */
+  rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  for (i = 0; i < my_obj->work_buf_cnt; i++) {
+    /*Release the ION buffer*/
+    rc = buffer_deallocate(&my_obj->ionBuffer[i]);
+    if (0 != rc) {
+      CDBG_ERROR("%s:%d] Error releasing ION buffer", __func__, __LINE__);
+    }
+  }
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+
+  return rc;
+}
+
+/** mm_jpeg_new_client:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Create new jpeg client
+ *
+ **/
+uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj)
+{
+  uint32_t client_hdl = 0;
+  uint8_t idx;
+  int i = 0;
+
+  if (my_obj->num_clients >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: num of clients reached limit", __func__);
+    return client_hdl;
+  }
+
+  for (idx = 0; idx < MAX_JPEG_CLIENT_NUM; idx++) {
+    if (0 == my_obj->clnt_mgr[idx].is_used) {
+      break;
+    }
+  }
+
+  if (idx < MAX_JPEG_CLIENT_NUM) {
+    /* client session avail */
+    /* generate client handler by index */
+    client_hdl = mm_jpeg_util_generate_handler(idx);
+
+    /* update client session */
+    my_obj->clnt_mgr[idx].is_used = 1;
+    my_obj->clnt_mgr[idx].client_handle = client_hdl;
+
+    pthread_mutex_init(&my_obj->clnt_mgr[idx].lock, NULL);
+    for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+      memset(&my_obj->clnt_mgr[idx].session[i], 0x0, sizeof(mm_jpeg_job_session_t));
+    }
+
+    /* increase client count */
+    my_obj->num_clients++;
+  }
+
+  return client_hdl;
+}
+
+/** mm_jpeg_start_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job: pointer to encode job
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t *job,
+  uint32_t *job_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = -1;
+  uint8_t session_idx = 0;
+  uint8_t client_idx = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_encode_job_t *p_jobparams  = &job->encode_job;
+
+  *job_id = 0;
+
+  /* check if valid session */
+  session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+  client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+  CDBG("%s:%d] session_idx %d client idx %d", __func__, __LINE__,
+    session_idx, client_idx);
+
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    CDBG_ERROR("%s:%d] invalid session id %x", __func__, __LINE__,
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  if (OMX_FALSE == p_session->active) {
+    CDBG_ERROR("%s:%d] session not active %x", __func__, __LINE__,
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  if ((p_jobparams->src_index >= (int32_t)p_session->params.num_src_bufs) ||
+    (p_jobparams->dst_index >= (int32_t)p_session->params.num_dst_bufs)) {
+    CDBG_ERROR("%s:%d] invalid buffer indices", __func__, __LINE__);
+    return rc;
+  }
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
+  *job_id = job->encode_job.session_id |
+    (((uint32_t)p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->enc_info.encode_job = job->encode_job;
+  node->enc_info.job_id = *job_id;
+  node->enc_info.client_handle = p_session->client_hdl;
+  node->type = MM_JPEG_CMD_TYPE_JOB;
+
+
+
+  qdata.p = node;
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+  if (0 == rc) {
+      cam_sem_post(&my_obj->job_mgr.job_sem);
+  }
+
+  CDBG_ERROR("%s:%d] X", __func__, __LINE__);
+
+  return rc;
+}
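+
+/*
+ * Editor's note (illustrative): the job id packs a rolling history counter
+ * into the otherwise unused bits above the session index, i.e.
+ *
+ *   *job_id = session_id | ((job_hist++ % JOB_HIST_MAX) << 16);
+ *
+ * Assuming a hypothetical magic byte of 0x12, session_id 0x12000100 with
+ * job_hist 5 would yield job id 0x12050100.
+ */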
+
+
+
+/** mm_jpeg_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort the encoding session
+ *
+ **/
+int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    goto abort_done;
+  }
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    p_session = mm_jpeg_get_session(my_obj, node->enc_info.job_id);
+    if (p_session) {
+      mm_jpeg_session_abort(p_session);
+    } else {
+      CDBG_ERROR("%s:%d] Invalid job id 0x%x", __func__, __LINE__,
+        node->enc_info.job_id);
+    }
+    free(node);
+    goto abort_done;
+  }
+
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  return rc;
+}
+
+
+#ifdef MM_JPEG_READ_META_KEYFILE
+static int32_t mm_jpeg_read_meta_keyfile(mm_jpeg_job_session_t *p_session,
+    const char *filename)
+{
+  int rc = 0;
+  FILE *fp = NULL;
+  size_t file_size = 0;
+  fp = fopen(filename, "r");
+  if (!fp) {
+    CDBG_ERROR("%s:%d] Key not present", __func__, __LINE__);
+    return -1;
+  }
+  fseek(fp, 0, SEEK_END);
+  file_size = (size_t)ftell(fp);
+  fseek(fp, 0, SEEK_SET);
+
+  p_session->meta_enc_key = (uint8_t *) malloc((file_size + 1) * sizeof(uint8_t));
+
+  if (!p_session->meta_enc_key) {
+    CDBG_ERROR("%s:%d] error", __func__, __LINE__);
+    fclose(fp);
+    return -1;
+  }
+
+  fread(p_session->meta_enc_key, 1, file_size, fp);
+  fclose(fp);
+
+  p_session->meta_enc_keylen = file_size;
+
+  return rc;
+}
+#endif // MM_JPEG_READ_META_KEYFILE
+
+/** mm_jpeg_create_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @p_params: pointer to encode params
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding session
+ *
+ **/
+int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint8_t clnt_idx = 0;
+  int session_idx = -1;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_job_session_t * p_prev_session = NULL;
+  *p_session_id = 0;
+  uint32_t i = 0;
+  uint32_t num_omx_sessions;
+  uint32_t work_buf_size;
+  mm_jpeg_queue_t *p_session_handle_q, *p_out_buf_q;
+  uint32_t work_bufs_need;
+
+  /* validate the parameters */
+  if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+    || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+    CDBG_ERROR("%s:%d] invalid num buffers", __func__, __LINE__);
+    return rc;
+  }
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  num_omx_sessions = 1;
+  if (p_params->burst_mode) {
+    num_omx_sessions = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+  }
+  work_bufs_need = my_obj->num_sessions + num_omx_sessions;
+  if (work_bufs_need > MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+    work_bufs_need = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+  }
+  CDBG_ERROR("%s:%d] >>>> Work bufs need %d", __func__, __LINE__, work_bufs_need);
+  work_buf_size = CEILING64((uint32_t)my_obj->max_pic_w) *
+      CEILING64((uint32_t)my_obj->max_pic_h) * 3U / 2U;
+  for (i = my_obj->work_buf_cnt; i < work_bufs_need; i++) {
+     my_obj->ionBuffer[i].size = CEILING32(work_buf_size);
+     CDBG_HIGH("Max picture size %d x %d, WorkBufSize = %zu",
+         my_obj->max_pic_w, my_obj->max_pic_h, my_obj->ionBuffer[i].size);
+
+     my_obj->ionBuffer[i].addr = (uint8_t *)buffer_allocate(&my_obj->ionBuffer[i], 1);
+     if (NULL == my_obj->ionBuffer[i].addr) {
+       CDBG_ERROR("%s:%d] Ion allocation failed",__func__, __LINE__);
+       return -1;
+     }
+     my_obj->work_buf_cnt++;
+  }
+
+  /* init omx handle queue */
+  p_session_handle_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_session_handle_q));
+  if (NULL == p_session_handle_q) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+  rc = mm_jpeg_queue_init(p_session_handle_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  /* init output buf queue */
+  p_out_buf_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_out_buf_q));
+  if (NULL == p_out_buf_q) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+  /* init omx handle queue */
+  rc = mm_jpeg_queue_init(p_out_buf_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  for (i = 0; i < num_omx_sessions; i++) {
+    uint32_t buf_idx = 0U;
+    session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+    if (session_idx < 0) {
+      CDBG_ERROR("%s:%d] invalid session id (%d)", __func__, __LINE__, session_idx);
+      return rc;
+    }
+
+    p_session->next_session = NULL;
+
+    if (p_prev_session) {
+      p_prev_session->next_session = p_session;
+    }
+    p_prev_session = p_session;
+
+    buf_idx = my_obj->num_sessions + i;
+    if (buf_idx < MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+      p_session->work_buffer = my_obj->ionBuffer[buf_idx];
+    } else {
+      p_session->work_buffer.addr = NULL;
+      p_session->work_buffer.ion_fd = -1;
+      p_session->work_buffer.p_pmem_fd = -1;
+    }
+
+    p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+
+    ret = mm_jpeg_session_create(p_session);
+    if (OMX_ErrorNone != ret) {
+      p_session->active = OMX_FALSE;
+      CDBG_ERROR("%s:%d] jpeg session create failed", __func__, __LINE__);
+      return rc;
+    }
+
+    uint32_t session_id = (JOB_ID_MAGICVAL << 24) |
+        ((uint32_t)session_idx << 8) | clnt_idx;
+
+    if (!*p_session_id) {
+      *p_session_id = session_id;
+    }
+
+    /*copy the params*/
+    p_session->params = *p_params;
+    p_session->client_hdl = client_hdl;
+    p_session->sessionId = session_id;
+    p_session->session_handle_q = p_session_handle_q;
+    p_session->out_buf_q = p_out_buf_q;
+
+    qdata.p = p_session;
+    mm_jpeg_queue_enq(p_session_handle_q, qdata);
+
+    if (OMX_FALSE == p_session->config) {
+      rc = mm_jpeg_session_configure(p_session);
+      if (rc) {
+        CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+        return rc;
+      }
+      p_session->config = OMX_TRUE;
+    }
+    p_session->num_omx_sessions = num_omx_sessions;
+
+    CDBG("%s:%d] session id %x", __func__, __LINE__, session_id);
+  }
+
+  // Queue the output buf indexes
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    qdata.u32 = i + 1;
+    mm_jpeg_queue_enq(p_out_buf_q, qdata);
+  }
+
+  p_session->meta_enc_key = NULL;
+  p_session->meta_enc_keylen = 0;
+
+  return rc;
+}
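+
+/*
+ * Editor's note (illustrative): each session id is packed as
+ *
+ *   (JOB_ID_MAGICVAL << 24) | (session_idx << 8) | clnt_idx
+ *
+ * so client 0 with session index 1 yields an id whose top byte is the magic
+ * value and whose low bytes are 0x0100. In burst mode the loop above chains
+ * MM_JPEG_CONCURRENT_SESSIONS_COUNT such sessions through
+ * p_session->next_session and returns the first id to the caller.
+ */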
+
+/** mm_jpegenc_destroy_job
+ *
+ *  Arguments:
+ *    @p_session: Session obj
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the job-specific parameters
+ *
+ **/
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  int i = 0, rc = 0;
+
+  CDBG_ERROR("%s:%d] Exif entry count %d %d", __func__, __LINE__,
+    (int)p_jobparams->exif_info.numOfEntries,
+    (int)p_session->exif_count_local);
+  for (i = 0; i < p_session->exif_count_local; i++) {
+    rc = releaseExifEntry(&p_session->exif_info_local[i]);
+    if (rc) {
+      CDBG_ERROR("%s:%d] Exif release failed (%d)", __func__, __LINE__, rc);
+    }
+  }
+  p_session->exif_count_local = 0;
+
+  return rc;
+}
+
+/** mm_jpegenc_job_done:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Release the finished job and return the session and output
+ *       buffer index to their queues
+ *
+ **/
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_q_data_t qdata;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /*Destroy job related params*/
+  mm_jpegenc_destroy_job(p_session);
+
+  /*remove the job*/
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+    p_session->jobId);
+  if (node) {
+    free(node);
+  }
+  p_session->encoding = OMX_FALSE;
+
+  // Queue to available sessions
+  qdata.p = p_session;
+  mm_jpeg_queue_enq(p_session->session_handle_q, qdata);
+
+  if (p_session->auto_out_buf) {
+    //Queue out buf index
+    qdata.u32 = (uint32_t)(p_session->encode_job.dst_index + 1);
+    mm_jpeg_queue_enq(p_session->out_buf_q, qdata);
+  }
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+/** mm_jpeg_destroy_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: session to destroy
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session
+ *
+ **/
+int32_t mm_jpeg_destroy_session(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  uint32_t session_id = 0;
+  mm_jpeg_job_session_t *p_cur_sess;
+
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid session", __func__, __LINE__);
+    return rc;
+  }
+
+  session_id = p_session->sessionId;
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  CDBG("%s:%d] abort todo jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  CDBG("%s:%d] abort ongoing jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpeg_session_destroy(p_session);
+
+  p_cur_sess = p_session;
+
+  do {
+    mm_jpeg_remove_session_idx(my_obj, p_cur_sess->sessionId);
+  } while (NULL != (p_cur_sess = p_cur_sess->next_session));
+
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  while (1) {
+    qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+    if (NULL == qdata.p)
+      break;
+  }
+  mm_jpeg_queue_deinit(p_session->session_handle_q);
+  free(p_session->session_handle_q);
+  p_session->session_handle_q = NULL;
+
+  while (1) {
+    qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+    if (0U == qdata.u32)
+      break;
+  }
+  mm_jpeg_queue_deinit(p_session->out_buf_q);
+  free(p_session->out_buf_q);
+  p_session->out_buf_q = NULL;
+
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+
+  CDBG("%s:%d] X", __func__, __LINE__);
+
+
+  return rc;
+}
+
+
+
+
+/** mm_jpeg_destroy_session_unlocked:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: session to destroy
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session; the caller must hold the job lock
+ *
+ **/
+int32_t mm_jpeg_destroy_session_unlocked(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  uint32_t session_id = 0;
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid session", __func__, __LINE__);
+    return rc;
+  }
+
+  session_id = p_session->sessionId;
+
+  /* abort job if in todo queue */
+  CDBG("%s:%d] abort todo jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  CDBG("%s:%d] abort ongoing jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpeg_remove_session_idx(my_obj, session_id);
+
+  return rc;
+}
+
+/** mm_jpeg_destroy_session_by_id:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session with the given id
+ *
+ **/
+int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+  mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+  return mm_jpeg_destroy_session(my_obj, p_session);
+}
+
+
+
+/** mm_jpeg_close:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Close the jpeg client
+ *
+ **/
+int32_t mm_jpeg_close(mm_jpeg_obj *my_obj, uint32_t client_hdl)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  int i = 0;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+
+  /* abort all jobs from the client */
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    if (OMX_TRUE == my_obj->clnt_mgr[clnt_idx].session[i].active)
+      mm_jpeg_destroy_session_unlocked(my_obj,
+        &my_obj->clnt_mgr[clnt_idx].session[i]);
+  }
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  /* invalidate client session */
+  pthread_mutex_destroy(&my_obj->clnt_mgr[clnt_idx].lock);
+  memset(&my_obj->clnt_mgr[clnt_idx], 0, sizeof(mm_jpeg_client_t));
+
+  rc = 0;
+  CDBG("%s:%d] X", __func__, __LINE__);
+  return rc;
+}
+
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->ebd_count);
+  pthread_mutex_lock(&p_session->lock);
+  p_session->ebd_count++;
+  pthread_mutex_unlock(&p_session->lock);
+  return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+  mm_jpeg_output_t output_buf;
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->fbd_count);
+  CDBG_HIGH("[KPI Perf] : PROFILE_JPEG_FBD");
+
+  pthread_mutex_lock(&p_session->lock);
+
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  p_session->fbd_count++;
+  if (NULL != p_session->params.jpeg_cb) {
+
+    p_session->job_status = JPEG_JOB_STATUS_DONE;
+    output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+    output_buf.buf_vaddr = pBuffer->pBuffer;
+    output_buf.fd = 0;
+    CDBG("%s:%d] send jpeg callback %d buf 0x%p len %u JobID %u",
+      __func__, __LINE__,
+      p_session->job_status, pBuffer->pBuffer,
+      (unsigned int)pBuffer->nFilledLen, p_session->jobId);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      &output_buf,
+      p_session->params.userdata);
+
+    mm_jpegenc_job_done(p_session);
+
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  return ret;
+}
+
+
+
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent __unused,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData __unused)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] %d %d %d state %d", __func__, __LINE__, eEvent, (int)nData1,
+    (int)nData2, p_session->abort_state);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+    p_session->abort_state = MM_JPEG_ABORT_DONE;
+    pthread_cond_signal(&p_session->cond);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  if (eEvent == OMX_EventError) {
+    p_session->error_flag = nData2;
+    if (p_session->encoding == OMX_TRUE) {
+      CDBG("%s:%d] Error during encoding", __func__, __LINE__);
+
+      /* send jpeg callback */
+      if (NULL != p_session->params.jpeg_cb) {
+        p_session->job_status = JPEG_JOB_STATUS_ERROR;
+        CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+          p_session->job_status);
+        p_session->params.jpeg_cb(p_session->job_status,
+          p_session->client_hdl,
+          p_session->jobId,
+          NULL,
+          p_session->params.userdata);
+      }
+
+      /* remove from ready queue */
+      mm_jpegenc_job_done(p_session);
+    }
+    pthread_cond_signal(&p_session->cond);
+  } else if (eEvent == OMX_EventCmdComplete) {
+    if (p_session->state_change_pending == OMX_TRUE) {
+      p_session->state_change_pending = OMX_FALSE;
+      pthread_cond_signal(&p_session->cond);
+    }
+  }
+
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d]", __func__, __LINE__);
+  return OMX_ErrorNone;
+}
+
+
+
+/* remove the first job from the queue with matching client handle */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if (data && (data->enc_info.client_handle == client_hdl)) {
+      CDBG_ERROR("%s:%d] found matching client handle", __func__, __LINE__);
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      CDBG_ERROR("%s: queue size = %d", __func__, queue->size);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove the first job from the queue with matching session id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if (data && (data->enc_info.encode_job.session_id == session_id)) {
+      CDBG_ERROR("%s:%d] found matching session id", __func__, __LINE__);
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      CDBG_ERROR("%s: queue size = %d", __func__, queue->size);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+  uint32_t lq_job_id;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if (data != NULL) {
+        if (data->type == MM_JPEG_CMD_TYPE_DECODE_JOB) {
+            lq_job_id = data->dec_info.job_id;
+        } else {
+            lq_job_id = data->enc_info.job_id;
+        }
+
+      if (lq_job_id == job_id) {
+        CDBG_ERROR("%s:%d] found matching job id", __func__, __LINE__);
+        job_node = data;
+        cam_list_del_node(&node->list);
+        queue->size--;
+        free(node);
+        break;
+      }
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id; caller must hold the queue lock */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if (data && (data->enc_info.job_id == job_id)) {
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  return job_node;
+}
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
new file mode 100644
index 0000000..3c17068
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -0,0 +1,563 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+#include <errno.h>
+#include <math.h>
+
+
+#define LOWER(a)               ((a) & 0xFFFF)
+#define UPPER(a)               (((a)>>16) & 0xFFFF)
+#define CHANGE_ENDIAN_16(a) \
+        ((uint16_t)((0x00FF & ((a)>>8)) | (0xFF00 & ((a)<<8))))
+#define ROUND(a) \
+        ((a >= 0) ? (uint32_t)(a + 0.5) : (uint32_t)(a - 0.5))
+
+#define AAA_EXIF_BUF_SIZE   10
+#define AE_EXIF_SIZE        2
+#define AWB_EXIF_SIZE       4
+#define AF_EXIF_SIZE        2
+
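+/*
+ * Editor's note (illustrative values): CHANGE_ENDIAN_16(0x1234) == 0x3412,
+ * LOWER(0xAABBCCDD) == 0xCCDD, UPPER(0xAABBCCDD) == 0xAABB, and
+ * ROUND(2.4) == 2 while ROUND(2.5) == 3 — the byte-swap and rounding
+ * helpers used when packing 3A data into EXIF tags below.
+ */
+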
+/** addExifEntry:
+ *
+ *  Arguments:
+ *   @p_exif_info : Exif info struct
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data elements, in units of the given type
+ *   @data    : input data ptr
+ *
+ *  Return     : int32_t type of status
+ *               0  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Function to add an entry to exif data
+ *
+ **/
+int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data)
+{
+    int32_t rc = 0;
+    uint32_t numOfEntries = (uint32_t)p_exif_info->numOfEntries;
+    QEXIF_INFO_DATA *p_info_data = p_exif_info->exif_data;
+    if(numOfEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return -1;
+    }
+
+    p_info_data[numOfEntries].tag_id = tagid;
+    p_info_data[numOfEntries].tag_entry.type = type;
+    p_info_data[numOfEntries].tag_entry.count = count;
+    p_info_data[numOfEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE: {
+      if (count > 1) {
+        uint8_t *values = (uint8_t *)malloc(count);
+        if (values == NULL) {
+          ALOGE("%s: No memory for byte array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count);
+          p_info_data[numOfEntries].tag_entry.data._bytes = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._byte = *(uint8_t *)data;
+      }
+    }
+    break;
+    case EXIF_ASCII: {
+      char *str = NULL;
+      str = (char *)malloc(count + 1);
+      if (str == NULL) {
+        ALOGE("%s: No memory for ascii string", __func__);
+        rc = -1;
+      } else {
+        memset(str, 0, count + 1);
+        memcpy(str, data, count);
+        p_info_data[numOfEntries].tag_entry.data._ascii = str;
+      }
+    }
+    break;
+    case EXIF_SHORT: {
+      if (count > 1) {
+        uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for short array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint16_t));
+          p_info_data[numOfEntries].tag_entry.data._shorts = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._short = *(uint16_t *)data;
+      }
+    }
+    break;
+    case EXIF_LONG: {
+      if (count > 1) {
+        uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for long array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint32_t));
+          p_info_data[numOfEntries].tag_entry.data._longs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._long = *(uint32_t *)data;
+      }
+    }
+    break;
+    case EXIF_RATIONAL: {
+      if (count > 1) {
+        rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for rational array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(rat_t));
+          p_info_data[numOfEntries].tag_entry.data._rats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._rat = *(rat_t *)data;
+      }
+    }
+    break;
+    case EXIF_UNDEFINED: {
+      uint8_t *values = (uint8_t *)malloc(count);
+      if (values == NULL) {
+        ALOGE("%s: No memory for undefined array", __func__);
+        rc = -1;
+      } else {
+        memcpy(values, data, count);
+        p_info_data[numOfEntries].tag_entry.data._undefined = values;
+      }
+    }
+    break;
+    case EXIF_SLONG: {
+      if (count > 1) {
+        int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for signed long array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(int32_t));
+          p_info_data[numOfEntries].tag_entry.data._slongs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._slong = *(int32_t *)data;
+      }
+    }
+    break;
+    case EXIF_SRATIONAL: {
+      if (count > 1) {
+        srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for signed rational array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(srat_t));
+          p_info_data[numOfEntries].tag_entry.data._srats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._srat = *(srat_t *)data;
+      }
+    }
+    break;
+    }
+
+    // Increase number of entries
+    p_exif_info->numOfEntries++;
+    return rc;
+}
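+
+/* Illustrative usage sketch (values are hypothetical; the tag IDs and types
+ * are the ones used elsewhere in this file). A single EXIF_SHORT entry is
+ * added with count 1:
+ *   uint16_t iso = 100;
+ *   addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT, 1, &iso);
+ * Arrays instead pass count > 1 and a pointer to the first element, and the
+ * entry is deep-copied here, so the caller's buffer may be released. */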
+
+/** releaseExifEntry
+ *
+ *  Arguments:
+ *   @p_exif_data : Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *               0  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Function to release an entry from exif data
+ *
+ **/
+int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data)
+{
+ switch (p_exif_data->tag_entry.type) {
+  case EXIF_BYTE: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._bytes != NULL) {
+      free(p_exif_data->tag_entry.data._bytes);
+      p_exif_data->tag_entry.data._bytes = NULL;
+    }
+  }
+  break;
+  case EXIF_ASCII: {
+    if (p_exif_data->tag_entry.data._ascii != NULL) {
+      free(p_exif_data->tag_entry.data._ascii);
+      p_exif_data->tag_entry.data._ascii = NULL;
+    }
+  }
+  break;
+  case EXIF_SHORT: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._shorts != NULL) {
+      free(p_exif_data->tag_entry.data._shorts);
+      p_exif_data->tag_entry.data._shorts = NULL;
+    }
+  }
+  break;
+  case EXIF_LONG: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._longs != NULL) {
+      free(p_exif_data->tag_entry.data._longs);
+      p_exif_data->tag_entry.data._longs = NULL;
+    }
+  }
+  break;
+  case EXIF_RATIONAL: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._rats != NULL) {
+      free(p_exif_data->tag_entry.data._rats);
+      p_exif_data->tag_entry.data._rats = NULL;
+    }
+  }
+  break;
+  case EXIF_UNDEFINED: {
+    if (p_exif_data->tag_entry.data._undefined != NULL) {
+      free(p_exif_data->tag_entry.data._undefined);
+      p_exif_data->tag_entry.data._undefined = NULL;
+    }
+  }
+  break;
+  case EXIF_SLONG: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._slongs != NULL) {
+      free(p_exif_data->tag_entry.data._slongs);
+      p_exif_data->tag_entry.data._slongs = NULL;
+    }
+  }
+  break;
+  case EXIF_SRATIONAL: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._srats != NULL) {
+      free(p_exif_data->tag_entry.data._srats);
+      p_exif_data->tag_entry.data._srats = NULL;
+    }
+  }
+  break;
+  } /*end of switch*/
+
+  return 0;
+}
+
+/** process_sensor_data:
+ *
+ *  Arguments:
+ *   @p_sensor_params : ptr to sensor data
+ *   @exif_info : Exif info struct
+ *   @p_cam_exif_params : camera exif parameters
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       process sensor data
+ *
+ *  Notes: this needs to be filled for the metadata
+ **/
+int process_sensor_data(cam_sensor_params_t *p_sensor_params,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam_exif_params)
+{
+  int rc = 0;
+  rat_t val_rat;
+
+  if (NULL == p_sensor_params) {
+    ALOGE("%s %d: Sensor params are null", __func__, __LINE__);
+    return 0;
+  }
+
+  CDBG_HIGH("%s:%d] From metadata aperture = %f ", __func__, __LINE__,
+    p_sensor_params->aperture_value );
+
+  val_rat.num = (uint32_t)(p_sensor_params->aperture_value * 100);
+  val_rat.denom = 100;
+  rc = addExifEntry(exif_info, EXIFTAGID_APERTURE, EXIF_RATIONAL, 1, &val_rat);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+  }
+
+  short flash_tag = -1;
+  uint8_t flash_fired = 0;
+  uint8_t strobe_state = 0;
+  uint8_t flash_mode = 0;
+  uint8_t flash_presence = 0;
+  uint8_t red_eye_mode = 0;
+
+  if (!p_cam_exif_params->flash_presence) {
+    if (p_cam_exif_params->ui_flash_mode == CAM_FLASH_MODE_AUTO) {
+      CDBG_HIGH("%s %d: flashmode auto, take from sensor: %d", __func__, __LINE__,
+        p_sensor_params->flash_mode);
+      if(p_sensor_params->flash_mode == CAM_FLASH_MODE_ON)
+        flash_fired = FLASH_FIRED;
+      else if(p_sensor_params->flash_mode == CAM_FLASH_MODE_OFF)
+        flash_fired = FLASH_NOT_FIRED;
+
+      flash_mode = CAMERA_FLASH_AUTO;
+    } else {
+      CDBG_HIGH("%s %d: flashmode from ui: %d", __func__, __LINE__,
+                 p_cam_exif_params->ui_flash_mode);
+      if (p_cam_exif_params->ui_flash_mode == CAM_FLASH_MODE_ON) {
+        flash_mode = CAMERA_FLASH_COMPULSORY;
+        flash_fired = FLASH_FIRED;
+      } else if(p_cam_exif_params->ui_flash_mode == CAM_FLASH_MODE_OFF) {
+        flash_mode = CAMERA_FLASH_SUPRESSION;
+        flash_fired = FLASH_NOT_FIRED;
+      }
+   }
+
+   if((p_cam_exif_params->red_eye) && (flash_fired == FLASH_FIRED))
+     red_eye_mode = REDEYE_MODE;
+
+  } else {
+    flash_presence = NO_FLASH_FUNC;
+    red_eye_mode = NO_REDEYE_MODE;
+  }
+
+  /* No strobe flash support */
+  strobe_state = NO_STROBE_RETURN_DETECT;
+
+  /* Generating the flash tag */
+  flash_tag = 0x00 | flash_fired |
+    strobe_state | flash_mode |
+    flash_presence | red_eye_mode;
+
+  CDBG_HIGH("%s %d: flash_tag: 0x%x", __func__, __LINE__, flash_tag);
+
+
+  /*FLASH*/
+  rc = addExifEntry(exif_info, EXIFTAGID_FLASH, EXIF_SHORT,
+    sizeof(flash_tag)/2, &flash_tag);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding flash Exif Entry", __func__, __LINE__);
+  }
+
+  return rc;
+}
+
+
+/** process_3a_data:
+ *
+ *  Arguments:
+ *   @p_ae_params : ptr to aec data
+ *   @p_awb_params : ptr to awb data
+ *   @p_focus_data : ptr to auto focus data
+ *   @exif_info : Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       process 3a data
+ *
+ *  Notes: this needs to be filled for the metadata
+ **/
+int process_3a_data(cam_ae_params_t *p_ae_params, cam_awb_params_t *p_awb_params,
+        cam_auto_focus_data_t *p_focus_data, QOMX_EXIF_INFO *exif_info)
+{
+  int rc = 0;
+  srat_t val_srat;
+  rat_t val_rat;
+  double shutter_speed_value;
+  uint16_t aaa_exif_buff[AAA_EXIF_BUF_SIZE];
+  uint32_t exif_byte_cnt = 0;
+
+  memset(aaa_exif_buff, 0x0, sizeof(aaa_exif_buff));
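+
+  /* aaa_exif_buff is the maker-note payload, laid out as uint16 slots:
+   * AE line count (2 slots), AWB cct value and decision (2 slots each),
+   * AF focus position (2 slots); blocks whose params are unavailable are
+   * skipped over and stay zero-filled. */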
+
+  if (NULL == p_ae_params) {
+    ALOGE("%s %d: AE params are null", __func__, __LINE__);
+    /* increment exif_byte_cnt, so that this info will be filled with 0s */
+    exif_byte_cnt += AE_EXIF_SIZE;
+  } else {
+    ALOGE("%s:%d] exp_time %f, iso_value %d exp idx: %d, lc: %d, gain: %f", __func__, __LINE__,
+      p_ae_params->exp_time, p_ae_params->iso_value, p_ae_params->exp_index,
+      p_ae_params->line_count, p_ae_params->real_gain);
+
+    /* Exposure time */
+    if (0.0f >= p_ae_params->exp_time) {
+      val_rat.num = 0;
+      val_rat.denom = 0;
+    } else {
+      val_rat.num = 1;
+      val_rat.denom = ROUND(1.0/p_ae_params->exp_time);
+    }
+    CDBG_HIGH("%s: numer %d denom %d", __func__, val_rat.num, val_rat.denom );
+
+    rc = addExifEntry(exif_info, EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL,
+      (sizeof(val_rat)/(8)), &val_rat);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry Exposure time",
+        __func__, __LINE__);
+    }
+
+    /* Shutter Speed*/
+    if (p_ae_params->exp_time > 0) {
+      shutter_speed_value = log10(1/p_ae_params->exp_time)/log10(2);
+      val_srat.num = (int32_t)(shutter_speed_value * 1000.0f);
+      val_srat.denom = 1000;
+    } else {
+      val_srat.num = 0;
+      val_srat.denom = 0;
+    }
+    rc = addExifEntry(exif_info, EXIFTAGID_SHUTTER_SPEED, EXIF_SRATIONAL,
+      (sizeof(val_srat)/(8)), &val_srat);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+    }
+
+    /* ISO */
+    short val_short;
+    val_short = (short) p_ae_params->iso_value;
+    rc = addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT,
+      sizeof(val_short)/2, &val_short);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry ISO", __func__, __LINE__);
+    }
+
+    /* Gain */
+    val_short = (short) p_ae_params->real_gain;
+    rc = addExifEntry(exif_info, EXIFTAGID_GAIN_CONTROL, EXIF_SHORT,
+      sizeof(val_short)/2, &val_short);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry Gain", __func__, __LINE__);
+    }
+
+    /* Exposure Index */
+    val_rat.num = p_ae_params->exp_index;
+    val_rat.denom = 1;
+
+    CDBG_HIGH("%s: numer %d denom %d", __func__, val_rat.num, val_rat.denom );
+
+    rc = addExifEntry(exif_info, EXIFTAGID_EXPOSURE_INDEX, EXIF_RATIONAL,
+      (sizeof(val_rat)/(8)), &val_rat);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry Exposure Index",
+        __func__, __LINE__);
+    }
+
+    /* AE line count */
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(LOWER(p_ae_params->line_count));
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(UPPER(p_ae_params->line_count));
+  }
+
+  if (NULL == p_awb_params) {
+    ALOGE("%s %d: AWB params are null", __func__, __LINE__);
+    /* increment exif_byte_cnt, so that this info will be filled with 0s */
+    exif_byte_cnt += AWB_EXIF_SIZE;
+  } else {
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(LOWER(p_awb_params->cct_value));
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(UPPER(p_awb_params->cct_value));
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(LOWER(p_awb_params->decision));
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(UPPER(p_awb_params->decision));
+  }
+
+  if (NULL == p_focus_data) {
+    ALOGE("%s %d: AF params are null", __func__, __LINE__);
+    /* increment exif_byte_cnt, so that this info will be filled with 0s */
+    exif_byte_cnt += AF_EXIF_SIZE;
+  } else {
+    aaa_exif_buff[exif_byte_cnt++] = CHANGE_ENDIAN_16(LOWER(p_focus_data->focus_pos));
+    aaa_exif_buff[exif_byte_cnt]   = CHANGE_ENDIAN_16(UPPER(p_focus_data->focus_pos));
+  }
+
+  /* Add to exif data */
+  rc = addExifEntry(exif_info, EXIFTAGID_EXIF_MAKER_NOTE, EXIF_UNDEFINED,
+    (exif_byte_cnt * 2), aaa_exif_buff);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry Maker note", __func__, __LINE__);
+  }
+
+  return rc;
+
+}
+
+/** process_meta_data:
+ *
+ *  Arguments:
+ *   @p_meta : ptr to metadata
+ *   @exif_info: Exif info struct
+ *   @p_cam_exif_params: camera exif parameters
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       process metadata (3A and sensor params) into exif entries
+ *
+ *  Notes: this needs to be filled for the metadata
+ **/
+int process_meta_data(cam_metadata_info_t *p_meta, QOMX_EXIF_INFO *exif_info,
+  mm_jpeg_exif_params_t *p_cam_exif_params)
+{
+  int rc = 0;
+
+  if (!p_meta) {
+    ALOGE("%s %d:Meta data is NULL", __func__, __LINE__);
+    return 0;
+  }
+  cam_ae_params_t *p_ae_params = p_meta->is_ae_params_valid ?
+    &p_meta->ae_params : &p_cam_exif_params->ae_params;
+
+  cam_awb_params_t *p_awb_params = p_meta->is_awb_params_valid ?
+    &p_meta->awb_params : &p_cam_exif_params->awb_params;
+
+  cam_auto_focus_data_t *p_focus_data = p_meta->is_focus_valid ?
+    &p_meta->focus_data : &p_cam_exif_params->af_params;
+
+  if(p_cam_exif_params->sensor_params.sens_type != CAM_SENSOR_YUV) {
+      rc = process_3a_data(p_ae_params, p_awb_params, p_focus_data, exif_info);
+      if (rc) {
+        ALOGE("%s %d: Failed to extract 3a params", __func__, __LINE__);
+      }
+  }
+
+  cam_sensor_params_t *p_sensor_params = p_meta->is_sensor_params_valid ?
+    &p_meta->sensor_params : &p_cam_exif_params->sensor_params;
+
+  if (NULL != p_sensor_params) {
+    rc = process_sensor_data(p_sensor_params, exif_info, p_cam_exif_params);
+    if (rc) {
+      ALOGE("%s %d: Failed to extract sensor params", __func__, __LINE__);
+    }
+  }
+  return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
new file mode 100644
index 0000000..1e580b6
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
@@ -0,0 +1,374 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdlib.h>
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+static mm_jpeg_obj* g_jpeg_obj = NULL;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+volatile uint32_t gMmCameraJpegLogLevel = 0;
+
+/** mm_jpeg_util_generate_handler:
+ *
+ *  Arguments:
+ *    @index: client index
+ *
+ *  Return:
+ *       handle value
+ *
+ *  Description:
+ *       utility function to generate handler
+ *
+ **/
+uint32_t mm_jpeg_util_generate_handler(uint8_t index)
+{
+  uint32_t handler = 0;
+  pthread_mutex_lock(&g_handler_lock);
+  g_handler_history_count++;
+  if (0 == g_handler_history_count) {
+    g_handler_history_count++;
+  }
+  handler = g_handler_history_count;
+  handler = (handler<<8) | index;
+  pthread_mutex_unlock(&g_handler_lock);
+  return handler;
+}
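+
+/* The generated handle packs a non-zero, monotonically increasing sequence
+ * number in the upper bits and the client index in the low byte, which
+ * mm_jpeg_util_get_index_by_handler() below recovers. */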
+
+/** mm_jpeg_util_get_index_by_handler:
+ *
+ *  Arguments:
+ *    @handler: handle value
+ *
+ *  Return:
+ *       client index
+ *
+ *  Description:
+ *       get client index
+ *
+ **/
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler)
+{
+  return (handler & 0x000000ff);
+}
+
+/** mm_jpeg_intf_start_job:
+ *
+ *  Arguments:
+ *    @job: jpeg job object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       start the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+  int32_t rc = -1;
+
+  if (NULL == job ||
+    NULL == job_id) {
+    CDBG_ERROR("%s:%d] invalid parameters for job or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+  rc = mm_jpeg_start_job(g_jpeg_obj, job, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: encode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create new jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_create_session(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params,
+    uint32_t *p_session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_create_session(g_jpeg_obj, client_hdl, p_params, p_session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_destroy_session(uint32_t session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_destroy_session_by_id(g_jpeg_obj, session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_abort_job:
+ *
+ *  Arguments:
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_abort_job(uint32_t job_id)
+{
+  int32_t rc = -1;
+
+  if (0 == job_id) {
+    CDBG_ERROR("%s:%d] invalid jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_abort_job(g_jpeg_obj, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_close:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Close the jpeg client
+ *
+ **/
+static int32_t mm_jpeg_intf_close(uint32_t client_hdl)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl) {
+    CDBG_ERROR("%s:%d] invalid client_hdl", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_close(g_jpeg_obj, client_hdl);
+  g_jpeg_obj->num_clients--;
+  if(0 == rc) {
+    if (0 == g_jpeg_obj->num_clients) {
+      /* No client, close jpeg internally */
+      rc = mm_jpeg_deinit(g_jpeg_obj);
+      free(g_jpeg_obj);
+      g_jpeg_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** jpeg_open:
+ *
+ *  Arguments:
+ *    @ops: ops table pointer
+ *    @picture_size: max picture dimensions, used for work buffer sizing
+ *
+ *  Return:
+ *       0 on failure, non-zero client handle otherwise
+ *
+ *  Description:
+ *       Open a jpeg client
+ *
+ **/
+uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_dimension picture_size)
+{
+  int32_t rc = 0;
+  uint32_t clnt_hdl = 0;
+  mm_jpeg_obj* jpeg_obj = NULL;
+  char prop[PROPERTY_VALUE_MAX];
+  uint32_t temp;
+  uint32_t log_level;
+  uint32_t debug_mask;
+  memset(prop, 0, sizeof(prop));
+
+  /*  Higher 4 bits : Value of Debug log level (Default level is 1 to print all CDBG_HIGH)
+      Lower 28 bits : Control mode for sub-module logging (only 3 sub-modules in HAL)
+                      0x1 for HAL
+                      0x10 for mm-camera-interface
+                      0x100 for mm-jpeg-interface  */
+  property_get("persist.camera.hal.debug.mask", prop, "268435463"); // 0x10000007=268435463
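+  /* e.g. the default 0x10000007 decodes to log_level = (0x10000007 >> 28) & 0xF = 1,
+   * with the low 28 bits (0x0000007) forming the sub-module control mask */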
+  temp = (uint32_t)atoi(prop);
+  log_level = ((temp >> 28) & 0xF);
+  debug_mask = (temp & HAL_DEBUG_MASK_MM_JPEG_INTERFACE);
+  if (debug_mask > 0)
+      gMmCameraJpegLogLevel = log_level;
+  else
+      gMmCameraJpegLogLevel = 0; // Debug logs are not required if debug_mask is zero
+
+  CDBG_HIGH("%s gMmCameraJpegLogLevel=%d",__func__, gMmCameraJpegLogLevel);
+
+  pthread_mutex_lock(&g_intf_lock);
+  /* first time open */
+  if(NULL == g_jpeg_obj) {
+    jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+    if(NULL == jpeg_obj) {
+      CDBG_ERROR("%s:%d] no mem", __func__, __LINE__);
+      pthread_mutex_unlock(&g_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* initialize jpeg obj */
+    memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+
+    /* used for work buf calculation */
+    jpeg_obj->max_pic_w = picture_size.w;
+    jpeg_obj->max_pic_h = picture_size.h;
+
+    rc = mm_jpeg_init(jpeg_obj);
+    if(0 != rc) {
+      CDBG_ERROR("%s:%d] mm_jpeg_init err = %d", __func__, __LINE__, rc);
+      free(jpeg_obj);
+      pthread_mutex_unlock(&g_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* remember in global variable */
+    g_jpeg_obj = jpeg_obj;
+  }
+
+  /* open new client */
+  clnt_hdl = mm_jpeg_new_client(g_jpeg_obj);
+  if (clnt_hdl > 0) {
+    /* valid client */
+    if (NULL != ops) {
+      /* fill in ops tbl if ptr not NULL */
+      ops->start_job = mm_jpeg_intf_start_job;
+      ops->abort_job = mm_jpeg_intf_abort_job;
+      ops->create_session = mm_jpeg_intf_create_session;
+      ops->destroy_session = mm_jpeg_intf_destroy_session;
+      ops->close = mm_jpeg_intf_close;
+    }
+  } else {
+    /* failed new client */
+    CDBG_ERROR("%s:%d] mm_jpeg_new_client failed", __func__, __LINE__);
+
+    if (0 == g_jpeg_obj->num_clients) {
+      /* no client, close jpeg */
+      mm_jpeg_deinit(g_jpeg_obj);
+      free(g_jpeg_obj);
+      g_jpeg_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_intf_lock);
+  return clnt_hdl;
+}
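+
+/* Illustrative client flow for the interface above (a sketch only; error
+ * handling and the parameter setup are omitted, and the picture size is a
+ * placeholder value):
+ *   mm_jpeg_ops_t ops;
+ *   mm_dimension pic_size = { .w = 4160, .h = 3120 };
+ *   uint32_t hdl = jpeg_open(&ops, pic_size);          // 0 on failure
+ *   uint32_t session_id, job_id;
+ *   ops.create_session(hdl, &params, &session_id);     // mm_jpeg_encode_params_t params
+ *   ops.start_job(&job, &job_id);                      // mm_jpeg_job_t job
+ *   ops.destroy_session(session_id);
+ *   ops.close(hdl);
+ */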
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
new file mode 100644
index 0000000..b1f4a93
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
@@ -0,0 +1,163 @@
+/* Copyright (c) 2013-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_jpeg_ionbuf.h"
+#include <linux/msm_ion.h>
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *     @cached: non-zero to request a cached ION allocation
+ *
+ *  Return:
+ *     buffer address
+ *
+ *  Description:
+ *      allocates ION buffer
+ *
+ **/
+void *buffer_allocate(buffer_t *p_buffer, int cached)
+{
+  void *l_buffer = NULL;
+
+  int lrc = 0;
+  struct ion_handle_data lhandle_data;
+
+  p_buffer->alloc.len = p_buffer->size;
+  p_buffer->alloc.align = 4096;
+  p_buffer->alloc.flags = (cached) ? ION_FLAG_CACHED : 0;
+  p_buffer->alloc.heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+
+  p_buffer->ion_fd = open("/dev/ion", O_RDONLY);
+  if (p_buffer->ion_fd < 0) {
+    CDBG_ERROR("%s :Ion open failed", __func__);
+    goto ION_ALLOC_FAILED;
+  }
+
+  /* Make it page size aligned */
+  p_buffer->alloc.len = (p_buffer->alloc.len + 4095U) & (~4095U);
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_ALLOC, &p_buffer->alloc);
+  if (lrc < 0) {
+    CDBG_ERROR("%s :ION allocation failed len %zu", __func__,
+      p_buffer->alloc.len);
+    goto ION_ALLOC_FAILED;
+  }
+
+  p_buffer->ion_info_fd.handle = p_buffer->alloc.handle;
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_SHARE,
+    &p_buffer->ion_info_fd);
+  if (lrc < 0) {
+    CDBG_ERROR("%s :ION map failed %s", __func__, strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+
+  p_buffer->p_pmem_fd = p_buffer->ion_info_fd.fd;
+
+  l_buffer = mmap(NULL, p_buffer->alloc.len, PROT_READ | PROT_WRITE,
+    MAP_SHARED, p_buffer->p_pmem_fd, 0);
+
+  if (l_buffer == MAP_FAILED) {
+    CDBG_ERROR("%s :ION_MMAP_FAILED: %s (%d)", __func__,
+      strerror(errno), errno);
+    goto ION_MAP_FAILED;
+  }
+
+  return l_buffer;
+
+ION_MAP_FAILED:
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+  return NULL;
+ION_ALLOC_FAILED:
+  return NULL;
+
+}
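+
+/* Note for callers (as used elsewhere in this interface): p_buffer->size must
+ * be set before calling buffer_allocate(), and the returned mapping should be
+ * stored in p_buffer->addr so that buffer_deallocate() and buffer_invalidate()
+ * below can operate on it. */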
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     0 on success, error value from munmap otherwise
+ *
+ *  Description:
+ *      deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer)
+{
+  int lrc = 0;
+  size_t lsize = (p_buffer->size + 4095U) & (~4095U);
+
+  struct ion_handle_data lhandle_data;
+  lrc = munmap(p_buffer->addr, lsize);
+
+  close(p_buffer->ion_info_fd.fd);
+
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+
+  close(p_buffer->ion_fd);
+  return lrc;
+}
+
+/** buffer_invalidate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      Invalidates the cached buffer
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer)
+{
+  int lrc = 0;
+  struct ion_flush_data cache_inv_data;
+  struct ion_custom_data custom_data;
+
+  memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+  memset(&custom_data, 0, sizeof(custom_data));
+  cache_inv_data.vaddr = p_buffer->addr;
+  cache_inv_data.fd = p_buffer->ion_info_fd.fd;
+  cache_inv_data.handle = p_buffer->ion_info_fd.handle;
+  cache_inv_data.length = (unsigned int)p_buffer->size;
+  custom_data.cmd = (unsigned int)ION_IOC_INV_CACHES;
+  custom_data.arg = (unsigned long)&cache_inv_data;
+
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_CUSTOM, &custom_data);
+  if (lrc < 0)
+    CDBG_ERROR("%s: Cache Invalidate failed: %s\n", __func__, strerror(errno));
+
+  return lrc;
+}
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
new file mode 100644
index 0000000..03c8f3c
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
@@ -0,0 +1,182 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue)
+{
+    pthread_mutex_init(&queue->lock, NULL);
+    cam_list_init(&queue->head.list);
+    queue->size = 0;
+    return 0;
+}
+
+int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+    mm_jpeg_q_node_t* node =
+        (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+    if (NULL == node) {
+        CDBG_ERROR("%s: No memory for mm_jpeg_q_node_t", __func__);
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_jpeg_q_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+
+}
+
+int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_jpeg_q_node_t* node =
+        (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+    if (NULL == node) {
+        CDBG_ERROR("%s: No memory for mm_jpeg_q_node_t", __func__);
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_jpeg_q_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    cam_list_insert_before_node(&node->list, pos);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_data_t data;
+    mm_jpeg_q_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    memset(&data, 0, sizeof(data));
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+        cam_list_del_node(&node->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue)
+{
+    uint32_t size = 0;
+
+    pthread_mutex_lock(&queue->lock);
+    size = queue->size;
+    pthread_mutex_unlock(&queue->lock);
+
+    return size;
+
+}
+
+int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
+
+int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+        /* advance before the node is unlinked and freed */
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* for now we only assume there is no ptr inside data
+         * so we free data directly */
+        if (NULL != node->data.p) {
+            free(node->data.p);
+        }
+        free(node);
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_data_t data;
+    mm_jpeg_q_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    memset(&data, 0, sizeof(data));
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+    }
+    return data;
+}
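+
+/* Queue semantics, as implemented above: mm_jpeg_queue_enq() appends to the
+ * tail, mm_jpeg_queue_enq_head() pushes to the front, and mm_jpeg_queue_deq()
+ * / mm_jpeg_queue_peek() operate on the head, so the default behaviour is
+ * FIFO. mm_jpeg_queue_flush() additionally frees node->data.p. */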
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
new file mode 100644
index 0000000..4ed9062
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
@@ -0,0 +1,1190 @@
+/* Copyright (c) 2013-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer);
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_EVENTTYPE eEvent,
+    OMX_U32 nData1,
+    OMX_U32 nData2,
+    OMX_PTR pEventData);
+
+
+/** mm_jpegdec_destroy_job
+ *
+ *  Arguments:
+ *    @p_session: Session obj
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the job-based parameters
+ *
+ **/
+static int32_t mm_jpegdec_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = 0;
+
+  return rc;
+}
+
+/** mm_jpegdec_job_done:
+ *
+ *  Arguments:
+ *    @p_session: decode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Finalize the job
+ *
+ **/
+static void mm_jpegdec_job_done(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /*Destroy job related params*/
+  mm_jpegdec_destroy_job(p_session);
+
+  /*remove the job*/
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+    p_session->jobId);
+  if (node) {
+    free(node);
+  }
+  p_session->encoding = OMX_FALSE;
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+
+/** mm_jpegdec_session_send_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Send the buffers to OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_send_buffers(void *data)
+{
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_BUFFER_INFO lbuffer_info;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_in_omx_buf[i]), 0,
+      &lbuffer_info, p_params->src_main_buf[i].buf_size,
+      p_params->src_main_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpegdec_session_free_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Free the buffers from OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_free_buffers(void *data)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 0, p_session->p_in_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpegdec_session_create:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error types
+ *
+ *  Description:
+ *       Create a jpeg decode session
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_create(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  pthread_mutex_init(&p_session->lock, NULL);
+  pthread_cond_init(&p_session->cond, NULL);
+  cirq_reset(&p_session->cb_q);
+  p_session->state_change_pending = OMX_FALSE;
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->error_flag = OMX_ErrorNone;
+  p_session->ebd_count = 0;
+  p_session->fbd_count = 0;
+  p_session->encode_pid = -1;
+  p_session->config = OMX_FALSE;
+
+  p_session->omx_callbacks.EmptyBufferDone = mm_jpegdec_ebd;
+  p_session->omx_callbacks.FillBufferDone = mm_jpegdec_fbd;
+  p_session->omx_callbacks.EventHandler = mm_jpegdec_event_handler;
+  p_session->exif_count_local = 0;
+
+  rc = OMX_GetHandle(&p_session->omx_handle,
+    "OMX.qcom.image.jpeg.decoder",
+    (void *)p_session,
+    &p_session->omx_callbacks);
+
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] OMX_GetHandle failed (%d)", __func__, __LINE__, rc);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpegdec_session_destroy:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Destroy a jpeg decode session
+ *
+ **/
+void mm_jpegdec_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+  if (NULL == p_session->omx_handle) {
+    CDBG_ERROR("%s:%d] invalid handle", __func__, __LINE__);
+    return;
+  }
+
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+    mm_jpegdec_session_free_buffers);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  rc = OMX_FreeHandle(p_session->omx_handle);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] OMX_FreeHandle failed (%d)", __func__, __LINE__, rc);
+  }
+  p_session->omx_handle = NULL;
+
+
+  pthread_mutex_destroy(&p_session->lock);
+  pthread_cond_destroy(&p_session->cond);
+  CDBG("%s:%d] X", __func__, __LINE__);
+}
+
+/** mm_jpegdec_session_config_ports:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+  mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[p_jobparams->src_index];
+
+  p_session->inputPort.nPortIndex = 0;
+  p_session->outputPort.nPortIndex = 1;
+
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  p_session->inputPort.format.image.nFrameWidth =
+    (OMX_U32)p_jobparams->main_dim.src_dim.width;
+  p_session->inputPort.format.image.nFrameHeight =
+    (OMX_U32)p_jobparams->main_dim.src_dim.height;
+  p_session->inputPort.format.image.nStride =
+    p_src_buf->offset.mp[0].stride;
+  p_session->inputPort.format.image.nSliceHeight =
+    (OMX_U32)p_src_buf->offset.mp[0].scanline;
+  p_session->inputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+  p_session->inputPort.nBufferSize =
+    p_params->src_main_buf[p_jobparams->src_index].buf_size;
+  p_session->inputPort.nBufferCountActual = (OMX_U32)p_params->num_src_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  return ret;
+}
+
+
+/** mm_jpegdec_session_config_main:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  /* config port */
+  CDBG("%s:%d] config port", __func__, __LINE__);
+  rc = mm_jpegdec_session_config_ports(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config port failed", __func__);
+    return rc;
+  }
+
+
+  /* TODO: config crop */
+
+  return rc;
+}
+
+/** mm_jpegdec_session_configure:
+ *
+ *  Arguments:
+ *    @p_session: decode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpegdec_session_configure(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+  CDBG("%s:%d] E ", __func__, __LINE__);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* config main img */
+  ret = mm_jpegdec_session_config_main(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config main img failed", __func__, __LINE__);
+    goto error;
+  }
+
+  /* TODO: common config (if needed) */
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+    mm_jpegdec_session_send_buffers);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to idle failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+    NULL);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to executing failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+error:
+  CDBG("%s:%d] X ret %d", __func__, __LINE__, ret);
+  return ret;
+}
+
+static OMX_ERRORTYPE mm_jpeg_session_port_enable(
+    mm_jpeg_job_session_t *p_session,
+    OMX_U32 nPortIndex,
+    OMX_BOOL wait)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_EVENTTYPE lEvent;
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->event_pending = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+      nPortIndex, NULL);
+
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  if (wait == OMX_TRUE) {
+    // Wait for cmd complete
+    pthread_mutex_lock(&p_session->lock);
+    if (p_session->event_pending == OMX_TRUE) {
+      CDBG("%s:%d] before wait", __func__, __LINE__);
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+      lEvent = p_session->omxEvent;
+      CDBG("%s:%d] after wait", __func__, __LINE__);
+    }
+    lEvent = p_session->omxEvent;
+    pthread_mutex_unlock(&p_session->lock);
+
+    if (lEvent != OMX_EventCmdComplete) {
+      CDBG("%s:%d] Unexpected event %d", __func__, __LINE__,lEvent);
+      return OMX_ErrorUndefined;
+    }
+  }
+  return OMX_ErrorNone;
+}
+
+static OMX_ERRORTYPE mm_jpeg_session_port_disable(
+    mm_jpeg_job_session_t *p_session,
+    OMX_U32 nPortIndex,
+    OMX_BOOL wait)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_EVENTTYPE lEvent;
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->event_pending = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+      nPortIndex, NULL);
+
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+  if (wait == OMX_TRUE) {
+    // Wait for cmd complete
+    pthread_mutex_lock(&p_session->lock);
+    if (p_session->event_pending == OMX_TRUE) {
+      CDBG("%s:%d] before wait", __func__, __LINE__);
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+
+      CDBG("%s:%d] after wait", __func__, __LINE__);
+    }
+    lEvent = p_session->omxEvent;
+    pthread_mutex_unlock(&p_session->lock);
+
+    if (lEvent != OMX_EventCmdComplete) {
+      CDBG("%s:%d] Unexpected event %d", __func__, __LINE__,lEvent);
+      return OMX_ErrorUndefined;
+    }
+  }
+  return OMX_ErrorNone;
+}
+
+
+/** mm_jpegdec_session_decode:
+ *
+ *  Arguments:
+ *    @p_session: decode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Start the decoding
+ *
+ **/
+static OMX_ERRORTYPE mm_jpegdec_session_decode(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+  mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+  OMX_EVENTTYPE lEvent;
+  uint32_t i;
+  QOMX_BUFFER_INFO lbuffer_info;
+
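+  /* Decode flow implemented below: submit the input buffer with
+   * OMX_EmptyThisBuffer, wait for OMX_EventPortSettingsChanged, reconfigure
+   * and re-enable the output port with the destination buffers, then issue
+   * OMX_FillThisBuffer. */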
+  pthread_mutex_lock(&p_session->lock);
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->encoding = OMX_FALSE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (OMX_FALSE == p_session->config) {
+    ret = mm_jpegdec_session_configure(p_session);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+    }
+    p_session->config = OMX_TRUE;
+  }
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->encoding = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  p_session->event_pending = OMX_TRUE;
+
+  ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+    p_session->p_in_omx_buf[p_jobparams->src_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  // Wait for port settings changed
+  pthread_mutex_lock(&p_session->lock);
+  if (p_session->event_pending == OMX_TRUE) {
+    CDBG("%s:%d] before wait", __func__, __LINE__);
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+  }
+  lEvent = p_session->omxEvent;
+  CDBG("%s:%d] after wait", __func__, __LINE__);
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (lEvent != OMX_EventPortSettingsChanged) {
+    CDBG("%s:%d] Unexpected event %d", __func__, __LINE__,lEvent);
+    goto error;
+  }
+
+  // Disable output port (wait)
+  mm_jpeg_session_port_disable(p_session,
+      p_session->outputPort.nPortIndex,
+      OMX_TRUE);
+
+  // Get port definition
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+      &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  // Set port definition
+  p_session->outputPort.format.image.nFrameWidth =
+    (OMX_U32)p_jobparams->main_dim.dst_dim.width;
+  p_session->outputPort.format.image.nFrameHeight =
+    (OMX_U32)p_jobparams->main_dim.dst_dim.height;
+  p_session->outputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+
+  p_session->outputPort.nBufferSize =
+     p_params->dest_buf[p_jobparams->dst_index].buf_size;
+   p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+
+   p_session->outputPort.format.image.nSliceHeight =
+       (OMX_U32)
+       p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].scanline;
+   p_session->outputPort.format.image.nStride =
+       p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].stride;
+
+   ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+     &p_session->outputPort);
+   if (ret) {
+     CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+     return ret;
+   }
+
+  // Enable port (no wait)
+  mm_jpeg_session_port_enable(p_session,
+      p_session->outputPort.nPortIndex,
+      OMX_FALSE);
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  // Use buffers
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    lbuffer_info.fd = (OMX_U32)p_params->dest_buf[i].fd;
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, (unsigned int)i);
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+        1, &lbuffer_info, p_params->dest_buf[i].buf_size,
+        p_params->dest_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+
+  // Wait for port enable completion
+  pthread_mutex_lock(&p_session->lock);
+  if (p_session->event_pending == OMX_TRUE) {
+    CDBG("%s:%d] before wait", __func__, __LINE__);
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    lEvent = p_session->omxEvent;
+    CDBG("%s:%d] after wait", __func__, __LINE__);
+  }
+  lEvent = p_session->omxEvent;
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (lEvent != OMX_EventCmdComplete) {
+    CDBG("%s:%d] Unexpected event %d", __func__, __LINE__,lEvent);
+    goto error;
+  }
+
+  ret = OMX_FillThisBuffer(p_session->omx_handle,
+    p_session->p_out_omx_buf[p_jobparams->dst_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+  CDBG("%s:%d] X ", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpegdec_process_decoding_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg client
+ *    @job_node: job node
+ *
+ *  Return:
+ *       0 for success -1 otherwise
+ *
+ *  Description:
+ *       Start the decoding job
+ *
+ **/
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  /* check if valid session */
+  p_session = mm_jpeg_get_session(my_obj, job_node->dec_info.job_id);
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid job id %x", __func__, __LINE__,
+      job_node->dec_info.job_id);
+    return -1;
+  }
+
+  /* send decode cmd to OMX, queue job into ongoing queue */
+  qdata.p = job_node;
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+  if (rc) {
+    CDBG_ERROR("%s:%d] jpeg enqueue failed %d",
+      __func__, __LINE__, rc);
+    goto error;
+  }
+
+  p_session->decode_job = job_node->dec_info.decode_job;
+  p_session->jobId = job_node->dec_info.job_id;
+  ret = mm_jpegdec_session_decode(p_session);
+  if (ret) {
+    CDBG_ERROR("%s:%d] encode session failed", __func__, __LINE__);
+    goto error;
+  }
+
+  CDBG("%s:%d] Success X ", __func__, __LINE__);
+  return rc;
+
+error:
+
+  if ((OMX_ErrorNone != ret) &&
+    (NULL != p_session->dec_params.jpeg_cb)) {
+    p_session->job_status = JPEG_JOB_STATUS_ERROR;
+    CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+      p_session->job_status);
+    p_session->dec_params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      NULL,
+      p_session->dec_params.userdata);
+  }
+
+  /*remove the job*/
+  mm_jpegdec_job_done(p_session);
+  CDBG("%s:%d] Error X ", __func__, __LINE__);
+
+  return rc;
+}
+
+/** mm_jpegdec_start_decode_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job: pointer to decode job
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the decoding job
+ *
+ **/
+int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t *job,
+  uint32_t *job_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = -1;
+  uint8_t session_idx = 0;
+  uint8_t client_idx = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_decode_job_t *p_jobparams  = &job->decode_job;
+
+  *job_id = 0;
+
+  /* check if valid session */
+  session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+  client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+  CDBG("%s:%d] session_idx %d client idx %d", __func__, __LINE__,
+    session_idx, client_idx);
+
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    CDBG_ERROR("%s:%d] invalid session id %x", __func__, __LINE__,
+      job->decode_job.session_id);
+    return rc;
+  }
+
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  if (OMX_FALSE == p_session->active) {
+    CDBG_ERROR("%s:%d] session not active %x", __func__, __LINE__,
+      job->decode_job.session_id);
+    return rc;
+  }
+
+  if ((p_jobparams->src_index >= (int32_t)p_session->dec_params.num_src_bufs) ||
+    (p_jobparams->dst_index >= (int32_t)p_session->dec_params.num_dst_bufs)) {
+    CDBG_ERROR("%s:%d] invalid buffer indices", __func__, __LINE__);
+    return rc;
+  }
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
+  *job_id = job->decode_job.session_id |
+    ((p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->dec_info.decode_job = job->decode_job;
+  node->dec_info.job_id = *job_id;
+  node->dec_info.client_handle = p_session->client_hdl;
+  node->type = MM_JPEG_CMD_TYPE_DECODE_JOB;
+
+  qdata.p = node;
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+  if (0 == rc) {
+    cam_sem_post(&my_obj->job_mgr.job_sem);
+  }
+
+  return rc;
+}
+
+/** mm_jpegdec_create_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @p_params: pointer to decode params
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Create the decoding session
+ *
+ **/
+int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_decode_params_t *p_params,
+  uint32_t* p_session_id)
+{
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint8_t clnt_idx = 0;
+  int session_idx = -1;
+  mm_jpeg_job_session_t *p_session = NULL;
+  *p_session_id = 0;
+
+  /* validate the parameters */
+  if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+    || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+    CDBG_ERROR("%s:%d] invalid num buffers", __func__, __LINE__);
+    return -1;
+  }
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return -1;
+  }
+
+  session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+  if (session_idx < 0) {
+    CDBG_ERROR("%s:%d] invalid session id (%d)", __func__, __LINE__, session_idx);
+    return -1;
+  }
+
+  ret = mm_jpegdec_session_create(p_session);
+  if (OMX_ErrorNone != ret) {
+    p_session->active = OMX_FALSE;
+    CDBG_ERROR("%s:%d] jpeg session create failed", __func__, __LINE__);
+    return -1;
+  }
+
+  *p_session_id = (JOB_ID_MAGICVAL << 24) |
+    ((unsigned)session_idx << 8) | clnt_idx;
+
+  /*copy the params*/
+  p_session->dec_params = *p_params;
+  p_session->client_hdl = client_hdl;
+  p_session->sessionId = *p_session_id;
+  p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+  CDBG("%s:%d] session id %x", __func__, __LINE__, *p_session_id);
+
+  return rc;
+}
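+
+/* Layout of the identifiers composed above, derived from this file (the
+ * GET_SESSION_IDX/GET_CLIENT_IDX macros are assumed to extract the fields
+ * shown, and JOB_ID_MAGICVAL and JOB_HIST_MAX are assumed to fit the widths):
+ *
+ *   session_id: [31..24] JOB_ID_MAGICVAL | [15..8] session_idx | [7..0] clnt_idx
+ *   job_id    : session_id | [23..16] (job_hist % JOB_HIST_MAX)
+ *
+ * e.g. client index 1, session index 2, third job in the history gives
+ * session_id 0xMM000201 and job_id 0xMM020201, with 0xMM the magic value.
+ */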
+
+/** mm_jpegdec_destroy_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: session to be destroyed
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the decoding session
+ *
+ **/
+int32_t mm_jpegdec_destroy_session(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  uint32_t session_id = 0;
+
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid session", __func__, __LINE__);
+    return -1;
+  }
+
+  session_id = p_session->sessionId;
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  CDBG("%s:%d] abort todo jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  CDBG("%s:%d] abort ongoing jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpegdec_session_destroy(p_session);
+  mm_jpeg_remove_session_idx(my_obj, session_id);
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+  CDBG("%s:%d] X", __func__, __LINE__);
+
+  return rc;
+}
+
+/** mm_jpegdec_destroy_session_by_id:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the decoding session matching the given session id
+ *
+ **/
+int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+  mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+  if (p_session == NULL) {
+      CDBG_ERROR("%s: error: mm_jpeg_get_session returned NULL",__func__);
+      return -1;
+  }
+  return mm_jpegdec_destroy_session(my_obj, p_session);
+}
+
+
+
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->ebd_count);
+  pthread_mutex_lock(&p_session->lock);
+  p_session->ebd_count++;
+  pthread_mutex_unlock(&p_session->lock);
+  return 0;
+}
+
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+  mm_jpeg_output_t output_buf;
+
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->fbd_count);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  p_session->fbd_count++;
+  if (NULL != p_session->dec_params.jpeg_cb) {
+    p_session->job_status = JPEG_JOB_STATUS_DONE;
+    output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+    output_buf.buf_vaddr = pBuffer->pBuffer;
+    output_buf.fd = 0;
+    CDBG("%s:%d] send jpeg callback %d", __func__, __LINE__,
+      p_session->job_status);
+    p_session->dec_params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      &output_buf,
+      p_session->dec_params.userdata);
+
+    /* remove from ready queue */
+    mm_jpegdec_job_done(p_session);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  return ret;
+}
+
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent __unused,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData __unused)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] %d %d %d state %d", __func__, __LINE__, eEvent, (int)nData1,
+    (int)nData2, p_session->abort_state);
+
+  CDBG("%s:%d] AppData=%p ", __func__, __LINE__, pAppData);
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->omxEvent = eEvent;
+  if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+    p_session->abort_state = MM_JPEG_ABORT_DONE;
+    pthread_cond_signal(&p_session->cond);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  if (eEvent == OMX_EventError) {
+    if (p_session->encoding == OMX_TRUE) {
+      CDBG("%s:%d] Error during encoding", __func__, __LINE__);
+
+      /* send jpeg callback */
+      if (NULL != p_session->dec_params.jpeg_cb) {
+        p_session->job_status = JPEG_JOB_STATUS_ERROR;
+        CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+          p_session->job_status);
+        p_session->dec_params.jpeg_cb(p_session->job_status,
+          p_session->client_hdl,
+          p_session->jobId,
+          NULL,
+          p_session->dec_params.userdata);
+      }
+
+      /* remove from ready queue */
+      mm_jpegdec_job_done(p_session);
+    }
+    pthread_cond_signal(&p_session->cond);
+  } else if (eEvent == OMX_EventCmdComplete) {
+    p_session->state_change_pending = OMX_FALSE;
+    p_session->event_pending = OMX_FALSE;
+    pthread_cond_signal(&p_session->cond);
+  }  else if (eEvent == OMX_EventPortSettingsChanged) {
+    p_session->event_pending = OMX_FALSE;
+    pthread_cond_signal(&p_session->cond);
+  }
+
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d]", __func__, __LINE__);
+  return OMX_ErrorNone;
+}
+
+/** mm_jpegdec_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort the decoding job
+ *
+ **/
+int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    goto abort_done;
+  }
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    p_session = mm_jpeg_get_session(my_obj, node->dec_info.job_id);
+    if (p_session) {
+      mm_jpeg_session_abort(p_session);
+    } else {
+      CDBG_ERROR("%s:%d] Invalid job id 0x%x", __func__, __LINE__,
+        node->dec_info.job_id);
+    }
+    free(node);
+    goto abort_done;
+  }
+
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  return rc;
+}
+
+/** mm_jpegdec_init:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Initializes the jpeg client
+ *
+ **/
+int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  /* init job semaphore and launch jobmgr thread */
+  CDBG("%s:%d] Launch jobmgr thread rc %d", __func__, __LINE__, rc);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    CDBG_ERROR("%s:%d] OMX_Init failed", __func__, __LINE__);
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  return rc;
+}
+
+/** mm_jpegdec_deinit:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Deinits the jpeg client
+ *
+ **/
+int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* release jobmgr thread */
+  rc = mm_jpeg_jobmgr_thread_release(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  /* unload OMX engine */
+  OMX_Deinit();
+
+  /* deinit ongoing job and cb queue */
+  rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+
+  return rc;
+}
diff --git a/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
new file mode 100644
index 0000000..08c7d1d
--- /dev/null
+++ b/msm8974/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
@@ -0,0 +1,304 @@
+/* Copyright (c) 2013-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+static pthread_mutex_t g_dec_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_jpeg_obj* g_jpegdec_obj = NULL;
+
+/** mm_jpegdec_intf_start_job:
+ *
+ *  Arguments:
+ *    @job: jpeg decode job object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       start the jpeg decode job
+ *
+ **/
+static int32_t mm_jpegdec_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+  int32_t rc = -1;
+
+  if (NULL == job ||
+    NULL == job_id) {
+    CDBG_ERROR("%s:%d] invalid parameters for job or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj does not exist, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+  rc = mm_jpegdec_start_decode_job(g_jpegdec_obj, job, job_id);
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpegdec_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: decode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create a new jpeg decode session
+ *
+ **/
+static int32_t mm_jpegdec_intf_create_session(uint32_t client_hdl,
+    mm_jpeg_decode_params_t *p_params,
+    uint32_t *p_session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj does not exist, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpegdec_create_session(g_jpegdec_obj, client_hdl, p_params, p_session_id);
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpegdec_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy jpeg session
+ *
+ **/
+static int32_t mm_jpegdec_intf_destroy_session(uint32_t session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj does not exist, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpegdec_destroy_session_by_id(g_jpegdec_obj, session_id);
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpegdec_intf_abort_job:
+ *
+ *  Arguments:
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpegdec_intf_abort_job(uint32_t job_id)
+{
+  int32_t rc = -1;
+
+  if (0 == job_id) {
+    CDBG_ERROR("%s:%d] invalid jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj does not exist, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpegdec_abort_job(g_jpegdec_obj, job_id);
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpegdec_intf_close:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Close the jpeg decoder client
+ *
+ **/
+static int32_t mm_jpegdec_intf_close(uint32_t client_hdl)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl) {
+    CDBG_ERROR("%s:%d] invalid client_hdl", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj does not exist, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_close(g_jpegdec_obj, client_hdl);
+  g_jpegdec_obj->num_clients--;
+  if(0 == rc) {
+    if (0 == g_jpegdec_obj->num_clients) {
+      /* No client, close jpeg internally */
+      rc = mm_jpegdec_deinit(g_jpegdec_obj);
+      free(g_jpegdec_obj);
+      g_jpegdec_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+
+
+/** jpegdec_open:
+ *
+ *  Arguments:
+ *    @ops: ops table pointer
+ *
+ *  Return:
+ *       0 on failure, a non-zero client handle otherwise
+ *
+ *  Description:
+ *       Open a jpeg decoder client
+ *
+ **/
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops)
+{
+  int32_t rc = 0;
+  uint32_t clnt_hdl = 0;
+  mm_jpeg_obj* jpeg_obj = NULL;
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  /* first time open */
+  if(NULL == g_jpegdec_obj) {
+    jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+    if(NULL == jpeg_obj) {
+      CDBG_ERROR("%s:%d] no mem", __func__, __LINE__);
+      pthread_mutex_unlock(&g_dec_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* initialize jpeg obj */
+    memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+    rc = mm_jpegdec_init(jpeg_obj);
+    if(0 != rc) {
+      CDBG_ERROR("%s:%d] mm_jpeg_init err = %d", __func__, __LINE__, rc);
+      free(jpeg_obj);
+      pthread_mutex_unlock(&g_dec_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* remember in global variable */
+    g_jpegdec_obj = jpeg_obj;
+  }
+
+  /* open new client */
+  clnt_hdl = mm_jpeg_new_client(g_jpegdec_obj);
+  if (clnt_hdl > 0) {
+    /* valid client */
+    if (NULL != ops) {
+      /* fill in ops tbl if ptr not NULL */
+      ops->start_job = mm_jpegdec_intf_start_job;
+      ops->abort_job = mm_jpegdec_intf_abort_job;
+      ops->create_session = mm_jpegdec_intf_create_session;
+      ops->destroy_session = mm_jpegdec_intf_destroy_session;
+      ops->close = mm_jpegdec_intf_close;
+    }
+  } else {
+    /* failed new client */
+    CDBG_ERROR("%s:%d] mm_jpeg_new_client failed", __func__, __LINE__);
+
+    if (0 == g_jpegdec_obj->num_clients) {
+      /* no client, close jpeg */
+      mm_jpegdec_deinit(g_jpegdec_obj);
+      free(g_jpegdec_obj);
+      g_jpegdec_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return clnt_hdl;
+}
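+
+/* Usage sketch (illustrative, kept as a comment): a typical client sequence
+ * against the ops table filled in by jpegdec_open(). Only fields that appear
+ * in this file and in mm_jpegdec.c are shown; the callback's status type and
+ * the remaining buffer/dimension fields of mm_jpeg_decode_params_t and
+ * mm_jpeg_job_t come from mm_jpeg_interface.h and are assumptions here:
+ *
+ *   static void my_decode_cb(jpeg_job_status_t status, uint32_t client_hdl,
+ *       uint32_t job_id, mm_jpeg_output_t *p_output, void *userdata)
+ *   {
+ *     // p_output->buf_vaddr / p_output->buf_filled_len hold the decoded image;
+ *     // signal the thread that is waiting for the job to finish
+ *   }
+ *
+ *   mm_jpegdec_ops_t ops;
+ *   uint32_t hdl = jpegdec_open(&ops);          // 0 means open failed
+ *
+ *   mm_jpeg_decode_params_t params;
+ *   memset(&params, 0, sizeof(params));
+ *   params.jpeg_cb = my_decode_cb;
+ *   params.userdata = NULL;
+ *   params.num_src_bufs = 1;                    // source/destination buffer
+ *   params.num_dst_bufs = 1;                    // descriptors go here as well
+ *
+ *   uint32_t session_id = 0, job_id = 0;
+ *   ops.create_session(hdl, &params, &session_id);
+ *
+ *   mm_jpeg_job_t job;
+ *   memset(&job, 0, sizeof(job));
+ *   job.decode_job.session_id = session_id;
+ *   job.decode_job.src_index = 0;
+ *   job.decode_job.dst_index = 0;
+ *   ops.start_job(&job, &job_id);               // my_decode_cb fires on completion
+ *
+ *   // ... wait for the callback ...
+ *   ops.destroy_session(session_id);
+ *   ops.close(hdl);
+ */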
+
+
+
diff --git a/msm8974/QCamera2/util/QCameraCmdThread.cpp b/msm8974/QCamera2/util/QCameraCmdThread.cpp
new file mode 100644
index 0000000..c5be4ad
--- /dev/null
+++ b/msm8974/QCamera2/util/QCameraCmdThread.cpp
@@ -0,0 +1,210 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <sys/prctl.h>
+#include "QCameraCmdThread.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraCmdThread
+ *
+ * DESCRIPTION: default constructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::QCameraCmdThread() :
+    cmd_queue()
+{
+    cmd_pid = 0;
+    cam_sem_init(&sync_sem, 0);
+    cam_sem_init(&cmd_sem, 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCmdThread
+ *
+ * DESCRIPTION: destructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::~QCameraCmdThread()
+{
+    cam_sem_destroy(&sync_sem);
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : launch
+ *
+ * DESCRIPTION: launch Cmd Thread
+ *
+ * PARAMETERS :
+ *   @start_routine : thread routine function ptr
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
+                                 void* user_data)
+{
+    /* launch the thread */
+    int rc = pthread_create(&cmd_pid,
+                            NULL,
+                            start_routine,
+                            user_data);
+    if (rc != 0) {
+        ALOGE("%s: pthread_create failed, rc = %d", __func__, rc);
+        cmd_pid = 0;
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setName
+ *
+ * DESCRIPTION: name the cmd thread
+ *
+ * PARAMETERS :
+ *   @name : desired name for the thread
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::setName(const char* name)
+{
+    /* name the thread */
+    prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendCmd
+ *
+ * DESCRIPTION: send a command to the Cmd Thread
+ *
+ * PARAMETERS :
+ *   @cmd     : command to be executed.
+ *   @sync_cmd: flag to indicate if this is a synchronized cmd. If true, this call
+ *              will wait until signal is set after the command is completed.
+ *   @priority: flag to indicate if this is a cmd with priority. If true, the cmd
+ *              will be enqueued to the head with priority.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority)
+{
+    camera_cmd_t *node = (camera_cmd_t *)malloc(sizeof(camera_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(node, 0, sizeof(camera_cmd_t));
+    node->cmd = cmd;
+
+    if (priority) {
+        cmd_queue.enqueueWithPriority((void *)node);
+    } else {
+        cmd_queue.enqueue((void *)node);
+    }
+    cam_sem_post(&cmd_sem);
+
+    /* if it is a sync call, wait until the command has been processed */
+    if (sync_cmd) {
+        cam_sem_wait(&sync_sem);
+    }
+    return NO_ERROR;
+}
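+
+/* Calling-convention sketch (illustrative): the uint8_t flags are used as
+ * booleans. A synchronous command assumes the worker thread posts sync_sem
+ * via cam_sem_post(&sync_sem) once the command has been handled; otherwise
+ * the caller would block indefinitely.
+ *
+ *   // asynchronous, normal priority: appended to the tail of cmd_queue
+ *   cmdThread.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, 0, 0);
+ *
+ *   // synchronous, high priority: inserted at the head of cmd_queue and the
+ *   // caller blocks on sync_sem until the worker signals completion
+ *   cmdThread.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, 1, 1);
+ */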
+
+/*===========================================================================
+ * FUNCTION   : getCmd
+ *
+ * DESCRIPTION: dequeue a command from the cmd queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : cmd dequeued
+ *==========================================================================*/
+camera_cmd_type_t QCameraCmdThread::getCmd()
+{
+    camera_cmd_type_t cmd = CAMERA_CMD_TYPE_NONE;
+    camera_cmd_t *node = (camera_cmd_t *)cmd_queue.dequeue();
+    if (NULL == node) {
+        ALOGD("%s: No notify avail", __func__);
+        return CAMERA_CMD_TYPE_NONE;
+    } else {
+        cmd = node->cmd;
+        free(node);
+    }
+    return cmd;
+}
+
+/*===========================================================================
+ * FUNCTION   : exit
+ *
+ * DESCRIPTION: exit the CMD thread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::exit()
+{
+    int32_t rc = NO_ERROR;
+
+    if (cmd_pid == 0) {
+        return rc;
+    }
+
+    rc = sendCmd(CAMERA_CMD_TYPE_EXIT, 0, 1);
+    if (NO_ERROR != rc) {
+        ALOGE("%s: Error during exit, rc = %d", __func__, rc);
+        return rc;
+    }
+
+    /* wait until cmd thread exits */
+    if (pthread_join(cmd_pid, NULL) != 0) {
+        ALOGD("%s: pthread dead already\n", __func__);
+    }
+    cmd_pid = 0;
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/util/QCameraCmdThread.h b/msm8974/QCamera2/util/QCameraCmdThread.h
new file mode 100644
index 0000000..a9511dc
--- /dev/null
+++ b/msm8974/QCamera2/util/QCameraCmdThread.h
@@ -0,0 +1,74 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CMD_THREAD_H__
+#define __QCAMERA_CMD_THREAD_H__
+
+#include <pthread.h>
+#include <cam_semaphore.h>
+
+#include "cam_types.h"
+#include "QCameraQueue.h"
+
+namespace qcamera {
+
+typedef enum
+{
+    CAMERA_CMD_TYPE_NONE,
+    CAMERA_CMD_TYPE_START_DATA_PROC,
+    CAMERA_CMD_TYPE_STOP_DATA_PROC,
+    CAMERA_CMD_TYPE_DO_NEXT_JOB,
+    CAMERA_CMD_TYPE_EXIT,
+    CAMERA_CMD_TYPE_MAX
+} camera_cmd_type_t;
+
+typedef struct {
+    camera_cmd_type_t cmd;
+} camera_cmd_t;
+
+class QCameraCmdThread {
+public:
+    QCameraCmdThread();
+    ~QCameraCmdThread();
+
+    int32_t launch(void *(*start_routine)(void *), void* user_data);
+    int32_t setName(const char* name);
+    int32_t exit();
+    int32_t sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority);
+    camera_cmd_type_t getCmd();
+
+    QCameraQueue cmd_queue;      /* cmd queue */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;               /* semaphore for cmd thread */
+    cam_semaphore_t sync_sem;              /* semaphore for synchronized call signal */
+};
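+
+/* Worker-loop sketch (illustrative; dataProcRoutine and the job handling body
+ * are hypothetical): the thread routine handed to launch() typically blocks
+ * on cmd_sem and drains commands via getCmd() until it sees
+ * CAMERA_CMD_TYPE_EXIT:
+ *
+ *   void *dataProcRoutine(void *data)
+ *   {
+ *       QCameraCmdThread *cmdThread = (QCameraCmdThread *)data;
+ *       int running = 1;
+ *       cmdThread->setName("CAM_dataProc");
+ *       do {
+ *           cam_sem_wait(&cmdThread->cmd_sem);      // one post per queued cmd
+ *           switch (cmdThread->getCmd()) {
+ *           case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ *               // dequeue and process one unit of work here
+ *               break;
+ *           case CAMERA_CMD_TYPE_EXIT:
+ *               running = 0;
+ *               break;
+ *           default:
+ *               break;
+ *           }
+ *       } while (running);
+ *       return NULL;
+ *   }
+ *
+ *   // owner side: cmdThread.launch(dataProcRoutine, &cmdThread);
+ */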
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CMD_THREAD_H__ */
diff --git a/msm8974/QCamera2/util/QCameraQueue.cpp b/msm8974/QCamera2/util/QCameraQueue.cpp
new file mode 100644
index 0000000..4c6585d
--- /dev/null
+++ b/msm8974/QCamera2/util/QCameraQueue.cpp
@@ -0,0 +1,338 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include "QCameraQueue.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: default constructor of QCameraQueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue()
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = NULL;
+    m_userData = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: constructor of QCameraQueue
+ *
+ * PARAMETERS :
+ *   @data_rel_fn : function ptr to release node data internal resource
+ *   @user_data   : user data ptr
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue(release_data_fn data_rel_fn, void *user_data)
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = data_rel_fn;
+    m_userData = user_data;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraQueue
+ *
+ * DESCRIPTION: destructor of QCameraQueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::~QCameraQueue()
+{
+    flush();
+    pthread_mutex_destroy(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : isEmpty
+ *
+ * DESCRIPTION: return whether the queue is empty
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- queue is empty; false -- not empty
+ *==========================================================================*/
+bool QCameraQueue::isEmpty()
+{
+    bool flag = true;
+    pthread_mutex_lock(&m_lock);
+    if (m_size > 0) {
+        flag = false;
+    }
+    pthread_mutex_unlock(&m_lock);
+    return flag;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueue
+ *
+ * DESCRIPTION: enqueue data into the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueue(void *data)
+{
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_q_node", __func__);
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    cam_list_add_tail_node(&node->list, &m_head.list);
+    m_size++;
+    pthread_mutex_unlock(&m_lock);
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueueWithPriority
+ *
+ * DESCRIPTION: enqueue data into the queue with priority; the node is
+ *              inserted at the head of the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueueWithPriority(void *data)
+{
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_q_node", __func__);
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    struct cam_list *p_next = m_head.list.next;
+
+    m_head.list.next = &node->list;
+    p_next->prev = &node->list;
+    node->list.next = p_next;
+    node->list.prev = &m_head.list;
+
+    m_size++;
+    pthread_mutex_unlock(&m_lock);
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : dequeue
+ *
+ * DESCRIPTION: dequeue data from the queue
+ *
+ * PARAMETERS :
+ *   @bFromHead : if true, dequeue from the head
+ *                if false, dequeue from the tail
+ *
+ * RETURN     : data ptr. NULL if not any data in the queue.
+ *==========================================================================*/
+void* QCameraQueue::dequeue(bool bFromHead)
+{
+    camera_q_node* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    if (bFromHead) {
+        pos = head->next;
+    } else {
+        pos = head->prev;
+    }
+    if (pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        cam_list_del_node(&node->list);
+        m_size--;
+    }
+    pthread_mutex_unlock(&m_lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: flush all nodes from the queue, queue will be empty after this
+ *              operation.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flush(){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        m_size--;
+
+        if (NULL != node->data) {
+            if (m_dataFn) {
+                m_dataFn(node->data, m_userData);
+            }
+            free(node->data);
+        }
+        free(node);
+
+    }
+    m_size = 0;
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : flushNodes
+ *
+ * DESCRIPTION: flush only specific nodes, depending on
+ *              the given matching function.
+ *
+ * PARAMETERS :
+ *   @match   : matching function
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flushNodes(match_fn match){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == match ) {
+        return;
+    }
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        if ( match(node->data, m_userData) ) {
+            cam_list_del_node(&node->list);
+            m_size--;
+
+            if (NULL != node->data) {
+                if (m_dataFn) {
+                    m_dataFn(node->data, m_userData);
+                }
+                free(node->data);
+            }
+            free(node);
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+}
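+
+/* Match-callback sketch (illustrative; my_frame_t, frame_id and releaseFrame
+ * are hypothetical): flushNodes() passes each node's data pointer together
+ * with m_userData (set via the constructor) to the callback, and releases and
+ * removes every node for which the callback returns true.
+ *
+ *   static bool matchFrameId(void *data, void *user_data)
+ *   {
+ *       my_frame_t *frame = (my_frame_t *)data;       // node payload
+ *       uint32_t *target = (uint32_t *)user_data;     // queue's m_userData
+ *       return frame->frame_id == *target;
+ *   }
+ *
+ *   // QCameraQueue queue(releaseFrame, &target_frame_id);
+ *   // ...
+ *   queue.flushNodes(matchFrameId);
+ */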
+
+/*===========================================================================
+ * FUNCTION   : flushNodes
+ *
+ * DESCRIPTION: flush only specific nodes, depending on
+ *              the given matching function and match data.
+ *
+ * PARAMETERS :
+ *   @match      : matching function
+ *   @match_data : data to match against
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flushNodes(match_fn_data match, void *match_data){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == match ) {
+        return;
+    }
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        if ( match(node->data, m_userData, match_data) ) {
+            cam_list_del_node(&node->list);
+            m_size--;
+
+            if (NULL != node->data) {
+                if (m_dataFn) {
+                    m_dataFn(node->data, m_userData);
+                }
+                free(node->data);
+            }
+            free(node);
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+}; // namespace qcamera
diff --git a/msm8974/QCamera2/util/QCameraQueue.h b/msm8974/QCamera2/util/QCameraQueue.h
new file mode 100644
index 0000000..e1bf70c
--- /dev/null
+++ b/msm8974/QCamera2/util/QCameraQueue.h
@@ -0,0 +1,70 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_QUEUE_H__
+#define __QCAMERA_QUEUE_H__
+
+#include <pthread.h>
+#include "cam_list.h"
+
+namespace qcamera {
+
+typedef bool (*match_fn_data)(void *data, void *user_data, void *match_data);
+typedef void (*release_data_fn)(void* data, void *user_data);
+typedef bool (*match_fn)(void *data, void *user_data);
+
+class QCameraQueue {
+public:
+    QCameraQueue();
+    QCameraQueue(release_data_fn data_rel_fn, void *user_data);
+    virtual ~QCameraQueue();
+    bool enqueue(void *data);
+    bool enqueueWithPriority(void *data);
+    void flush();
+    void flushNodes(match_fn match);
+    void flushNodes(match_fn_data match, void *spec_data);
+    void* dequeue(bool bFromHead = true);
+    bool isEmpty();
+    int getCurrentSize() {return m_size;}
+private:
+    typedef struct {
+        struct cam_list list;
+        void* data;
+    } camera_q_node;
+
+    camera_q_node m_head; // dummy head
+    int m_size;
+    pthread_mutex_t m_lock;
+    release_data_fn m_dataFn;
+    void * m_userData;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_QUEUE_H__ */
diff --git a/msm8974/mm-image-codec/Android.mk b/msm8974/mm-image-codec/Android.mk
new file mode 100644
index 0000000..5053e7d
--- /dev/null
+++ b/msm8974/mm-image-codec/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/msm8974/mm-image-codec/qexif/qexif.h b/msm8974/mm-image-codec/qexif/qexif.h
new file mode 100644
index 0000000..91aedde
--- /dev/null
+++ b/msm8974/mm-image-codec/qexif/qexif.h
@@ -0,0 +1,1728 @@
+/*Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+
+#ifndef __QEXIF_H__
+#define __QEXIF_H__
+
+#include <stdio.h>
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t;
+
+/* Exif Tag Entry
+ * Used in exif_set_tag as an input argument and
+ * in exif_get_tag as an output argument. */
+typedef struct
+{
+    /* The Data Type of the Tag *
+     * Rational, etc */
+    exif_tag_type_t type;
+
+    /* Copy
+     * This field is used when a user passes this structure to
+     * be stored in an exif_info_t via the exif_set_tag method.
+     * The routine would look at this field and decide whether
+     * it is necessary to make a copy of the data pointed to by this
+     * structure (all string and array types).
+     * If this field is set to false, only a pointer to the actual
+     * data is retained and it is the caller's responsibility to
+     * ensure the validity of the data before the exif_info_t object
+     * is destroyed.
+     */
+    uint8_t copy;
+
+    /* Data count
+     * This indicates the number of elements of the data. For example, if
+     * the type is EXIF_BYTE and the count is 1, that means the actual data
+     * is one byte and is accessible by data._byte. If the type is EXIF_BYTE
+     * and the count is more than one, the actual data is contained in an
+     * array and is accessible by data._bytes. In case of EXIF_ASCII, it
+     * indicates the string length and in case of EXIF_UNDEFINED, it indicates
+     * the length of the array.
+     */
+    uint32_t count;
+
+    /* Data
+     * A union which covers all possible data types. The user should pick
+     * the right field to use depending on the data type and the count.
+     * See in-line comment below.
+     */
+    union
+    {
+        char      *_ascii;      // EXIF_ASCII (count indicates string length)
+        uint8_t   *_bytes;      // EXIF_BYTE  (count > 1)
+        uint8_t    _byte;       // EXIF_BYTE  (count = 1)
+        uint16_t  *_shorts;     // EXIF_SHORT (count > 1)
+        uint16_t   _short;      // EXIF_SHORT (count = 1)
+        uint32_t  *_longs;      // EXIF_LONG  (count > 1)
+        uint32_t   _long;       // EXIF_LONG  (count = 1)
+        rat_t     *_rats;       // EXIF_RATIONAL  (count > 1)
+        rat_t      _rat;        // EXIF_RATIONAL  (count = 1)
+        uint8_t   *_undefined;  // EXIF_UNDEFINED (count indicates length)
+        int32_t   *_slongs;     // EXIF_SLONG (count > 1)
+        int32_t    _slong;      // EXIF_SLONG (count = 1)
+        srat_t    *_srats;      // EXIF_SRATIONAL (count > 1)
+        srat_t     _srat;       // EXIF_SRATIONAL (count = 1)
+
+    } data;
+
+} exif_tag_entry_t;
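+
+/* Illustrative fill-in of an entry for a 3-component RATIONAL tag such as
+ * GPS latitude (degrees/minutes/seconds). With copy set to 1 the library is
+ * expected to duplicate the array; with copy set to 0 the caller must keep
+ * "coords" valid until the owning exif_info_t is destroyed:
+ *
+ *   static rat_t coords[3] = { {37, 1}, {25, 1}, {3000, 100} };  // 37 deg 25' 30"
+ *   exif_tag_entry_t entry;
+ *   entry.type  = EXIF_RATIONAL;
+ *   entry.copy  = 1;
+ *   entry.count = 3;
+ *   entry.data._rats = coords;   // count > 1, so the pointer member is used
+ */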
+
+/* =======================================================================
+**                          Macro Definitions
+** ======================================================================= */
+/* Enum defined to let compiler generate unique offset numbers for different
+ * tags - ordering matters! NOT INTENDED to be used by any application. */
+typedef enum
+{
+    // GPS IFD
+    GPS_VERSION_ID = 0,
+    GPS_LATITUDE_REF,
+    GPS_LATITUDE,
+    GPS_LONGITUDE_REF,
+    GPS_LONGITUDE,
+    GPS_ALTITUDE_REF,
+    GPS_ALTITUDE,
+    GPS_TIMESTAMP,
+    GPS_SATELLITES,
+    GPS_STATUS,
+    GPS_MEASUREMODE,
+    GPS_DOP,
+    GPS_SPEED_REF,
+    GPS_SPEED,
+    GPS_TRACK_REF,
+    GPS_TRACK,
+    GPS_IMGDIRECTION_REF,
+    GPS_IMGDIRECTION,
+    GPS_MAPDATUM,
+    GPS_DESTLATITUDE_REF,
+    GPS_DESTLATITUDE,
+    GPS_DESTLONGITUDE_REF,
+    GPS_DESTLONGITUDE,
+    GPS_DESTBEARING_REF,
+    GPS_DESTBEARING,
+    GPS_DESTDISTANCE_REF,
+    GPS_DESTDISTANCE,
+    GPS_PROCESSINGMETHOD,
+    GPS_AREAINFORMATION,
+    GPS_DATESTAMP,
+    GPS_DIFFERENTIAL,
+
+    // TIFF IFD
+    NEW_SUBFILE_TYPE,
+    SUBFILE_TYPE,
+    IMAGE_WIDTH,
+    IMAGE_LENGTH,
+    BITS_PER_SAMPLE,
+    COMPRESSION,
+    PHOTOMETRIC_INTERPRETATION,
+    THRESH_HOLDING,
+    CELL_WIDTH,
+    CELL_HEIGHT,
+    FILL_ORDER,
+    DOCUMENT_NAME,
+    IMAGE_DESCRIPTION,
+    MAKE,
+    MODEL,
+    STRIP_OFFSETS,
+    ORIENTATION,
+    SAMPLES_PER_PIXEL,
+    ROWS_PER_STRIP,
+    STRIP_BYTE_COUNTS,
+    MIN_SAMPLE_VALUE,
+    MAX_SAMPLE_VALUE,
+    X_RESOLUTION,
+    Y_RESOLUTION,
+    PLANAR_CONFIGURATION,
+    PAGE_NAME,
+    X_POSITION,
+    Y_POSITION,
+    FREE_OFFSET,
+    FREE_BYTE_COUNTS,
+    GRAY_RESPONSE_UNIT,
+    GRAY_RESPONSE_CURVE,
+    T4_OPTION,
+    T6_OPTION,
+    RESOLUTION_UNIT,
+    PAGE_NUMBER,
+    TRANSFER_FUNCTION,
+    SOFTWARE,
+    DATE_TIME,
+    ARTIST,
+    HOST_COMPUTER,
+    PREDICTOR,
+    WHITE_POINT,
+    PRIMARY_CHROMATICITIES,
+    COLOR_MAP,
+    HALFTONE_HINTS,
+    TILE_WIDTH,
+    TILE_LENGTH,
+    TILE_OFFSET,
+    TILE_BYTE_COUNTS,
+    INK_SET,
+    INK_NAMES,
+    NUMBER_OF_INKS,
+    DOT_RANGE,
+    TARGET_PRINTER,
+    EXTRA_SAMPLES,
+    SAMPLE_FORMAT,
+    TRANSFER_RANGE,
+    JPEG_PROC,
+    JPEG_INTERCHANGE_FORMAT,
+    JPEG_INTERCHANGE_FORMAT_LENGTH,
+    JPEG_RESTART_INTERVAL,
+    JPEG_LOSSLESS_PREDICTORS,
+    JPEG_POINT_TRANSFORMS,
+    JPEG_Q_TABLES,
+    JPEG_DC_TABLES,
+    JPEG_AC_TABLES,
+    YCBCR_COEFFICIENTS,
+    YCBCR_SUB_SAMPLING,
+    YCBCR_POSITIONING,
+    REFERENCE_BLACK_WHITE,
+    GAMMA,
+    ICC_PROFILE_DESCRIPTOR,
+    SRGB_RENDERING_INTENT,
+    IMAGE_TITLE,
+    COPYRIGHT,
+    EXIF_IFD,
+    ICC_PROFILE,
+    GPS_IFD,
+
+
+    // TIFF IFD (Thumbnail)
+    TN_IMAGE_WIDTH,
+    TN_IMAGE_LENGTH,
+    TN_BITS_PER_SAMPLE,
+    TN_COMPRESSION,
+    TN_PHOTOMETRIC_INTERPRETATION,
+    TN_IMAGE_DESCRIPTION,
+    TN_MAKE,
+    TN_MODEL,
+    TN_STRIP_OFFSETS,
+    TN_ORIENTATION,
+    TN_SAMPLES_PER_PIXEL,
+    TN_ROWS_PER_STRIP,
+    TN_STRIP_BYTE_COUNTS,
+    TN_X_RESOLUTION,
+    TN_Y_RESOLUTION,
+    TN_PLANAR_CONFIGURATION,
+    TN_RESOLUTION_UNIT,
+    TN_TRANSFER_FUNCTION,
+    TN_SOFTWARE,
+    TN_DATE_TIME,
+    TN_ARTIST,
+    TN_WHITE_POINT,
+    TN_PRIMARY_CHROMATICITIES,
+    TN_JPEGINTERCHANGE_FORMAT,
+    TN_JPEGINTERCHANGE_FORMAT_L,
+    TN_YCBCR_COEFFICIENTS,
+    TN_YCBCR_SUB_SAMPLING,
+    TN_YCBCR_POSITIONING,
+    TN_REFERENCE_BLACK_WHITE,
+    TN_COPYRIGHT,
+
+    // EXIF IFD
+    EXPOSURE_TIME,
+    F_NUMBER,
+    EXPOSURE_PROGRAM,
+    SPECTRAL_SENSITIVITY,
+    ISO_SPEED_RATING,
+    OECF,
+    EXIF_VERSION,
+    EXIF_DATE_TIME_ORIGINAL,
+    EXIF_DATE_TIME_DIGITIZED,
+    EXIF_COMPONENTS_CONFIG,
+    EXIF_COMPRESSED_BITS_PER_PIXEL,
+    SHUTTER_SPEED,
+    APERTURE,
+    BRIGHTNESS,
+    EXPOSURE_BIAS_VALUE,
+    MAX_APERTURE,
+    SUBJECT_DISTANCE,
+    METERING_MODE,
+    LIGHT_SOURCE,
+    FLASH,
+    FOCAL_LENGTH,
+    SUBJECT_AREA,
+    EXIF_MAKER_NOTE,
+    EXIF_USER_COMMENT,
+    SUBSEC_TIME,
+    SUBSEC_TIME_ORIGINAL,
+    SUBSEC_TIME_DIGITIZED,
+    EXIF_FLASHPIX_VERSION,
+    EXIF_COLOR_SPACE,
+    EXIF_PIXEL_X_DIMENSION,
+    EXIF_PIXEL_Y_DIMENSION,
+    RELATED_SOUND_FILE,
+    INTEROP,
+    FLASH_ENERGY,
+    SPATIAL_FREQ_RESPONSE,
+    FOCAL_PLANE_X_RESOLUTION,
+    FOCAL_PLANE_Y_RESOLUTION,
+    FOCAL_PLANE_RESOLUTION_UNIT,
+    SUBJECT_LOCATION,
+    EXPOSURE_INDEX,
+    SENSING_METHOD,
+    FILE_SOURCE,
+    SCENE_TYPE,
+    CFA_PATTERN,
+    CUSTOM_RENDERED,
+    EXPOSURE_MODE,
+    WHITE_BALANCE,
+    DIGITAL_ZOOM_RATIO,
+    FOCAL_LENGTH_35MM,
+    SCENE_CAPTURE_TYPE,
+    GAIN_CONTROL,
+    CONTRAST,
+    SATURATION,
+    SHARPNESS,
+    DEVICE_SETTINGS_DESCRIPTION,
+    SUBJECT_DISTANCE_RANGE,
+    IMAGE_UID,
+    PIM,
+
+    EXIF_TAG_MAX_OFFSET
+
+} exif_tag_offset_t;
+
+/* Below are the supported Tags (ID and structure for their data) */
+#define CONSTRUCT_TAGID(offset,ID) (offset << 16 | ID)
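+// For example, EXIFTAGID_GPS_LATITUDE below is CONSTRUCT_TAGID(GPS_LATITUDE,
+// 0x0002) = (2 << 16) | 0x0002 = 0x00020002: the upper 16 bits carry the
+// exif_tag_offset_t enum value (presumably used for internal table indexing)
+// and the lower 16 bits carry the standard EXIF tag ID written to the file.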
+
+// GPS tag version
+// Use EXIFTAGTYPE_GPS_VERSION_ID as the exif_tag_type (EXIF_BYTE)
+// Count should be 4
+#define _ID_GPS_VERSION_ID 0x0000
+#define EXIFTAGID_GPS_VERSION_ID \
+  CONSTRUCT_TAGID(GPS_VERSION_ID, _ID_GPS_VERSION_ID)
+#define EXIFTAGTYPE_GPS_VERSION_ID EXIF_BYTE
+// North or South Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LATITUDE_REF 0x0001
+#define EXIFTAGID_GPS_LATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LATITUDE_REF, _ID_GPS_LATITUDE_REF)
+#define EXIFTAGTYPE_GPS_LATITUDE_REF EXIF_ASCII
+// Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LATITUDE 0x0002
+#define EXIFTAGID_GPS_LATITUDE CONSTRUCT_TAGID(GPS_LATITUDE, _ID_GPS_LATITUDE)
+#define EXIFTAGTYPE_GPS_LATITUDE EXIF_RATIONAL
+// East or West Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LONGITUDE_REF 0x0003
+#define EXIFTAGID_GPS_LONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LONGITUDE_REF, _ID_GPS_LONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_LONGITUDE_REF EXIF_ASCII
+// Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LONGITUDE 0x0004
+#define EXIFTAGID_GPS_LONGITUDE \
+  CONSTRUCT_TAGID(GPS_LONGITUDE, _ID_GPS_LONGITUDE)
+#define EXIFTAGTYPE_GPS_LONGITUDE EXIF_RATIONAL
+// Altitude reference
+// Use EXIFTAGTYPE_GPS_ALTITUDE_REF as the exif_tag_type (EXIF_BYTE)
+#define _ID_GPS_ALTITUDE_REF 0x0005
+#define EXIFTAGID_GPS_ALTITUDE_REF \
+  CONSTRUCT_TAGID(GPS_ALTITUDE_REF, _ID_GPS_ALTITUDE_REF)
+#define EXIFTAGTYPE_GPS_ALTITUDE_REF EXIF_BYTE
+// Altitude
+// Use EXIFTAGTYPE_GPS_ALTITUDE as the exif_tag_type (EXIF_RATIONAL)
+#define _ID_GPS_ALTITUDE 0x0006
+#define EXIFTAGID_GPS_ALTITUDE CONSTRUCT_TAGID(GPS_ALTITUDE, _ID_GPS_ALTITUDE)
+#define EXIFTAGTYPE_GPS_ALTITUDE EXIF_RATIONAL
+// GPS time (atomic clock)
+// Use EXIFTAGTYPE_GPS_TIMESTAMP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_TIMESTAMP 0x0007
+#define EXIFTAGID_GPS_TIMESTAMP \
+  CONSTRUCT_TAGID(GPS_TIMESTAMP, _ID_GPS_TIMESTAMP)
+#define EXIFTAGTYPE_GPS_TIMESTAMP EXIF_RATIONAL
+// GPS Satellites
+// Use EXIFTAGTYPE_GPS_SATELLITES as the exif_tag_type (EXIF_ASCII)
+// Count can be anything.
+#define _ID_GPS_SATELLITES 0x0008
+#define EXIFTAGID_GPS_SATELLITES \
+ CONSTRUCT_TAGID(GPS_SATELLITES, _ID_GPS_SATELLITES)
+#define EXIFTAGTYPE_GPS_SATELLITES EXIF_ASCII
+// GPS Status
+// Use EXIFTAGTYPE_GPS_STATUS as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "A" - Measurement in progress
+// "V" - Measurement Interoperability
+// Other - Reserved
+#define _ID_GPS_STATUS 0x0009
+#define EXIFTAGID_GPS_STATUS CONSTRUCT_TAGID(GPS_STATUS, _ID_GPS_STATUS)
+#define EXIFTAGTYPE_GPS_STATUS EXIF_ASCII
+// GPS Measure Mode
+// Use EXIFTAGTYPE_GPS_MEASUREMODE as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "2" - 2-dimensional measurement
+// "3" - 3-dimensional measurement
+// Other - Reserved
+#define _ID_GPS_MEASUREMODE 0x000a
+#define EXIFTAGID_GPS_MEASUREMODE \
+  CONSTRUCT_TAGID(GPS_MEASUREMODE, _ID_GPS_MEASUREMODE)
+#define EXIFTAGTYPE_GPS_MEASUREMODE EXIF_ASCII
+// GPS Measurement precision (DOP)
+// Use EXIFTAGTYPE_GPS_DOP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DOP 0x000b
+#define EXIFTAGID_GPS_DOP CONSTRUCT_TAGID(GPS_DOP, _ID_GPS_DOP)
+#define EXIFTAGTYPE_GPS_DOP EXIF_RATIONAL
+// Speed Unit
+// Use EXIFTAGTYPE_GPS_SPEED_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers per hour
+// "M" - Miles per hour
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_SPEED_REF 0x000c
+#define EXIFTAGID_GPS_SPEED_REF \
+  CONSTRUCT_TAGID(GPS_SPEED_REF, _ID_GPS_SPEED_REF)
+#define EXIFTAGTYPE_GPS_SPEED_REF EXIF_ASCII
+// Speed of GPS receiver
+// Use EXIFTAGTYPE_GPS_SPEED as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_SPEED 0x000d
+#define EXIFTAGID_GPS_SPEED CONSTRUCT_TAGID(GPS_SPEED, _ID_GPS_SPEED)
+#define EXIFTAGTYPE_GPS_SPEED EXIF_RATIONAL
+// Reference of direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_TRACK_REF 0x000e
+#define EXIFTAGID_GPS_TRACK_REF \
+  CONSTRUCT_TAGID(GPS_TRACK_REF, _ID_GPS_TRACK_REF)
+#define EXIFTAGTYPE_GPS_TRACK_REF EXIF_ASCII
+// Direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_TRACK 0x000f
+#define EXIFTAGID_GPS_TRACK CONSTRUCT_TAGID(GPS_TRACK, _ID_GPS_TRACK)
+#define EXIFTAGTYPE_GPS_TRACK EXIF_RATIONAL
+// Reference of direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_IMGDIRECTION_REF 0x0010
+#define EXIFTAGID_GPS_IMGDIRECTION_REF \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION_REF, _ID_GPS_IMGDIRECTION_REF)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION_REF EXIF_ASCII
+// Direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_IMGDIRECTION 0x0011
+#define EXIFTAGID_GPS_IMGDIRECTION \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION, _ID_GPS_IMGDIRECTION)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION EXIF_RATIONAL
+// Geodetic survey data used
+// Use EXIFTAGTYPE_GPS_MAPDATUM as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_GPS_MAPDATUM 0x0012
+#define EXIFTAGID_GPS_MAPDATUM CONSTRUCT_TAGID(GPS_MAPDATUM, _ID_GPS_MAPDATUM)
+#define EXIFTAGTYPE_GPS_MAPDATUM EXIF_ASCII
+// Reference for latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "N" - North latitude
+// "S" - South latitude
+// Other - Reserved
+#define _ID_GPS_DESTLATITUDE_REF 0x0013
+#define EXIFTAGID_GPS_DESTLATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE_REF, _ID_GPS_DESTLATITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE_REF EXIF_ASCII
+// Latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLATITUDE 0x0014
+#define EXIFTAGID_GPS_DESTLATITUDE \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE, _ID_GPS_DESTLATITUDE)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE EXIF_RATIONAL
+// Reference for longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "E" - East longitude
+// "W" - West longitude
+// Other - Reserved
+#define _ID_GPS_DESTLONGITUDE_REF 0x0015
+#define EXIFTAGID_GPS_DESTLONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLONGITUDE_REF, _ID_GPS_DESTLONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE_REF EXIF_ASCII
+// Longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLONGITUDE 0x0016
+#define EXIFTAGID_GPS_DESTLONGITUDE CONSTRUCT_TAGID(GPS_DESTLONGITUDE, _ID_GPS_DESTLONGITUDE)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE EXIF_RATIONAL
+// Reference for bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_DESTBEARING_REF 0x0017
+#define EXIFTAGID_GPS_DESTBEARING_REF \
+  CONSTRUCT_TAGID(GPS_DESTBEARING_REF, _ID_GPS_DESTBEARING_REF)
+#define EXIFTAGTYPE_GPS_DESTBEARING_REF EXIF_ASCII
+// Bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTBEARING 0x0018
+#define EXIFTAGID_GPS_DESTBEARING \
+  CONSTRUCT_TAGID(GPS_DESTBEARING, _ID_GPS_DESTBEARING)
+#define EXIFTAGTYPE_GPS_DESTBEARING EXIF_RATIONAL
+// Reference for distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers per hour
+// "M" - Miles per hour
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_DESTDISTANCE_REF 0x0019
+#define EXIFTAGID_GPS_DESTDISTANCE_REF \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE_REF, _ID_GPS_DESTDISTANCE_REF)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE_REF EXIF_ASCII
+// Distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTDISTANCE 0x001a
+#define EXIFTAGID_GPS_DESTDISTANCE \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE, _ID_GPS_DESTDISTANCE)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE EXIF_RATIONAL
+// Name of GPS processing method
+// Use EXIFTAGTYPE_GPS_PROCESSINGMETHOD as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_PROCESSINGMETHOD 0x001b
+#define EXIFTAGID_GPS_PROCESSINGMETHOD \
+  CONSTRUCT_TAGID(GPS_PROCESSINGMETHOD, _ID_GPS_PROCESSINGMETHOD)
+#define EXIFTAGTYPE_GPS_PROCESSINGMETHOD EXIF_UNDEFINED
+// Name of GPS area
+// Use EXIFTAGTYPE_GPS_AREAINFORMATION as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_AREAINFORMATION 0x001c
+#define EXIFTAGID_GPS_AREAINFORMATION \
+  CONSTRUCT_TAGID(GPS_AREAINFORMATION, _ID_GPS_AREAINFORMATION)
+#define EXIFTAGTYPE_GPS_AREAINFORMATION EXIF_UNDEFINED
+// GPS date
+// Use EXIFTAGTYPE_GPS_DATESTAMP as the exif_tag_type (EXIF_ASCII)
+// It should be 11 characters long including the null-terminating character.
+#define _ID_GPS_DATESTAMP 0x001d
+#define EXIFTAGID_GPS_DATESTAMP \
+  CONSTRUCT_TAGID(GPS_DATESTAMP, _ID_GPS_DATESTAMP)
+#define EXIFTAGTYPE_GPS_DATESTAMP EXIF_ASCII
+// GPS differential correction
+// Use EXIFTAGTYPE_GPS_DIFFERENTIAL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+// 0 - Measurement without differential correction
+// 1 - Differential correction applied
+// Other - Reserved
+#define _ID_GPS_DIFFERENTIAL 0x001e
+#define EXIFTAGID_GPS_DIFFERENTIAL \
+  CONSTRUCT_TAGID(GPS_DIFFERENTIAL, _ID_GPS_DIFFERENTIAL)
+#define EXIFTAGTYPE_GPS_DIFFERENTIAL EXIF_SHORT
+// Image width
+// Use EXIFTAGTYPE_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_IMAGE_WIDTH CONSTRUCT_TAGID(IMAGE_WIDTH, _ID_IMAGE_WIDTH)
+#define EXIFTAGTYPE_IMAGE_WIDTH EXIF_LONG
+// Image height
+// Use EXIFTAGTYPE_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_IMAGE_LENGTH CONSTRUCT_TAGID(IMAGE_LENGTH, _ID_IMAGE_LENGTH)
+#define EXIFTAGTYPE_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component
+// Use EXIFTAGTYPE_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(BITS_PER_SAMPLE, _ID_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme
+// Use EXIFTAGTYPE_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_COMPRESSION 0x0103
+#define EXIFTAGID_COMPRESSION CONSTRUCT_TAGID(COMPRESSION, _ID_COMPRESSION)
+#define EXIFTAGTYPE_COMPRESSION EXIF_SHORT
+// Pixel composition
+// Use EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(PHOTOMETRIC_INTERPRETATION, _ID_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+
+// Thresholding
+// Use EXIFTAGTYPE_THRESH_HOLDING as the exif_tag_type (EXIF_SHORT)
+//
+// 1 = No dithering or halftoning
+// 2 = Ordered dither or halftone
+// 3 = Randomized dither
+#define _ID_THRESH_HOLDING 0x0107
+#define EXIFTAGID_THRESH_HOLDING \
+  CONSTRUCT_TAGID(THRESH_HOLDING, _ID_THRESH_HOLDING)
+#define EXIFTAGTYPE_THRESH_HOLDING EXIF_SHORT
+
+// Cell Width
+// Use EXIFTAGTYPE_CELL_WIDTH as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_CELL_WIDTH 0x0108
+#define EXIFTAGID_CELL_WIDTH CONSTRUCT_TAGID(CELL_WIDTH, _ID_CELL_WIDTH)
+#define EXIFTAGTYPE_CELL_WIDTH EXIF_SHORT
+// Cell Height
+// Use EXIFTAGTYPE_CELL_HEIGHT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CELL_HEIGHT 0x0109
+#define EXIFTAGID_CELL_HEIGHT CONSTRUCT_TAGID(CELL_HEIGHT, _ID_CELL_HEIGHT)
+#define EXIFTAGTYPE_CELL_HEIGHT EXIF_SHORT
+// Fill Order
+// Use EXIFTAGTYPE_FILL_ORDER as the exif_tag_type (EXIF_SHORT)
+// 1 = Normal
+// 2 = Reversed
+#define _ID_FILL_ORDER 0x010A
+#define EXIFTAGID_FILL_ORDER CONSTRUCT_TAGID(FILL_ORDER, _ID_FILL_ORDER)
+#define EXIFTAGTYPE_FILL_ORDER EXIF_SHORT
+
+// DOCUMENT NAME
+// Use EXIFTAGTYPE_DOCUMENT_NAME as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_DOCUMENT_NAME 0x010D
+#define EXIFTAGID_DOCUMENT_NAME CONSTRUCT_TAGID(DOCUMENT_NAME, _ID_DOCUMENT_NAME)
+#define EXIFTAGTYPE_DOCUMENT_NAME EXIF_ASCII
+
+// Image title
+// Use EXIFTAGTYPE_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(IMAGE_DESCRIPTION, _ID_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer
+// Use EXIFTAGTYPE_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MAKE 0x010f
+#define EXIFTAGID_MAKE CONSTRUCT_TAGID(MAKE, _ID_MAKE)
+#define EXIFTAGTYPE_MAKE EXIF_ASCII
+// Image input equipment model
+// Use EXIFTAGTYPE_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MODEL 0x0110
+#define EXIFTAGID_MODEL CONSTRUCT_TAGID(MODEL, _ID_MODEL)
+#define EXIFTAGTYPE_MODEL EXIF_ASCII
+// Image data location
+// Use EXIFTAGTYPE_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(STRIP_OFFSETS, _ID_STRIP_OFFSETS)
+#define EXIFTAGTYPE_STRIP_OFFSETS EXIF_LONG
+// Orientation of image
+// Use EXIFTAGTYPE_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_ORIENTATION 0x0112
+#define EXIFTAGID_ORIENTATION CONSTRUCT_TAGID(ORIENTATION, _ID_ORIENTATION)
+#define EXIFTAGTYPE_ORIENTATION EXIF_SHORT
+// Number of components
+// Use EXIFTAGTYPE_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(SAMPLES_PER_PIXEL, _ID_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip
+// Use EXIFTAGTYPE_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(ROWS_PER_STRIP, _ID_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip
+// Use EXIFTAGTYPE_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(STRIP_BYTE_COUNTS, _ID_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_STRIP_BYTE_COUNTS EXIF_LONG
+// MinSampleValue
+// Use EXIFTAGTYPE_MIN_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MIN_SAMPLE_VALUE 0x0118
+#define EXIFTAGID_MIN_SAMPLE_VALUE  \
+  CONSTRUCT_TAGID(MIN_SAMPLE_VALUE, _ID_MIN_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MIN_SAMPLE_VALUE EXIF_SHORT
+// MaxSampleValue
+// Use EXIFTAGTYPE_MAX_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MAX_SAMPLE_VALUE 0x0119
+#define EXIFTAGID_MAX_SAMPLE_VALUE CONSTRUCT_TAGID(MAX_SAMPLE_VALUE, _ID_MAX_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MAX_SAMPLE_VALUE EXIF_SHORT
+
+// Image resolution in width direction
+// Use EXIFTAGTYPE_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_X_RESOLUTION 0x011a
+#define EXIFTAGID_X_RESOLUTION \
+  CONSTRUCT_TAGID(X_RESOLUTION, _ID_X_RESOLUTION)
+#define EXIFTAGTYPE_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction
+// Use EXIFTAGTYPE_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_Y_RESOLUTION 0x011b
+#define EXIFTAGID_Y_RESOLUTION \
+  CONSTRUCT_TAGID(Y_RESOLUTION, _ID_Y_RESOLUTION)
+#define EXIFTAGTYPE_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement
+// Use EXIFTAGTYPE_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(PLANAR_CONFIGURATION, _ID_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_PLANAR_CONFIGURATION EXIF_SHORT
+// PageName
+// Use EXIFTAGTYPE_PAGE_NAME as the exif_tag_type (EXIF_ASCII)
+// Count should be 1
+#define _ID_PAGE_NAME 0x011d
+#define EXIFTAGID_PAGE_NAME CONSTRUCT_TAGID(PAGE_NAME, _ID_PAGE_NAME)
+#define EXIFTAGTYPE_PAGE_NAME EXIF_ASCII
+// XPosition
+// Use EXIFTAGTYPE_X_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_X_POSITION 0x011e
+#define EXIFTAGID_X_POSITION CONSTRUCT_TAGID(X_POSITION, _ID_X_POSITION)
+#define EXIFTAGTYPE_X_POSITION EXIF_RATIONAL
+// YPosition
+// Use EXIFTAGTYPE_Y_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_Y_POSITION 0x011f
+#define EXIFTAGID_Y_POSITION CONSTRUCT_TAGID(Y_POSITION, _ID_Y_POSITION)
+#define EXIFTAGTYPE_Y_POSITION EXIF_RATIONAL
+
+// FREE_OFFSET
+// Use EXIFTAGTYPE_FREE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_OFFSET 0x0120
+#define EXIFTAGID_FREE_OFFSET CONSTRUCT_TAGID(FREE_OFFSET, _ID_FREE_OFFSET)
+#define EXIFTAGTYPE_FREE_OFFSET EXIF_LONG
+// FREE_BYTE_COUNTS
+// Use EXIFTAGTYPE_FREE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_BYTE_COUNTS 0x0121
+#define EXIFTAGID_FREE_BYTE_COUNTS \
+  CONSTRUCT_TAGID(FREE_BYTE_COUNTS, _ID_FREE_BYTE_COUNTS)
+#define EXIFTAGTYPE_FREE_BYTE_COUNTS EXIF_LONG
+
+// GrayResponseUnit
+// Use EXIFTAGTYPE_GRAY_RESPONSE_UNIT as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_UNIT 0x0122
+#define EXIFTAGID_GRAY_RESPONSE_UNIT \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_UNIT, _ID_GRAY_RESPONSE_UNIT)
+#define EXIFTAGTYPE_GRAY_RESPONSE_UNIT EXIF_SHORT
+// GrayResponseCurve
+// Use EXIFTAGTYPE_GRAY_RESPONSE_CURVE  as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_CURVE 0x0123
+#define EXIFTAGID_GRAY_RESPONSE_CURVE \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_CURVE , _ID_GRAY_RESPONSE_CURVE )
+#define EXIFTAGTYPE_GRAY_RESPONSE_CURVE EXIF_SHORT
+
+// T4_OPTION
+// Use EXIFTAGTYPE_T4_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T4_OPTION  0x0124
+#define EXIFTAGID_T4_OPTION CONSTRUCT_TAGID(T4_OPTION, _ID_T4_OPTION)
+#define EXIFTAGTYPE_T4_OPTION EXIF_LONG
+// T6_OPTION
+// Use EXIFTAGTYPE_T6_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T6_OPTION 0x0125
+#define EXIFTAGID_T6_OPTION CONSTRUCT_TAGID(T6_OPTION, _ID_T6_OPTION)
+#define EXIFTAGTYPE_T6_OPTION EXIF_LONG
+
+// Unit of X and Y resolution
+// Use EXIFTAGTYPE_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_RESOLUTION_UNIT 0x0128
+#define EXIFTAGID_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(RESOLUTION_UNIT, _ID_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_RESOLUTION_UNIT EXIF_SHORT
+
+// Page Number
+// Use EXIFTAGTYPE_PAGE_NUMBER  as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PAGE_NUMBER 0x0129
+#define EXIFTAGID_PAGE_NUMBER CONSTRUCT_TAGID(PAGE_NUMBER, _ID_PAGE_NUMBER)
+#define EXIFTAGTYPE_PAGE_NUMBER EXIF_SHORT
+// Transfer function
+// Use EXIFTAGTYPE_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TRANSFER_FUNCTION, _ID_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TRANSFER_FUNCTION EXIF_SHORT
+// Software used
+// Use EXIFTAGTYPE_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SOFTWARE 0x0131
+#define EXIFTAGID_SOFTWARE CONSTRUCT_TAGID(SOFTWARE, _ID_SOFTWARE)
+#define EXIFTAGTYPE_SOFTWARE EXIF_ASCII
+// File change date and time
+// Use EXIFTAGTYPE_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_DATE_TIME 0x0132
+#define EXIFTAGID_DATE_TIME CONSTRUCT_TAGID(DATE_TIME, _ID_DATE_TIME)
+#define EXIFTAGTYPE_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image
+// Use EXIFTAGTYPE_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_ARTIST 0x013b
+#define EXIFTAGID_ARTIST CONSTRUCT_TAGID(ARTIST, _ID_ARTIST)
+#define EXIFTAGTYPE_ARTIST EXIF_ASCII
+// Host Computer Name
+// Use EXIFTAGTYPE_HOST_COMPUTER as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_HOST_COMPUTER 0x013c
+#define EXIFTAGID_HOST_COMPUTER \
+  CONSTRUCT_TAGID(HOST_COMPUTER , _ID_HOST_COMPUTER )
+#define EXIFTAGTYPE_HOST_COMPUTER EXIF_ASCII
+// Predictor
+// Use EXIFTAGTYPE_PREDICTOR as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_PREDICTOR 0x013d
+#define EXIFTAGID_PREDICTOR CONSTRUCT_TAGID(PREDICTOR , _ID_PREDICTOR )
+#define EXIFTAGTYPE_PREDICTOR EXIF_SHORT
+// White point chromaticity
+// Use EXIFTAGTYPE_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_WHITE_POINT 0x013e
+#define EXIFTAGID_WHITE_POINT CONSTRUCT_TAGID(WHITE_POINT, _ID_WHITE_POINT)
+#define EXIFTAGTYPE_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries
+// Use EXIFTAGTYPE_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(PRIMARY_CHROMATICITIES, _ID_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+
+// COLOR_MAP
+// Use EXIFTAGTYPE_COLOR_MAP as the exif_tag_type (EXIF_SHORT)
+// Count should be 3 * (2^BitsPerSample)
+#define _ID_COLOR_MAP 0x0140
+#define EXIFTAGID_COLOR_MAP CONSTRUCT_TAGID(COLOR_MAP, _ID_COLOR_MAP)
+#define EXIFTAGTYPE_COLOR_MAP EXIF_SHORT
+// HALFTONE_HINTS
+// Use EXIFTAGTYPE_HALFTONE_HINTS as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_HALFTONE_HINTS 0x0141
+#define EXIFTAGID_HALFTONE_HINTS \
+  CONSTRUCT_TAGID(HALFTONE_HINTS, _ID_HALFTONE_HINTS)
+#define EXIFTAGTYPE_HALFTONE_HINTS EXIF_SHORT
+
+// TILE_WIDTH
+// Use EXIFTAGTYPE_TILE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TILE_WIDTH 0x0142
+#define EXIFTAGID_TILE_WIDTH CONSTRUCT_TAGID(TILE_WIDTH, _ID_TILE_WIDTH)
+#define EXIFTAGTYPE_TILE_WIDTH EXIF_LONG
+// TILE_LENGTH
+// Use EXIFTAGTYPE_TILE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TILE_LENGTH 0x0143
+#define EXIFTAGID_TILE_LENGTH CONSTRUCT_TAGID(TILE_LENGTH , _ID_TILE_LENGTH )
+#define EXIFTAGTYPE_TILE_LENGTH EXIF_LONG
+// TILE_OFFSET
+// Use EXIFTAGTYPE_TILE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_OFFSET 0x0144
+#define EXIFTAGID_TILE_OFFSET CONSTRUCT_TAGID(TILE_OFFSET , _ID_TILE_OFFSET )
+#define EXIFTAGTYPE_TILE_OFFSET EXIF_LONG
+// Tile Byte Counts
+// Use EXIFTAGTYPE_TILE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_BYTE_COUNTS 0x0145
+#define EXIFTAGID_TILE_BYTE_COUNTS \
+  CONSTRUCT_TAGID(TILE_BYTE_COUNTS, _ID_TILE_BYTE_COUNTS)
+#define EXIFTAGTYPE_TILE_BYTE_COUNTS EXIF_LONG
+
+// INK_SET
+// Use EXIFTAGTYPE_INK_SET as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_INK_SET 0x014c
+#define EXIFTAGID_INK_SET CONSTRUCT_TAGID(INK_SET , _ID_INK_SET )
+#define EXIFTAGTYPE_INK_SET EXIF_SHORT
+// INK_NAMES
+// Use EXIFTAGTYPE_INK_NAMES as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_INK_NAMES 0x014D
+#define EXIFTAGID_INK_NAMES CONSTRUCT_TAGID(INK_NAMES , _ID_INK_NAMES)
+#define EXIFTAGTYPE_INK_NAMES EXIF_ASCII
+// NUMBER_OF_INKS
+// Use EXIFTAGTYPE_NUMBER_OF_INKS as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_NUMBER_OF_INKS 0x014e
+#define EXIFTAGID_NUMBER_OF_INKS \
+  CONSTRUCT_TAGID(NUMBER_OF_INKS , _ID_NUMBER_OF_INKS )
+#define EXIFTAGTYPE_NUMBER_OF_INKS EXIF_SHORT
+
+// DOT_RANGE
+// Use EXIFTAGTYPE_DOT_RANGE as the exif_tag_type (EXIF_ASCII)
+// Count should be 2 or 2 * SamplesPerPixel
+#define _ID_DOT_RANGE 0x0150
+#define EXIFTAGID_DOT_RANGE CONSTRUCT_TAGID(DOT_RANGE , _ID_DOT_RANGE )
+#define EXIFTAGTYPE_DOT_RANGE EXIF_ASCII
+
+// TARGET_PRINTER
+// Use EXIFTAGTYPE_TARGET_PRINTER as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TARGET_PRINTER 0x0151
+#define EXIFTAGID_TARGET_PRINTER \
+  CONSTRUCT_TAGID(TARGET_PRINTER , _ID_TARGET_PRINTER)
+#define EXIFTAGTYPE_TARGET_PRINTER EXIF_ASCII
+// EXTRA_SAMPLES
+// Use EXIFTAGTYPE_EXTRA_SAMPLES as the exif_tag_type (EXIF_SHORT)
+// Count equals the number of extra components per pixel
+#define _ID_EXTRA_SAMPLES 0x0152
+#define EXIFTAGID_EXTRA_SAMPLES \
+  CONSTRUCT_TAGID(EXTRA_SAMPLES , _ID_EXTRA_SAMPLES )
+#define EXIFTAGTYPE_EXTRA_SAMPLES EXIF_SHORT
+
+// SAMPLE_FORMAT
+// Use EXIFTAGTYPE_SAMPLE_FORMAT as the exif_tag_type (EXIF_SHORT)
+// Count should be SamplesPerPixel
+#define _ID_SAMPLE_FORMAT 0x0153
+#define EXIFTAGID_SAMPLE_FORMAT \
+  CONSTRUCT_TAGID(SAMPLE_FORMAT , _ID_SAMPLE_FORMAT )
+#define EXIFTAGTYPE_SAMPLE_FORMAT EXIF_SHORT
+
+// Table of values that extends the range of the transfer function.
+// Use EXIFTAGTYPE_TRANSFER_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_TRANSFER_RANGE 0x0156
+#define EXIFTAGID_TRANSFER_RANGE \
+  CONSTRUCT_TAGID(TRANSFER_RANGE , _ID_TRANSFER_RANGE )
+#define EXIFTAGTYPE_TRANSFER_RANGE EXIF_SHORT
+
+// JPEG compression process.
+// Use EXIFTAGTYPE_JPEG_PROC as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_JPEG_PROC 0x0200
+#define EXIFTAGID_JPEG_PROC CONSTRUCT_TAGID(JPEG_PROC , _ID_JPEG_PROC )
+#define EXIFTAGTYPE_JPEG_PROC EXIF_SHORT
+
+
+// Offset to JPEG SOI
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT, _ID_JPEG_INTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT_LENGTH 0x0202
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT_LENGTH \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT_LENGTH, \
+  _ID_JPEG_INTERCHANGE_FORMAT_LENGTH)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH EXIF_LONG
+
+// Length of the restart interval.
+// Use EXIFTAGTYPE_JPEG_RESTART_INTERVAL as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_RESTART_INTERVAL 0x0203
+#define EXIFTAGID_JPEG_RESTART_INTERVAL \
+  CONSTRUCT_TAGID(JPEG_RESTART_INTERVAL, _ID_JPEG_RESTART_INTERVAL)
+#define EXIFTAGTYPE_JPEG_RESTART_INTERVAL EXIF_SHORT
+
+// JPEGLosslessPredictors
+// Use EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_LOSSLESS_PREDICTORS 0x0205
+#define EXIFTAGID_JPEG_LOSSLESS_PREDICTORS  \
+  CONSTRUCT_TAGID(JPEG_LOSSLESS_PREDICTORS, _ID_JPEG_LOSSLESS_PREDICTORS)
+#define EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS EXIF_SHORT
+
+// JPEGPointTransforms
+// Use EXIFTAGTYPE_JPEG_POINT_TRANSFORMS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_POINT_TRANSFORMS 0x0206
+#define EXIFTAGID_JPEG_POINT_TRANSFORMS  \
+  CONSTRUCT_TAGID(JPEG_POINT_TRANSFORMS, _ID_JPEG_POINT_TRANSFORMS)
+#define EXIFTAGTYPE_JPEG_POINT_TRANSFORMS EXIF_SHORT
+
+// JPEG_Q_TABLES
+// Use EXIFTAGTYPE_JPEG_Q_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_Q_TABLES 0x0207
+#define EXIFTAGID_JPEG_Q_TABLES \
+  CONSTRUCT_TAGID(JPEG_Q_TABLES, _ID_JPEG_Q_TABLES)
+#define EXIFTAGTYPE_JPEG_Q_TABLES EXIF_LONG
+// JPEG_DC_TABLES
+// Use EXIFTAGTYPE_JPEG_DC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_DC_TABLES 0x0208
+#define EXIFTAGID_JPEG_DC_TABLES \
+  CONSTRUCT_TAGID(JPEG_DC_TABLES, _ID_JPEG_DC_TABLES)
+#define EXIFTAGTYPE_JPEG_DC_TABLES EXIF_LONG
+// JPEG_AC_TABLES
+// Use EXIFTAGTYPE_JPEG_AC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_AC_TABLES 0x0209
+#define EXIFTAGID_JPEG_AC_TABLES \
+  CONSTRUCT_TAGID(JPEG_AC_TABLES, _ID_JPEG_AC_TABLES)
+#define EXIFTAGTYPE_JPEG_AC_TABLES EXIF_LONG
+
+// Color space transformation matrix coefficients
+// Use EXIFTAGTYPE_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(YCBCR_COEFFICIENTS, _ID_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C
+// Use EXIFTAGTYPE_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_YCBCR_SUB_SAMPLING  \
+  CONSTRUCT_TAGID(YCBCR_SUB_SAMPLING, _ID_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning
+// Use EXIFTAGTYPE_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_YCBCR_POSITIONING  \
+  CONSTRUCT_TAGID(YCBCR_POSITIONING, _ID_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_YCBCR_POSITIONING EXIF_SHORT
+// Pair of black and white reference values
+// Use EXIFTAGTYPE_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(REFERENCE_BLACK_WHITE, _ID_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// GAMMA
+// Use EXIFTAGTYPE_GAMMA as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_GAMMA 0x0301
+#define EXIFTAGID_GAMMA CONSTRUCT_TAGID(GAMMA, _ID_GAMMA)
+#define EXIFTAGTYPE_GAMMA EXIF_RATIONAL
+// Null-terminated character string that identifies an ICC profile.
+// Use EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_ICC_PROFILE_DESCRIPTOR 0x0302
+#define EXIFTAGID_ICC_PROFILE_DESCRIPTOR \
+  CONSTRUCT_TAGID(ICC_PROFILE_DESCRIPTOR, _ID_ICC_PROFILE_DESCRIPTOR)
+#define EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR EXIF_ASCII
+// SRGB_RENDERING_INTENT
+// Use EXIFTAGTYPE_SRGB_RENDERING_INTENT as the exif_tag_type (EXIF_BYTE)
+// Count should be 6
+#define _ID_SRGB_RENDERING_INTENT 0x0303
+#define EXIFTAGID_SRGB_RENDERING_INTENT \
+  CONSTRUCT_TAGID(SRGB_RENDERING_INTENT, _ID_SRGB_RENDERING_INTENT)
+#define EXIFTAGTYPE_SRGB_RENDERING_INTENT EXIF_BYTE
+
+// Null-terminated character string that specifies the title of the image.
+// Use EXIFTAGTYPE_IMAGE_TITLE as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_IMAGE_TITLE 0x0320
+#define EXIFTAGID_IMAGE_TITLE CONSTRUCT_TAGID(IMAGE_TITLE, _ID_IMAGE_TITLE)
+#define EXIFTAGTYPE_IMAGE_TITLE EXIF_ASCII
+
+// Copyright holder
+// Use EXIFTAGTYPE_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_COPYRIGHT 0x8298
+#define EXIFTAGID_COPYRIGHT CONSTRUCT_TAGID(COPYRIGHT, _ID_COPYRIGHT)
+#define EXIFTAGTYPE_COPYRIGHT EXIF_ASCII
+// New Subfile Type
+// Use EXIFTAGTYPE_NEW_SUBFILE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_NEW_SUBFILE_TYPE 0x00fe
+#define EXIFTAGID_NEW_SUBFILE_TYPE \
+  CONSTRUCT_TAGID(NEW_SUBFILE_TYPE, _ID_NEW_SUBFILE_TYPE)
+#define EXIFTAGTYPE_NEW_SUBFILE_TYPE EXIF_SHORT
+
+// Subfile Type (old-style)
+// Use EXIFTAGTYPE_SUBFILE_TYPE as the exif_tag_type (EXIF_LONG)
+// Count can be any
+#define _ID_SUBFILE_TYPE 0x00ff
+#define EXIFTAGID_SUBFILE_TYPE CONSTRUCT_TAGID(SUBFILE_TYPE, _ID_SUBFILE_TYPE)
+#define EXIFTAGTYPE_SUBFILE_TYPE EXIF_LONG
+
+// Image width (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_TN_IMAGE_WIDTH \
+  CONSTRUCT_TAGID(TN_IMAGE_WIDTH, _ID_TN_IMAGE_WIDTH)
+#define EXIFTAGTYPE_TN_IMAGE_WIDTH EXIF_LONG
+// Image height (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_TN_IMAGE_LENGTH \
+  CONSTRUCT_TAGID(TN_IMAGE_LENGTH, _ID_TN_IMAGE_LENGTH)
+#define EXIFTAGTYPE_TN_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component (of thumbnail)
+// Use EXIFTAGTYPE_TN_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_TN_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(TN_BITS_PER_SAMPLE, _ID_TN_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_TN_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme (of thumbnail)
+// Use EXIFTAGTYPE_TN_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_COMPRESSION 0x0103
+#define EXIFTAGID_TN_COMPRESSION \
+  CONSTRUCT_TAGID(TN_COMPRESSION, _ID_TN_COMPRESSION)
+#define EXIFTAGTYPE_TN_COMPRESSION EXIF_SHORT
+// Pixel composition (of thumbnail)
+// Use EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION as the
+// exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_TN_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(TN_PHOTOMETRIC_INTERPRETATION, \
+  _ID_TN_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+// Image title (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_TN_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(TN_IMAGE_DESCRIPTION, _ID_TN_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_TN_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer (of thumbnail)
+// Use EXIFTAGTYPE_TN_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MAKE 0x010f
+#define EXIFTAGID_TN_MAKE CONSTRUCT_TAGID(TN_MAKE, _ID_TN_MAKE)
+#define EXIFTAGTYPE_TN_MAKE EXIF_ASCII
+// Image input equipment model (of thumbnail)
+// Use EXIFTAGTYPE_TN_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MODEL 0x0110
+#define EXIFTAGID_TN_MODEL CONSTRUCT_TAGID(TN_MODEL, _ID_TN_MODEL)
+#define EXIFTAGTYPE_TN_MODEL EXIF_ASCII
+// Image data location (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_TN_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_TN_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(TN_STRIP_OFFSETS, _ID_TN_STRIP_OFFSETS)
+#define EXIFTAGTYPE_TN_STRIP_OFFSETS EXIF_LONG
+// Orientation of image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_ORIENTATION 0x0112
+#define EXIFTAGID_TN_ORIENTATION \
+  CONSTRUCT_TAGID(TN_ORIENTATION, _ID_TN_ORIENTATION)
+#define EXIFTAGTYPE_TN_ORIENTATION EXIF_SHORT
+// Number of components (of thumbnail)
+// Use EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_TN_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(TN_SAMPLES_PER_PIXEL, _ID_TN_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_TN_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(TN_ROWS_PER_STRIP, _ID_TN_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_TN_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_TN_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_TN_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(TN_STRIP_BYTE_COUNTS, _ID_TN_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS EXIF_LONG
+// Image resolution in width direction (of thumbnail)
+// Use EXIFTAGTYPE_TN_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_X_RESOLUTION 0x011a
+#define EXIFTAGID_TN_X_RESOLUTION \
+  CONSTRUCT_TAGID(TN_X_RESOLUTION, _ID_TN_X_RESOLUTION)
+#define EXIFTAGTYPE_TN_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction  (of thumbnail)
+// Use EXIFTAGTYPE_TN_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_Y_RESOLUTION 0x011b
+#define EXIFTAGID_TN_Y_RESOLUTION \
+  CONSTRUCT_TAGID(TN_Y_RESOLUTION, _ID_TN_Y_RESOLUTION)
+#define EXIFTAGTYPE_TN_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement (of thumbnail)
+// Use EXIFTAGTYPE_TN_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_TN_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(TN_PLANAR_CONFIGURATION, _ID_TN_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_TN_PLANAR_CONFIGURATION EXIF_SHORT
+// Unit of X and Y resolution (of thumbnail)
+// Use EXIFTAGTYPE_TN_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_RESOLUTION_UNIT 0x128
+#define EXIFTAGID_TN_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(TN_RESOLUTION_UNIT, _ID_TN_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_TN_RESOLUTION_UNIT EXIF_SHORT
+// Transfer function (of thumbnail)
+// Use EXIFTAGTYPE_TN_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TN_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TN_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TN_TRANSFER_FUNCTION, _ID_TN_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TN_TRANSFER_FUNCTION EXIF_SHORT
+// Software used (of thumbnail)
+// Use EXIFTAGTYPE_TN_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_SOFTWARE 0x0131
+#define EXIFTAGID_TN_SOFTWARE CONSTRUCT_TAGID(TN_SOFTWARE, _ID_TN_SOFTWARE)
+#define EXIFTAGTYPE_TN_SOFTWARE EXIF_ASCII
+// File change date and time (of thumbnail)
+// Use EXIFTAGTYPE_TN_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_TN_DATE_TIME 0x0132
+#define EXIFTAGID_TN_DATE_TIME CONSTRUCT_TAGID(TN_DATE_TIME, _ID_TN_DATE_TIME)
+#define EXIFTAGTYPE_TN_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_ARTIST 0x013b
+#define EXIFTAGID_TN_ARTIST CONSTRUCT_TAGID(TN_ARTIST, _ID_TN_ARTIST)
+#define EXIFTAGTYPE_TN_ARTIST EXIF_ASCII
+// White point chromaticity (of thumbnail)
+// Use EXIFTAGTYPE_TN_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_TN_WHITE_POINT 0x013e
+#define EXIFTAGID_TN_WHITE_POINT \
+  CONSTRUCT_TAGID(TN_WHITE_POINT, _ID_TN_WHITE_POINT)
+#define EXIFTAGTYPE_TN_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries (of thumbnail)
+// Use EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_TN_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(TN_PRIMARY_CHROMATICITIES, _ID_TN_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+// Offset to JPEG SOI (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT, _ID_TN_JPEGINTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT_L 0x0202
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT_L \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT_L, _ID_TN_JPEGINTERCHANGE_FORMAT_L)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L EXIF_LONG
+// Color space transformation matrix coefficients (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_TN_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_TN_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(TN_YCBCR_COEFFICIENTS, _ID_TN_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_TN_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_TN_YCBCR_SUB_SAMPLING \
+  CONSTRUCT_TAGID(TN_YCBCR_SUB_SAMPLING, _ID_TN_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_TN_YCBCR_POSITIONING \
+  CONSTRUCT_TAGID(TN_YCBCR_POSITIONING, _ID_TN_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_TN_YCBCR_POSITIONING    EXIF_SHORT
+// Pair of black and white reference values (of thumbnail)
+// Use EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_TN_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(TN_REFERENCE_BLACK_WHITE, _ID_TN_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// Copyright holder (of thumbnail)
+// Use EXIFTAGTYPE_TN_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_COPYRIGHT 0x8298
+#define EXIFTAGID_TN_COPYRIGHT CONSTRUCT_TAGID(TN_COPYRIGHT, _ID_TN_COPYRIGHT)
+#define EXIFTAGTYPE_TN_COPYRIGHT EXIF_ASCII
+// Exposure time
+// Use EXIFTAGTYPE_EXPOSURE_TIME as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_TIME 0x829a
+#define EXIFTAGID_EXPOSURE_TIME \
+  CONSTRUCT_TAGID(EXPOSURE_TIME, _ID_EXPOSURE_TIME)
+#define EXIFTAGTYPE_EXPOSURE_TIME EXIF_RATIONAL
+// F number
+// Use EXIFTAGTYPE_F_NUMBER as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_F_NUMBER 0x829d
+#define EXIFTAGID_F_NUMBER \
+  CONSTRUCT_TAGID(F_NUMBER, _ID_F_NUMBER)
+#define EXIFTAGTYPE_F_NUMBER EXIF_RATIONAL
+// Exif IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_EXIF_IFD_PTR 0x8769
+#define EXIFTAGID_EXIF_IFD_PTR \
+  CONSTRUCT_TAGID(EXIF_IFD, _ID_EXIF_IFD_PTR)
+#define EXIFTAGTYPE_EXIF_IFD_PTR EXIF_LONG
+
+// ICC_PROFILE (NOT INTENDED to be accessible to user)
+#define _ID_ICC_PROFILE 0x8773
+#define EXIFTAGID_ICC_PROFILE CONSTRUCT_TAGID(ICC_PROFILE, _ID_ICC_PROFILE)
+#define EXIFTAGTYPE_ICC_PROFILE EXIF_LONG
+// Exposure program
+// Use EXIFTAGTYPE_EXPOSURE_PROGRAM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_PROGRAM 0x8822
+#define EXIFTAGID_EXPOSURE_PROGRAM \
+  CONSTRUCT_TAGID(EXPOSURE_PROGRAM, _ID_EXPOSURE_PROGRAM)
+#define EXIFTAGTYPE_EXPOSURE_PROGRAM EXIF_SHORT
+// Spectral sensitivity
+// Use EXIFTAGTYPE_SPECTRAL_SENSITIVITY as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SPECTRAL_SENSITIVITY 0x8824
+#define EXIFTAGID_SPECTRAL_SENSITIVITY \
+  CONSTRUCT_TAGID(SPECTRAL_SENSITIVITY, _ID_SPECTRAL_SENSITIVITY)
+#define EXIFTAGTYPE_SPECTRAL_SENSITIVITY EXIF_ASCII
+// GPS IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_GPS_IFD_PTR 0x8825
+#define EXIFTAGID_GPS_IFD_PTR \
+  CONSTRUCT_TAGID(GPS_IFD, _ID_GPS_IFD_PTR)
+#define EXIFTAGTYPE_GPS_IFD_PTR EXIF_LONG
+// ISO Speed Rating
+// Use EXIFTAGTYPE_ISO_SPEED_RATING as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_ISO_SPEED_RATING 0x8827
+#define EXIFTAGID_ISO_SPEED_RATING \
+  CONSTRUCT_TAGID(ISO_SPEED_RATING, _ID_ISO_SPEED_RATING)
+#define EXIFTAGTYPE_ISO_SPEED_RATING EXIF_SHORT
+// Optoelectric conversion factor
+// Use EXIFTAGTYPE_OECF as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_OECF 0x8828
+#define EXIFTAGID_OECF CONSTRUCT_TAGID(OECF, _ID_OECF)
+#define EXIFTAGTYPE_OECF EXIF_UNDEFINED
+// Exif version
+// Use EXIFTAGTYPE_EXIF_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_VERSION 0x9000
+#define EXIFTAGID_EXIF_VERSION \
+  CONSTRUCT_TAGID(EXIF_VERSION, _ID_EXIF_VERSION)
+#define EXIFTAGTYPE_EXIF_VERSION EXIF_UNDEFINED
+// Date and time of original data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_ORIGINAL 0x9003
+#define EXIFTAGID_EXIF_DATE_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_ORIGINAL, _ID_EXIF_DATE_TIME_ORIGINAL)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL EXIF_ASCII
+// Date and time of digital data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_DIGITIZED 0x9004
+#define EXIFTAGID_EXIF_DATE_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_DIGITIZED, _ID_EXIF_DATE_TIME_DIGITIZED)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED EXIF_ASCII
+// Meaning of each component
+// Use EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_COMPONENTS_CONFIG 0x9101
+#define EXIFTAGID_EXIF_COMPONENTS_CONFIG \
+  CONSTRUCT_TAGID(EXIF_COMPONENTS_CONFIG, _ID_EXIF_COMPONENTS_CONFIG)
+#define EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG EXIF_UNDEFINED
+// Image compression mode
+// Use EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXIF_COMPRESSED_BITS_PER_PIXEL 0x9102
+#define EXIFTAGID_EXIF_COMPRESSED_BITS_PER_PIXEL \
+  CONSTRUCT_TAGID(EXIF_COMPRESSED_BITS_PER_PIXEL, _ID_EXIF_COMPRESSED_BITS_PER_PIXEL)
+#define EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL EXIF_RATIONAL
+// Shutter speed
+// Use EXIFTAGTYPE_SHUTTER_SPEED as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_SHUTTER_SPEED 0x9201
+#define EXIFTAGID_SHUTTER_SPEED \
+  CONSTRUCT_TAGID(SHUTTER_SPEED, _ID_SHUTTER_SPEED)
+#define EXIFTAGTYPE_SHUTTER_SPEED EXIF_SRATIONAL
+// Aperture
+// Use EXIFTAGTYPE_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_APERTURE 0x9202
+#define EXIFTAGID_APERTURE CONSTRUCT_TAGID(APERTURE, _ID_APERTURE)
+#define EXIFTAGTYPE_APERTURE EXIF_RATIONAL
+// Brightness
+// Use EXIFTAGTYPE_BRIGHTNESS as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_BRIGHTNESS 0x9203
+#define EXIFTAGID_BRIGHTNESS CONSTRUCT_TAGID(BRIGHTNESS, _ID_BRIGHTNESS)
+#define EXIFTAGTYPE_BRIGHTNESS EXIF_SRATIONAL
+// Exposure bias
+// Use EXIFTAGTYPE_EXPOSURE_BIAS_VALUE as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_BIAS_VALUE 0x9204
+#define EXIFTAGID_EXPOSURE_BIAS_VALUE \
+  CONSTRUCT_TAGID(EXPOSURE_BIAS_VALUE, _ID_EXPOSURE_BIAS_VALUE)
+#define EXIFTAGTYPE_EXPOSURE_BIAS_VALUE EXIF_SRATIONAL
+// Maximum lens aperture
+// Use EXIFTAGTYPE_MAX_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_MAX_APERTURE 0x9205
+#define EXIFTAGID_MAX_APERTURE CONSTRUCT_TAGID(MAX_APERTURE, _ID_MAX_APERTURE)
+#define EXIFTAGTYPE_MAX_APERTURE EXIF_RATIONAL
+// Subject distance
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE 0x9206
+#define EXIFTAGID_SUBJECT_DISTANCE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE, _ID_SUBJECT_DISTANCE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE EXIF_RATIONAL
+// Metering mode
+// Use EXIFTAGTYPE_METERING_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_METERING_MODE 0x9207
+#define EXIFTAGID_METERING_MODE \
+  CONSTRUCT_TAGID(METERING_MODE, _ID_METERING_MODE)
+#define EXIFTAGTYPE_METERING_MODE EXIF_SHORT
+// Light source
+// Use EXIFTAGTYPE_LIGHT_SOURCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_LIGHT_SOURCE 0x9208
+#define EXIFTAGID_LIGHT_SOURCE CONSTRUCT_TAGID(LIGHT_SOURCE, _ID_LIGHT_SOURCE)
+#define EXIFTAGTYPE_LIGHT_SOURCE EXIF_SHORT
+// Flash
+// Use EXIFTAGTYPE_FLASH as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FLASH 0x9209
+#define EXIFTAGID_FLASH CONSTRUCT_TAGID(FLASH, _ID_FLASH)
+#define EXIFTAGTYPE_FLASH EXIF_SHORT
+// Lens focal length
+// Use EXIFTAGTYPE_FOCAL_LENGTH as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_LENGTH 0x920a
+#define EXIFTAGID_FOCAL_LENGTH CONSTRUCT_TAGID(FOCAL_LENGTH, _ID_FOCAL_LENGTH)
+#define EXIFTAGTYPE_FOCAL_LENGTH EXIF_RATIONAL
+// Subject area
+// Use EXIFTAGTYPE_SUBJECT_AREA as exif_tag_type (EXIF_SHORT)
+// Count should be 2 or 3 or 4
+#define _ID_SUBJECT_AREA 0x9214
+#define EXIFTAGID_SUBJECT_AREA CONSTRUCT_TAGID(SUBJECT_AREA, _ID_SUBJECT_AREA)
+#define EXIFTAGTYPE_SUBJECT_AREA EXIF_SHORT
+// Maker note
+// Use EXIFTAGTYPE_EXIF_MAKER_NOTE as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_MAKER_NOTE 0x927c
+#define EXIFTAGID_EXIF_MAKER_NOTE \
+  CONSTRUCT_TAGID(EXIF_MAKER_NOTE, _ID_EXIF_MAKER_NOTE)
+#define EXIFTAGTYPE_EXIF_MAKER_NOTE EXIF_UNDEFINED
+// User comments
+// Use EXIFTAGTYPE_EXIF_USER_COMMENT as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_USER_COMMENT 0x9286
+#define EXIFTAGID_EXIF_USER_COMMENT \
+  CONSTRUCT_TAGID(EXIF_USER_COMMENT, _ID_EXIF_USER_COMMENT)
+#define EXIFTAGTYPE_EXIF_USER_COMMENT EXIF_UNDEFINED
+// Date time sub-seconds
+// Use EXIFTAGTYPE_SUBSEC_TIME as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME 0x9290
+#define EXIFTAGID_SUBSEC_TIME CONSTRUCT_TAGID(SUBSEC_TIME, _ID_SUBSEC_TIME)
+#define EXIFTAGTYPE_SUBSEC_TIME EXIF_ASCII
+// Date time original sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_ORIGINAL 0x9291
+#define EXIFTAGID_SUBSEC_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(SUBSEC_TIME_ORIGINAL, _ID_SUBSEC_TIME_ORIGINAL)
+#define EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL EXIF_ASCII
+// Date time digitized sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_DIGITIZED 0x9292
+#define EXIFTAGID_SUBSEC_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(SUBSEC_TIME_DIGITIZED, _ID_SUBSEC_TIME_DIGITIZED)
+#define EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED EXIF_ASCII
+// Supported Flashpix version
+// Use EXIFTAGTYPE_EXIF_FLASHPIX_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_FLASHPIX_VERSION 0xa000
+#define EXIFTAGID_EXIF_FLASHPIX_VERSION \
+  CONSTRUCT_TAGID(EXIF_FLASHPIX_VERSION, _ID_EXIF_FLASHPIX_VERSION)
+#define EXIFTAGTYPE_EXIF_FLASHPIX_VERSION EXIF_UNDEFINED
+//  Color space information
+// Use EXIFTAGTYPE_EXIF_COLOR_SPACE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_COLOR_SPACE 0xa001
+#define EXIFTAGID_EXIF_COLOR_SPACE \
+  CONSTRUCT_TAGID(EXIF_COLOR_SPACE, _ID_EXIF_COLOR_SPACE)
+#define EXIFTAGTYPE_EXIF_COLOR_SPACE EXIF_SHORT
+//  Valid image width
+// Use EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_X_DIMENSION 0xa002
+#define EXIFTAGID_EXIF_PIXEL_X_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_X_DIMENSION, _ID_EXIF_PIXEL_X_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION EXIF_SHORT
+// Valid image height
+// Use EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_Y_DIMENSION 0xa003
+#define EXIFTAGID_EXIF_PIXEL_Y_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_Y_DIMENSION, _ID_EXIF_PIXEL_Y_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION  EXIF_SHORT
+// Related audio file
+// Use EXIFTAGTYPE_EXIF_RELATED_SOUND_FILE as the exif_tag_type (EXIF_ASCII)
+// Count should be 13
+#define _ID_RELATED_SOUND_FILE 0xa004
+#define EXIFTAGID_RELATED_SOUND_FILE \
+  CONSTRUCT_TAGID(RELATED_SOUND_FILE, _ID_RELATED_SOUND_FILE)
+#define EXIFTAGTYPE_RELATED_SOUND_FILE EXIF_ASCII
+// Interop IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_INTEROP_IFD_PTR 0xa005
+#define EXIFTAGID_INTEROP_IFD_PTR CONSTRUCT_TAGID(INTEROP, _ID_INTEROP_IFD_PTR)
+#define EXIFTAGTYPE_INTEROP_IFD_PTR EXIF_LONG
+// Flash energy
+// Use EXIFTAGTYPE_EXIF_FLASH_ENERGY as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FLASH_ENERGY 0xa20b
+#define EXIFTAGID_FLASH_ENERGY CONSTRUCT_TAGID(FLASH_ENERGY, _ID_FLASH_ENERGY)
+#define EXIFTAGTYPE_FLASH_ENERGY EXIF_RATIONAL
+// Spatial frequency response
+// Use EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE as exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_SPATIAL_FREQ_RESPONSE 0xa20c
+#define EXIFTAGID_SPATIAL_FREQ_RESPONSE \
+  CONSTRUCT_TAGID(SPATIAL_FREQ_RESPONSE, _ID_SPATIAL_FREQ_RESPONSE)
+#define EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE EXIF_UNDEFINED
+// Focal plane x resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_X_RESOLUTION 0xa20e
+#define EXIFTAGID_FOCAL_PLANE_X_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_X_RESOLUTION, _ID_FOCAL_PLANE_X_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION EXIF_RATIONAL
+// Focal plane y resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_Y_RESOLUTION 0xa20f
+#define EXIFTAGID_FOCAL_PLANE_Y_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_Y_RESOLUTION, _ID_FOCAL_PLANE_Y_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION EXIF_RATIONAL
+// Focal plane  resolution unit
+// Use EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT as exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_PLANE_RESOLUTION_UNIT 0xa210
+#define EXIFTAGID_FOCAL_PLANE_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(FOCAL_PLANE_RESOLUTION_UNIT, _ID_FOCAL_PLANE_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT EXIF_SHORT
+// Subject location
+// Use EXIFTAGTYPE_SUBJECT_LOCATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_SUBJECT_LOCATION 0xa214
+#define EXIFTAGID_SUBJECT_LOCATION \
+  CONSTRUCT_TAGID(SUBJECT_LOCATION, _ID_SUBJECT_LOCATION)
+#define EXIFTAGTYPE_SUBJECT_LOCATION EXIF_SHORT
+// Exposure index
+// Use EXIFTAGTYPE_EXPOSURE_INDEX as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_INDEX 0xa215
+#define EXIFTAGID_EXPOSURE_INDEX \
+  CONSTRUCT_TAGID(EXPOSURE_INDEX, _ID_EXPOSURE_INDEX)
+#define EXIFTAGTYPE_EXPOSURE_INDEX EXIF_RATIONAL
+// Sensing method
+// Use EXIFTAGTYPE_SENSING_METHOD as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SENSING_METHOD 0xa217
+#define EXIFTAGID_SENSING_METHOD \
+  CONSTRUCT_TAGID(SENSING_METHOD, _ID_SENSING_METHOD)
+#define EXIFTAGTYPE_SENSING_METHOD EXIF_SHORT
+// File source
+// Use EXIFTAGTYPE_FILE_SOURCE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_FILE_SOURCE 0xa300
+#define EXIFTAGID_FILE_SOURCE CONSTRUCT_TAGID(FILE_SOURCE, _ID_FILE_SOURCE)
+#define EXIFTAGTYPE_FILE_SOURCE EXIF_UNDEFINED
+// Scene type
+// Use EXIFTAGTYPE_SCENE_TYPE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_SCENE_TYPE 0xa301
+#define EXIFTAGID_SCENE_TYPE CONSTRUCT_TAGID(SCENE_TYPE, _ID_SCENE_TYPE)
+#define EXIFTAGTYPE_SCENE_TYPE EXIF_UNDEFINED
+// CFA pattern
+// Use EXIFTAGTYPE_CFA_PATTERN as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_CFA_PATTERN 0xa302
+#define EXIFTAGID_CFA_PATTERN CONSTRUCT_TAGID(CFA_PATTERN, _ID_CFA_PATTERN)
+#define EXIFTAGTYPE_CFA_PATTERN EXIF_UNDEFINED
+// Custom image processing
+// Use EXIFTAGTYPE_CUSTOM_RENDERED as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CUSTOM_RENDERED 0xa401
+#define EXIFTAGID_CUSTOM_RENDERED \
+  CONSTRUCT_TAGID(CUSTOM_RENDERED, _ID_CUSTOM_RENDERED)
+#define EXIFTAGTYPE_CUSTOM_RENDERED EXIF_SHORT
+// Exposure mode
+// Use EXIFTAGTYPE_EXPOSURE_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_MODE 0xa402
+#define EXIFTAGID_EXPOSURE_MODE \
+  CONSTRUCT_TAGID(EXPOSURE_MODE, _ID_EXPOSURE_MODE)
+#define EXIFTAGTYPE_EXPOSURE_MODE EXIF_SHORT
+// White balance
+// Use EXIFTAGTYPE_WHITE_BALANCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_WHITE_BALANCE 0xa403
+#define EXIFTAGID_WHITE_BALANCE \
+  CONSTRUCT_TAGID(WHITE_BALANCE, _ID_WHITE_BALANCE)
+#define EXIFTAGTYPE_WHITE_BALANCE EXIF_SHORT
+// Digital zoom ratio
+// Use EXIFTAGTYPE_DIGITAL_ZOOM_RATIO as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_DIGITAL_ZOOM_RATIO 0xa404
+#define EXIFTAGID_DIGITAL_ZOOM_RATIO \
+  CONSTRUCT_TAGID(DIGITAL_ZOOM_RATIO, _ID_DIGITAL_ZOOM_RATIO)
+#define EXIFTAGTYPE_DIGITAL_ZOOM_RATIO EXIF_RATIONAL
+// Focal length in 35mm film
+// Use EXIFTAGTYPE_FOCAL_LENGTH_35MM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_LENGTH_35MM 0xa405
+#define EXIFTAGID_FOCAL_LENGTH_35MM \
+  CONSTRUCT_TAGID(FOCAL_LENGTH_35MM, _ID_FOCAL_LENGTH_35MM)
+#define EXIFTAGTYPE_FOCAL_LENGTH_35MM EXIF_SHORT
+// Scene capture type
+// Use EXIFTAGTYPE_SCENE_CAPTURE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SCENE_CAPTURE_TYPE 0xa406
+#define EXIFTAGID_SCENE_CAPTURE_TYPE \
+  CONSTRUCT_TAGID(SCENE_CAPTURE_TYPE, _ID_SCENE_CAPTURE_TYPE)
+#define EXIFTAGTYPE_SCENE_CAPTURE_TYPE EXIF_SHORT
+// Gain control
+// Use EXIFTAGTYPE_GAIN_CONTROL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_GAIN_CONTROL 0xa407
+#define EXIFTAGID_GAIN_CONTROL CONSTRUCT_TAGID(GAIN_CONTROL, _ID_GAIN_CONTROL)
+#define EXIFTAGTYPE_GAIN_CONTROL EXIF_SHORT
+// Contrast
+// Use EXIFTAGTYPE_CONTRAST as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CONTRAST 0xa408
+#define EXIFTAGID_CONTRAST CONSTRUCT_TAGID(CONTRAST, _ID_CONTRAST)
+#define EXIFTAGTYPE_CONTRAST EXIF_SHORT
+// Saturation
+// Use EXIFTAGTYPE_SATURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SATURATION 0xa409
+#define EXIFTAGID_SATURATION CONSTRUCT_TAGID(SATURATION, _ID_SATURATION)
+#define EXIFTAGTYPE_SATURATION EXIF_SHORT
+// Sharpness
+// Use EXIFTAGTYPE_SHARPNESS as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SHARPNESS 0xa40a
+#define EXIFTAGID_SHARPNESS CONSTRUCT_TAGID(SHARPNESS, _ID_SHARPNESS)
+#define EXIFTAGTYPE_SHARPNESS EXIF_SHORT
+// Device settings description
+// Use EXIFTAGTYPE_DEVICE_SETTINGS_DESCRIPTION as exif_tag_type (EXIF_UNDEFINED)
+// Count could be any
+#define _ID_DEVICE_SETTINGS_DESCRIPTION 0xa40b
+#define EXIFTAGID_DEVICE_SETTINGS_DESCRIPTION \
+  CONSTRUCT_TAGID(DEVICE_SETTINGS_DESCRIPTION, _ID_DEVICE_SETTINGS_DESCRIPTION)
+#define EXIFTAGTYPE_DEVICE_SETTINGS_DESCRIPTION EXIF_UNDEFINED
+// Subject distance range
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE_RANGE 0xa40c
+#define EXIFTAGID_SUBJECT_DISTANCE_RANGE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE_RANGE, _ID_SUBJECT_DISTANCE_RANGE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE EXIF_SHORT
+// Unique image id
+// Use EXIFTAGTYPE_IMAGE_UID as the exif_tag_type (EXIF_ASCII)
+// Count should be 33
+#define _ID_IMAGE_UID 0xa420
+#define EXIFTAGID_IMAGE_UID CONSTRUCT_TAGID(IMAGE_UID, _ID_IMAGE_UID)
+#define EXIFTAGTYPE_IMAGE_UID EXIF_ASCII
+// PIM tag
+// Use EXIFTAGTYPE_PIM_TAG as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_PIM 0xc4a5
+#define EXIFTAGID_PIM_TAG CONSTRUCT_TAGID(PIM, _ID_PIM)
+#define EXIFTAGTYPE_PIM_TAG EXIF_UNDEFINED
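+
+/* Illustrative sketch: how a tag ID / tag type pair defined above is
+ * typically paired with a tag entry. Field names (type, copy, count,
+ * data._short) are assumed from the exif_tag_entry_t definition earlier
+ * in this header; treat this as a usage sketch, not a tested snippet.
+ *
+ *   exif_tag_entry_t entry;
+ *   entry.type = EXIFTAGTYPE_WHITE_BALANCE;  // EXIF_SHORT
+ *   entry.count = 1;                         // "Count should be 1"
+ *   entry.copy = 1;
+ *   entry.data._short = 0;                   // e.g. 0 = auto white balance
+ *
+ *   exif_tag_id_t tag_id = EXIFTAGID_WHITE_BALANCE;
+ *   // entry and tag_id are then handed to the exif composer together.
+ */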
+#endif // __QEXIF_H__
+
diff --git a/msm8974/mm-image-codec/qomx_core/Android.mk b/msm8974/mm-image-codec/qomx_core/Android.mk
new file mode 100644
index 0000000..1e1f047
--- /dev/null
+++ b/msm8974/mm-image-codec/qomx_core/Android.mk
@@ -0,0 +1,18 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS := -Wall -Werror -g -O0
+
+LOCAL_C_INCLUDES := \
+    frameworks/native/include/media/openmax \
+    $(LOCAL_PATH)/../qexif
+
+LOCAL_SRC_FILES := qomx_core.c
+
+LOCAL_MODULE := libqomx_core
+LOCAL_SHARED_LIBRARIES := libcutils libdl liblog
+LOCAL_VENDOR_MODULE := true
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/msm8974/mm-image-codec/qomx_core/QOMX_JpegExtensions.h b/msm8974/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
new file mode 100644
index 0000000..5f7dffd
--- /dev/null
+++ b/msm8974/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
@@ -0,0 +1,313 @@
+/*Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef __QOMX_EXTENSIONS_H__
+#define __QOMX_EXTENSIONS_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <OMX_Image.h>
+#include <qexif.h>
+
+/** qomx_image_events
+*  Qcom specific events extended from OMX_EVENT
+*  @OMX_EVENT_THUMBNAIL_DROPPED - Indicates that the thumbnail
+*                                 size is too big to be included
+*                                 in the exif and will be
+*                                 dropped
+**/
+typedef enum {
+ OMX_EVENT_THUMBNAIL_DROPPED = OMX_EventVendorStartUnused+1
+} QOMX_IMAGE_EXT_EVENTS;
+
+/**
+*  The following macros define the strings to be used for
+*  getting the extension indices.
+**/
+#define QOMX_IMAGE_EXT_EXIF_NAME                  "OMX.QCOM.image.exttype.exif"
+#define QOMX_IMAGE_EXT_THUMBNAIL_NAME        "OMX.QCOM.image.exttype.thumbnail"
+#define QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME "OMX.QCOM.image.exttype.bufferOffset"
+#define QOMX_IMAGE_EXT_MOBICAT_NAME            "OMX.QCOM.image.exttype.mobicat"
+#define QOMX_IMAGE_EXT_ENCODING_MODE_NAME        "OMX.QCOM.image.encoding.mode"
+#define QOMX_IMAGE_EXT_WORK_BUFFER_NAME      "OMX.QCOM.image.exttype.workbuffer"
+#define QOMX_IMAGE_EXT_METADATA_NAME      "OMX.QCOM.image.exttype.metadata"
+#define QOMX_IMAGE_EXT_META_ENC_KEY_NAME      "OMX.QCOM.image.exttype.metaEncKey"
+#define QOMX_IMAGE_EXT_MEM_OPS_NAME      "OMX.QCOM.image.exttype.mem_ops"
+#define QOMX_IMAGE_EXT_JPEG_SPEED_NAME      "OMX.QCOM.image.exttype.jpeg.speed"
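+
+/* Illustrative sketch: resolving one of the extension strings above to an
+ * extension index with the standard OMX_GetExtensionIndex call. hComp is
+ * assumed to be a component handle previously obtained via OMX_GetHandle.
+ *
+ *   OMX_INDEXTYPE exif_idx;
+ *   OMX_ERRORTYPE ret = OMX_GetExtensionIndex(hComp,
+ *       (OMX_STRING)QOMX_IMAGE_EXT_EXIF_NAME, &exif_idx);
+ *   if (ret == OMX_ErrorNone) {
+ *     // exif_idx can now be used with OMX_SetParameter/OMX_GetParameter
+ *   }
+ */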
+
+/** QOMX_IMAGE_EXT_INDEXTYPE
+*  This enum is an extension of the OMX_INDEXTYPE enum and
+*  specifies Qcom supported extension indices. These indices are
+*  associated with the extension names and can be used as
+*  indices in the OMX_SetParameter and OMX_GetParameter functions
+*  to set or get values from Qcom specific data structures
+**/
+typedef enum {
+  //Name: OMX.QCOM.image.exttype.exif
+  QOMX_IMAGE_EXT_EXIF = 0x07F00000,
+
+  //Name: OMX.QCOM.image.exttype.thumbnail
+  QOMX_IMAGE_EXT_THUMBNAIL = 0x07F00001,
+
+  //Name: OMX.QCOM.image.exttype.bufferOffset
+  QOMX_IMAGE_EXT_BUFFER_OFFSET = 0x07F00002,
+
+  //Name: OMX.QCOM.image.exttype.mobicat
+  QOMX_IMAGE_EXT_MOBICAT = 0x07F00003,
+
+  //Name: OMX.QCOM.image.encoding.mode
+  QOMX_IMAGE_EXT_ENCODING_MODE = 0x07F00004,
+
+  //Name: OMX.QCOM.image.exttype.workbuffer
+  QOMX_IMAGE_EXT_WORK_BUFFER = 0x07F00005,
+
+  //Name: OMX.QCOM.image.exttype.metadata
+  QOMX_IMAGE_EXT_METADATA = 0x07F00008,
+
+  //Name: OMX.QCOM.image.exttype.metaEncKey
+  QOMX_IMAGE_EXT_META_ENC_KEY = 0x07F00009,
+
+  //Name: OMX.QCOM.image.exttype.memOps
+  QOMX_IMAGE_EXT_MEM_OPS = 0x07F0000A,
+
+  //Name: OMX.QCOM.image.exttype.jpeg.speed
+  QOMX_IMAGE_EXT_JPEG_SPEED = 0x07F0000B,
+
+} QOMX_IMAGE_EXT_INDEXTYPE;
+
+/** QOMX_BUFFER_INFO
+*  The structure specifies information associated with the
+*  buffers and should be passed as appData in UseBuffer calls
+*  to the OMX component with buffer specific data.
+*  @fd - FD of the buffer allocated. If the buffer is
+*        allocated on the heap, it can be zero.
+*  @offset - Buffer offset
+**/
+
+typedef struct {
+  OMX_U32 fd;
+  OMX_U32 offset;
+} QOMX_BUFFER_INFO;
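+
+/* Illustrative sketch: passing QOMX_BUFFER_INFO as the appData argument of a
+ * standard OMX_UseBuffer call. hComp, input_port, buf_size, vaddr and ion_fd
+ * are placeholders assumed to be set up by the caller.
+ *
+ *   QOMX_BUFFER_INFO buf_info;
+ *   buf_info.fd = (OMX_U32)ion_fd;  // 0 if the buffer lives on the heap
+ *   buf_info.offset = 0;
+ *
+ *   OMX_BUFFERHEADERTYPE *buf_hdr = NULL;
+ *   OMX_UseBuffer(hComp, &buf_hdr, input_port, &buf_info,
+ *       buf_size, (OMX_U8 *)vaddr);
+ */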
+
+/** QEXIF_INFO_DATA
+*   The basic exif structure used to construct
+*   information for a single exif tag.
+*   @tag_entry - Value, type and count of the tag
+*   @tag_id - ID of the tag
+**/
+typedef struct{
+  exif_tag_entry_t tag_entry;
+  exif_tag_id_t tag_id;
+} QEXIF_INFO_DATA;
+
+/**QOMX_EXIF_INFO
+*  The structure contains an array of exif tag
+*  structures (QEXIF_INFO_DATA) and should be passed to the OMX
+*  layer by the OMX client using the extension index.
+*  @exif_data - Array of exif tags
+*  @numOfEntries - Number of exif tag entries being passed in
+*                  the array
+**/
+typedef struct {
+  QEXIF_INFO_DATA *exif_data;
+  OMX_U32 numOfEntries;
+} QOMX_EXIF_INFO;
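+
+/* Illustrative sketch: handing an array of exif tags to the component through
+ * the exif extension index resolved from QOMX_IMAGE_EXT_EXIF_NAME.
+ * exif_tags and num_tags are assumed to be populated by the caller.
+ *
+ *   QOMX_EXIF_INFO exif_info;
+ *   exif_info.exif_data = exif_tags;     // QEXIF_INFO_DATA array
+ *   exif_info.numOfEntries = num_tags;
+ *   OMX_SetParameter(hComp, exif_idx, &exif_info);
+ */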
+
+/**QOMX_YUV_FRAME_INFO
+*  The structure contains all the offsets
+*  associated with the Y and CbCr buffers.
+*  @yOffset - Offset within the Y buffer
+*  @cbcrOffset - Offset within the Cb/Cr buffer. The array
+*                should be populated in order depending on Cb
+*                first or Cr first in case of planar data. For
+*                pseudo-planar data, only the first array element
+*                needs to be filled and the second element should
+*                be set to zero.
+*  @cbcrStartOffset - Start offset of the Cb/Cr buffer relative
+*                     to the start of the Y buffer. The array
+*                     should be populated in order depending on
+*                     Cb first or Cr first in case of planar
+*                     data. For pseudo-planar data, only the first
+*                     array element needs to be filled and the
+*                     second element should be set to zero.
+**/
+typedef struct {
+  OMX_U32 yOffset;
+  OMX_U32 cbcrOffset[2];
+  OMX_U32 cbcrStartOffset[2];
+} QOMX_YUV_FRAME_INFO;
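+
+/* Illustrative sketch: offsets for a pseudo-planar (e.g. NV12) frame of width
+ * W and height H stored in one contiguous buffer, Y plane followed by the
+ * interleaved CbCr plane. Only the first cbcr array element is used, as
+ * described above.
+ *
+ *   QOMX_YUV_FRAME_INFO frame_info = {0};
+ *   frame_info.yOffset = 0;
+ *   frame_info.cbcrOffset[0] = 0;           // offset within the CbCr plane
+ *   frame_info.cbcrStartOffset[0] = W * H;  // CbCr plane starts after Y
+ */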
+
+/** qomx_thumbnail_info
+*  Includes all information associated with the thumbnail
+*  @input_width - Width of the input thumbnail buffer
+*  @input_height - Height of the input thumbnail buffer
+*  @scaling_enabled - Flag indicating if thumbnail scaling is
+*                     enabled.
+*  @crop_info - Includes the crop width, crop height,
+*               horizontal and vertical offsets.
+*  @output_width - Output width of the thumbnail. This is
+*                  the width after scaling if scaling is enabled,
+*                  the width after cropping if only cropping is
+*                  enabled, or the input width otherwise
+*  @output_height - Output height of the thumbnail. This is
+*                   the height after scaling if scaling is enabled,
+*                   the height after cropping if only cropping is
+*                   enabled, or the input height otherwise
+*  @tmbOffset - Offsets of the thumbnail YUV frame
+*  @rotation - Rotation to be applied to the thumbnail
+**/
+typedef struct {
+  OMX_U32 input_width;
+  OMX_U32 input_height;
+  OMX_U8 scaling_enabled;
+  OMX_CONFIG_RECTTYPE crop_info;
+  OMX_U32 output_width;
+  OMX_U32 output_height;
+  QOMX_YUV_FRAME_INFO tmbOffset;
+  OMX_U32 rotation;
+} QOMX_THUMBNAIL_INFO;
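+
+/* Illustrative sketch: requesting a scaled thumbnail via the thumbnail
+ * extension index resolved from QOMX_IMAGE_EXT_THUMBNAIL_NAME. The
+ * dimensions here are placeholder values.
+ *
+ *   QOMX_THUMBNAIL_INFO thumb_info;
+ *   memset(&thumb_info, 0, sizeof(thumb_info));
+ *   thumb_info.input_width = 1920;
+ *   thumb_info.input_height = 1080;
+ *   thumb_info.scaling_enabled = 1;
+ *   thumb_info.output_width = 480;
+ *   thumb_info.output_height = 270;
+ *   OMX_SetParameter(hComp, thumbnail_idx, &thumb_info);
+ */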
+
+/**qomx_mobicat
+*  Mobicat data to be passed to the OMX layer
+*  @mobicatData - Mobicat data
+*  @mobicatDataLength - Length of the mobicat data
+**/
+typedef struct {
+  OMX_U8 *mobicatData;
+  OMX_U32 mobicatDataLength;
+} QOMX_MOBICAT;
+
+/**qomx_workbuffer
+*  Ion buffer to be used for the H/W encoder
+*  @fd - FD of the buffer allocated
+*  @vaddr - Buffer address
+*  @length - Buffer length
+**/
+typedef struct {
+  int fd;
+  uint8_t *vaddr;
+  uint32_t length;
+} QOMX_WORK_BUFFER;
+
+/**QOMX_METADATA
+ *
+ * meta data to be set in EXIF
+ */
+typedef struct {
+  OMX_U8  *metadata;
+  OMX_U32 metaPayloadSize;
+  OMX_U8 mobicat_mask;
+} QOMX_METADATA;
+
+/**QOMX_META_ENC_KEY
+ *
+ * meta data encryption key
+ */
+typedef struct {
+  OMX_U8  *metaKey;
+  OMX_U32 keyLen;
+} QOMX_META_ENC_KEY;
+
+/** QOMX_IMG_COLOR_FORMATTYPE
+*  This enum is an extension of the OMX_COLOR_FORMATTYPE enum.
+*  It specifies Qcom supported color formats.
+**/
+typedef enum QOMX_IMG_COLOR_FORMATTYPE {
+  OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar = OMX_COLOR_FormatVendorStartUnused + 0x300,
+  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYVU420Planar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422Planar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422Planar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYUV422Planar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYVU444Planar,
+  OMX_QCOM_IMG_COLOR_FormatYUV444Planar
+} QOMX_IMG_COLOR_FORMATTYPE;
+
+/** QOMX_ENCODING_MODE
+*  This enum is used to select parallel encoding
+*  or sequential encoding for the thumbnail and
+*  main image
+**/
+typedef enum {
+  OMX_Serial_Encoding,
+  OMX_Parallel_Encoding
+} QOMX_ENCODING_MODE;
+
+
+/**omx_jpeg_ouput_buf_t
+*  Structure describing the jpeg output buffer
+*  @handle - Handle to the containing class
+*  @mem_hdl - Handle to camera memory struct
+*  @isheap - Flag indicating if the buffer is heap allocated
+*  @size - Buffer size
+*  @vaddr - Buffer address
+*  @fd - File descriptor
+**/
+typedef struct {
+  void *handle;
+  void *mem_hdl;
+  int8_t isheap;
+  size_t size; /*input*/
+  void *vaddr;
+  int fd;
+} omx_jpeg_ouput_buf_t;
+
+/** QOMX_MEM_OPS
+* Structure holding the function pointers to
+* buffer memory operations
+* @get_memory - function to allocate buffer memory
+**/
+typedef struct {
+  int (*get_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+} QOMX_MEM_OPS;
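+
+/* Illustrative sketch: a minimal heap-backed get_memory callback wired into
+ * QOMX_MEM_OPS. A real client would typically hand out ION memory and fill
+ * mem_hdl/fd accordingly; this version only satisfies the contract that
+ * vaddr points to size usable bytes.
+ *
+ *   static int example_get_memory(omx_jpeg_ouput_buf_t *p_out_buf)
+ *   {
+ *     p_out_buf->vaddr = malloc(p_out_buf->size);
+ *     p_out_buf->isheap = 1;
+ *     p_out_buf->fd = -1;
+ *     return (p_out_buf->vaddr != NULL) ? 0 : -1;
+ *   }
+ *
+ *   QOMX_MEM_OPS mem_ops = { example_get_memory };
+ */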
+
+/** QOMX_JPEG_SPEED_MODE
+* Enum specifying the values for the jpeg
+* speed mode setting
+**/
+typedef enum {
+  QOMX_JPEG_SPEED_MODE_NORMAL,
+  QOMX_JPEG_SPEED_MODE_HIGH
+} QOMX_JPEG_SPEED_MODE;
+
+/** QOMX_JPEG_SPEED
+* Structure used to set the jpeg speed mode
+* parameter
+* @speedMode - jpeg speed mode
+**/
+typedef struct {
+  QOMX_JPEG_SPEED_MODE speedMode;
+} QOMX_JPEG_SPEED;
+
+#ifdef __cplusplus
+ }
+#endif
+
+#endif
diff --git a/msm8974/mm-image-codec/qomx_core/qomx_core.c b/msm8974/mm-image-codec/qomx_core/qomx_core.c
new file mode 100644
index 0000000..17fec5b
--- /dev/null
+++ b/msm8974/mm-image-codec/qomx_core/qomx_core.c
@@ -0,0 +1,367 @@
+/*Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "qomx_image_core"
+#include <utils/Log.h>
+
+#include "qomx_core.h"
+
+#define BUFF_SIZE 255
+
+static omx_core_t *g_omxcore;
+static pthread_mutex_t g_omxcore_lock = PTHREAD_MUTEX_INITIALIZER;
+static int g_omxcore_cnt = 0;
+
+//Map the library name with the component name
+static const comp_info_t g_comp_info[] =
+{
+  { "OMX.qcom.image.jpeg.encoder", "libqomx_jpegenc.so" },
+  { "OMX.qcom.image.jpeg.decoder", "libqomx_jpegdec.so" }
+};
+
+static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *acompIndex,
+  int *ainstanceIndex);
+
+/*==============================================================================
+* Function : OMX_Init
+* Parameters: None
+* Description: This is the first call that is made to the OMX Core
+* and initializes the OMX IL core
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Init()
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int i = 0;
+  int comp_cnt = sizeof(g_comp_info)/sizeof(g_comp_info[0]);
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  /* check if core is created */
+  if (g_omxcore) {
+    g_omxcore_cnt++;
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return rc;
+  }
+
+  if (comp_cnt > OMX_COMP_MAX_NUM) {
+    ALOGE("%s:%d] cannot exceed max number of components",
+      __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorUndefined;
+  }
+  /* create new global object */
+  g_omxcore = malloc(sizeof(omx_core_t));
+  if (g_omxcore) {
+    memset(g_omxcore, 0x0, sizeof(omx_core_t));
+
+    /* populate the library name and component name */
+    for (i = 0; i < comp_cnt; i++) {
+      g_omxcore->component[i].comp_name = g_comp_info[i].comp_name;
+      g_omxcore->component[i].lib_name = g_comp_info[i].lib_name;
+    }
+    g_omxcore->comp_cnt = comp_cnt;
+    g_omxcore_cnt++;
+  } else {
+    rc = OMX_ErrorInsufficientResources;
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGE("%s:%d] Complete %d", __func__, __LINE__, comp_cnt);
+  return rc;
+}
+
+/*==============================================================================
+* Function : OMX_Deinit
+* Parameters: None
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit all the OMX components
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Deinit()
+{
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  if (g_omxcore_cnt == 1) {
+    if (g_omxcore) {
+      free(g_omxcore);
+      g_omxcore = NULL;
+    }
+  }
+  if (g_omxcore_cnt) {
+    g_omxcore_cnt--;
+  }
+
+  ALOGE("%s:%d] Complete", __func__, __LINE__);
+  pthread_mutex_unlock(&g_omxcore_lock);
+  return OMX_ErrorNone;
+}
+
+/*==============================================================================
+* Function : get_comp_from_list
+* Parameters: comp_name
+* Return Value : component index, or -1 if the component is not found
+* Description: If the component is present in the list, return its index.
+* Otherwise return -1.
+==============================================================================*/
+static int get_comp_from_list(char *comp_name)
+{
+  int index = -1, i = 0;
+
+  if (NULL == comp_name)
+    return -1;
+
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    if (!strcmp(g_omxcore->component[i].comp_name, comp_name)) {
+      index = i;
+      break;
+    }
+  }
+  return index;
+}
+
+/*==============================================================================
+* Function : get_free_inst_idx
+* Parameters: p_comp
+* Return Value : The next free instance index, or -1 if none is available
+* Description: Get the next available index at which to store the new
+*            instance of the component being created.
+==============================================================================*/
+static int get_free_inst_idx(omx_core_component_t *p_comp)
+{
+  int idx = -1, i = 0;
+
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL == p_comp->handle[i]) {
+      idx = i;
+      break;
+    }
+  }
+  return idx;
+}
+
+/*==============================================================================
+* Function : OMX_GetHandle
+* Parameters: handle, componentName, appData, callbacks
+* Return Value : OMX_ERRORTYPE
+* Description: Load the requested component library and construct the component
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_GetHandle(
+  OMX_OUT OMX_HANDLETYPE* handle,
+  OMX_IN OMX_STRING componentName,
+  OMX_IN OMX_PTR appData,
+  OMX_IN OMX_CALLBACKTYPE* callBacks)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx = 0, inst_idx = 0;
+  void *p_obj = NULL;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+  OMX_BOOL close_handle = OMX_FALSE;
+
+  if (NULL == handle) {
+    ALOGE("%s:%d] Error invalid input ", __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  comp_idx = get_comp_from_list(componentName);
+  if (comp_idx < 0) {
+    ALOGE("%s:%d] Cannot find the component", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+
+  *handle = NULL;
+
+  //Get a free instance index for the component
+  inst_idx = get_free_inst_idx(p_core_comp);
+  if (inst_idx < 0) {
+    ALOGE("%s:%d] Cannot alloc new instance", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  if (FALSE == p_core_comp->open) {
+    /* load the library */
+    p_core_comp->lib_handle = dlopen(p_core_comp->lib_name, RTLD_NOW);
+    if (NULL == p_core_comp->lib_handle) {
+      ALOGE("%s:%d] Cannot load the library", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+
+    p_core_comp->open = TRUE;
+    /* Init the component and get component functions */
+    p_core_comp->create_comp_func = dlsym(p_core_comp->lib_handle,
+      "create_component_fns");
+    p_core_comp->get_instance = dlsym(p_core_comp->lib_handle, "getInstance");
+
+    close_handle = OMX_TRUE;
+    if (!p_core_comp->create_comp_func || !p_core_comp->get_instance) {
+      ALOGE("%s:%d] Cannot maps the symbols", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+  }
+
+  /* Call the function from the address to create the obj */
+  p_obj = (*p_core_comp->get_instance)();
+  ALOGE("%s:%d] get instance pts is %p", __func__, __LINE__, p_obj);
+  if (NULL == p_obj) {
+    ALOGE("%s:%d] Error cannot create object", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  /* Call the function from the address to get the func ptrs */
+  p_comp = (*p_core_comp->create_comp_func)(p_obj);
+  if (NULL == p_comp) {
+    ALOGE("%s:%d] Error cannot create component", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  *handle = p_core_comp->handle[inst_idx] = (OMX_HANDLETYPE)p_comp;
+
+  ALOGD("%s:%d] handle = %p Instanceindex = %d,"
+    "comp_idx %d g_ptr %p", __func__, __LINE__,
+    p_core_comp->handle[inst_idx], inst_idx,
+    comp_idx, g_omxcore);
+
+  p_comp->SetCallbacks(p_comp, callBacks, appData);
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGE("%s:%d] Success", __func__, __LINE__);
+  return OMX_ErrorNone;
+
+error:
+
+  if (OMX_TRUE == close_handle) {
+    dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGE("%s:%d] Error %d", __func__, __LINE__, rc);
+  return rc;
+}
+
+/*==============================================================================
+* Function : get_idx_from_handle
+* Parameters: ahComp, aCompIdx, aInstIdx
+* Return Value : TRUE if the component is present, FALSE otherwise. On success
+* the component index and instance index are returned via aCompIdx/aInstIdx.
+* Description: Check if the handle is present in the list and get the component
+* index and instance index for the component handle.
+==============================================================================*/
+static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *aCompIdx,
+  int *aInstIdx)
+{
+  int i = 0, j = 0;
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    for (j = 0; j < OMX_COMP_MAX_INSTANCES; j++) {
+      if ((OMX_COMPONENTTYPE *)g_omxcore->component[i].handle[j] ==
+        (OMX_COMPONENTTYPE *)ahComp) {
+        ALOGE("%s:%d] comp_idx %d inst_idx %d", __func__, __LINE__, i, j);
+        *aCompIdx = i;
+        *aInstIdx = j;
+        return TRUE;
+      }
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : is_comp_active
+* Parameters: p_core_comp
+* Return Value : int
+* Description: Check if the component has any active instances
+==============================================================================*/
+static uint8_t is_comp_active(omx_core_component_t *p_core_comp)
+{
+  uint8_t i = 0;
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL != p_core_comp->handle[i]) {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : OMX_FreeHandle
+* Parameters: hComp
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit the omx component and remove it from the global list
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_FreeHandle(
+  OMX_IN OMX_HANDLETYPE hComp)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx, inst_idx;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+
+  ALOGE("%s:%d] ", __func__, __LINE__);
+  if (hComp == NULL) {
+    return OMX_ErrorBadParameter;
+  }
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  p_comp = (OMX_COMPONENTTYPE *)hComp;
+  if (FALSE == get_idx_from_handle(hComp, &comp_idx, &inst_idx)) {
+    ALOGE("%s:%d] Error invalid component", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+
+  //Deinit the component
+  rc = p_comp->ComponentDeInit(hComp);
+  if (rc != OMX_ErrorNone) {
+    /* Component deinit failed; leave the handle in the comp structure */
+    ALOGE("%s:%d] Error comp deinit failed", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+  p_core_comp->handle[inst_idx] = NULL;
+  if (!is_comp_active(p_core_comp)) {
+    rc = dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+    p_core_comp->get_instance = NULL;
+    p_core_comp->create_comp_func = NULL;
+    p_core_comp->open = FALSE;
+  } else {
+    ALOGE("%s:%d] Error Component is still Active", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGE("%s:%d] Success", __func__, __LINE__);
+  return rc;
+}
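+
+/* Illustrative sketch of the client-side lifecycle served by this core:
+ * initialize, obtain a handle by component name, then release everything.
+ * The callback functions and my_app_data are assumed to be defined by the
+ * caller.
+ *
+ *   OMX_CALLBACKTYPE callbacks = {
+ *     .EventHandler    = my_event_handler,
+ *     .EmptyBufferDone = my_empty_buffer_done,
+ *     .FillBufferDone  = my_fill_buffer_done,
+ *   };
+ *   OMX_HANDLETYPE handle = NULL;
+ *
+ *   OMX_Init();
+ *   OMX_GetHandle(&handle, "OMX.qcom.image.jpeg.encoder",
+ *       my_app_data, &callbacks);
+ *   // ... configure ports, queue buffers, encode ...
+ *   OMX_FreeHandle(handle);
+ *   OMX_Deinit();
+ */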
diff --git a/msm8974/mm-image-codec/qomx_core/qomx_core.h b/msm8974/mm-image-codec/qomx_core/qomx_core.h
new file mode 100644
index 0000000..1049742
--- /dev/null
+++ b/msm8974/mm-image-codec/qomx_core/qomx_core.h
@@ -0,0 +1,97 @@
+/*Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef QOMX_CORE_H
+#define QOMX_CORE_H
+
+#include <stdio.h>
+#include <unistd.h>
+#include <malloc.h>
+#include <pthread.h>
+#include <dlfcn.h>
+#include <stdlib.h>
+#include <string.h>
+#include "OMX_Component.h"
+
+#define TRUE 1
+#define FALSE 0
+#define OMX_COMP_MAX_INSTANCES 3
+#define OMX_CORE_MAX_ROLES 1
+#define OMX_COMP_MAX_NUM 3
+#define OMX_SPEC_VERSION 0x00000101
+
+typedef void *(*get_instance_t)(void);
+typedef void *(*create_comp_func_t)(OMX_PTR aobj);
+
+/** comp_info_t: Structure containing the mapping
+*    between the component name and the corresponding .so name
+*    @comp_name: Name of the component
+*    @lib_name: Name of the .so library
+**/
+typedef struct comp_info_t {
+  char *comp_name;
+  char *lib_name;
+} comp_info_t;
+
+/** omx_core_component_t: OMX Component structure
+*    @handle: Array of instance handles of the component
+*    @roles: Array of roles played by the component
+*    @name: Component name
+*    @open: Is the component active
+*    @lib_handle: Library handle after dlopen
+*    @get_instance: Function ptr to get the instance of the component
+*    @create_comp_func: Function ptr to map the functions in the
+*     OMX handle to their respective function implementations in
+*     the component
+*    @comp_name: Name of the component
+*    @lib_name: Name of the .so library
+**/
+typedef struct _omx_core_component_t {
+  OMX_HANDLETYPE *handle[OMX_COMP_MAX_INSTANCES];  //Instance handle
+  char *roles[OMX_CORE_MAX_ROLES];  //Roles played by the component
+  char *name;  //Component Name
+  uint8_t open;  //Is component active
+  void *lib_handle;
+  get_instance_t get_instance;
+  create_comp_func_t create_comp_func;
+  char *comp_name;
+  char *lib_name;
+} omx_core_component_t;
+
+/** omx_core_t: Global structure that contains all the active
+*   components
+*    @component: Array of active components
+*    @comp_cnt: Number of components in the array
+*    @core_lock: Lock to synchronize the omx core operations
+**/
+typedef struct _omx_core_t {
+  omx_core_component_t component[OMX_COMP_MAX_NUM];  //Array of pointers to components
+  int comp_cnt;
+  pthread_mutex_t core_lock;
+} omx_core_t;
+
+#endif