am 82bde8f8: Reconcile with jb-release

* commit '82bde8f8661400798eb7718430c71d9d90b78844':
diff --git a/data/etc/apns-conf.xml b/data/etc/apns-conf.xml
deleted file mode 100644
index 2fe90d9..0000000
--- a/data/etc/apns-conf.xml
+++ /dev/null
@@ -1,265 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-/*
-** Copyright 2006, Google Inc.
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
--->
-
-<!-- use empty string to specify no proxy or port -->
-<!-- This version must agree with that in apps/common/res/apns.xml -->
-<apns version="7">
-    <apn carrier="T-Mobile US"
-         mcc="310"
-         mnc="260"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-    />
-
-    <apn carrier="T-Mobile US 250"
-         mcc="310"
-         mnc="250"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 660"
-         mcc="310"
-         mnc="660"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 230"
-         mcc="310"
-         mnc="230"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 310"
-         mcc="310"
-         mnc="310"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 580"
-         mcc="310"
-         mnc="580"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 240"
-         mcc="310"
-         mnc="240"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 800"
-         mcc="310"
-         mnc="800"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 210"
-         mcc="310"
-         mnc="210"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 160"
-         mcc="310"
-         mnc="160"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 270"
-         mcc="310"
-         mnc="270"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 200"
-         mcc="310"
-         mnc="200"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 220"
-         mcc="310"
-         mnc="220"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 490"
-         mcc="310"
-         mnc="490"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <!-- T-Mobile Europe -->
-    <apn carrier="T-Mobile UK"
-         mcc="234"
-         mnc="30"
-         apn="general.t-mobile.uk"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="149.254.201.135"
-         mmsport="8080"
-         mmsc="http://mmsc.t-mobile.co.uk:8002"
-    />
-
-    <apn carrier="T-Mobile D"
-         mcc="262"
-         mnc="01"
-         apn="internet.t-mobile"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="172.028.023.131"
-         mmsport="8008"
-         mmsc="http://mms.t-mobile.de/servlets/mms"
-    />
-
-    <apn carrier="T-Mobile A"
-         mcc="232"
-         mnc="03"
-         apn="gprsinternet"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="010.012.000.020"
-         mmsport="80"
-         mmsc="http://mmsc.t-mobile.at/servlets/mms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile A MMS"
-         mcc="232"
-         mnc="03"
-         apn="gprsmms"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="010.012.000.020"
-         mmsport="80"
-         mmsc="http://mmsc.t-mobile.at/servlets/mms"
-         type="mms"
-    />
-
-    <apn carrier="T-Mobile CZ"
-         mcc="230"
-         mnc="01"
-         apn="internet.t-mobile.cz"
-         user="wap"
-         password="wap"
-         server="*"
-         mmsproxy="010.000.000.010"
-         mmsport="80"
-         mmsc="http://mms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile CZ MMS"
-         mcc="230"
-         mnc="01"
-         apn="mms.t-mobile.cz"
-         user="mms"
-         password="mms"
-         server="*"
-         mmsproxy="010.000.000.010"
-         mmsport="80"
-         mmsc="http://mms"
-         type="mms"
-    />
-
-    <apn carrier="T-Mobile NL"
-         mcc="204"
-         mnc="16"
-         apn="internet"
-         user="*"
-         password="*"
-         server="*"
-         mmsproxy="010.010.010.011"
-         mmsport="8080"
-         mmsc="http://t-mobilemms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile NL MMS"
-         mcc="204"
-         mnc="16"
-         apn="mms"
-         user="tmobilemms"
-         password="tmobilemms"
-         server="*"
-         mmsproxy="010.010.010.011"
-         mmsport="8080"
-         mmsc="http://t-mobilemms"
-         type="mms"
-    />
-</apns>
diff --git a/data/etc/apns-conf_sdk.xml b/data/etc/apns-conf_sdk.xml
deleted file mode 100644
index 0e9cf7d..0000000
--- a/data/etc/apns-conf_sdk.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2008 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-  
-          http://www.apache.org/licenses/LICENSE-2.0
-  
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-
-<!-- This file contains fake APNs that are necessary for the emulator
-     to talk to the network.  It should only be installed for SDK builds.
-
-     This file is not installed by the local Android.mk, it's installed using
-     a PRODUCT_COPY_FILES line in the sdk section of the toplevel Makefile.
--->
-
-<!-- use empty string to specify no proxy or port -->
-<!-- This version must agree with that in apps/common/res/apns.xml -->
-<apns version="7">
-    <apn carrier="Android"
-        mcc="310"
-        mnc="995"
-        apn="internet"
-        user="*"
-        server="*"
-        password="*"
-        mmsc="null"
-    />
-    <apn carrier="TelKila"
-        mcc="310"
-        mnc="260"
-        apn="internet"
-        user="*"
-        server="*"
-        password="*"
-        mmsc="null"
-    />
-</apns>
diff --git a/data/etc/vold.conf b/data/etc/vold.conf
deleted file mode 100644
index 7888936..0000000
--- a/data/etc/vold.conf
+++ /dev/null
@@ -1,10 +0,0 @@
-## vold configuration file for the emulator/SDK
-
-volume_sdcard {
-    ## This is the direct uevent device path to the SD slot on the device
-    emu_media_path /devices/platform/goldfish_mmc.0/mmc_host/mmc0
-
-    media_type     mmc
-    mount_point    /sdcard
-    ums_path       /devices/platform/usb_mass_storage/lun0
-}
diff --git a/ide/eclipse/.classpath b/ide/eclipse/.classpath
index 4a5ba3a..f40ae6f 100644
--- a/ide/eclipse/.classpath
+++ b/ide/eclipse/.classpath
@@ -1,16 +1,15 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="packages/apps/Bluetooth/src"/>
+	<classpathentry kind="src" path="packages/apps/Camera/src"/>
 	<classpathentry kind="src" path="packages/apps/Browser/src"/>
 	<classpathentry kind="src" path="packages/apps/Calendar/src"/>
 	<classpathentry kind="src" path="packages/apps/Calculator/src"/>
-	<classpathentry kind="src" path="packages/apps/Camera/src"/>
 	<classpathentry kind="src" path="packages/apps/CertInstaller/src"/>
 	<classpathentry kind="src" path="packages/apps/Contacts/src"/>
 	<classpathentry kind="src" path="packages/apps/DeskClock/src"/>
 	<classpathentry kind="src" path="packages/apps/Email/src"/>
 	<classpathentry kind="src" path="packages/apps/Email/emailcommon/src"/>
-	<classpathentry kind="src" path="packages/apps/Exchange/exchange2/src"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/src"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/src_pd"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/gallerycommon/src"/>
@@ -117,5 +116,6 @@
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/android-common_intermediates/javalib.jar"/>
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/guava_intermediates/javalib.jar"/>
 	<classpathentry kind="lib" path="packages/apps/Calculator/arity-2.1.2.jar"/>
+	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/junit-runner_intermediates/javalib.jar"/>
 	<classpathentry kind="output" path="out/target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates/classes"/>
 </classpath>
diff --git a/ide/intellij/codestyles/AndroidStyle.xml b/ide/intellij/codestyles/AndroidStyle.xml
index 113ffca..cd6beb4 100644
--- a/ide/intellij/codestyles/AndroidStyle.xml
+++ b/ide/intellij/codestyles/AndroidStyle.xml
@@ -42,7 +42,7 @@
       <package name="" withSubpackages="true" />
     </value>
   </option>
-  <option name="RIGHT_MARGIN" value="80" />
+  <option name="RIGHT_MARGIN" value="100" />
   <option name="CALL_PARAMETERS_WRAP" value="1" />
   <option name="METHOD_PARAMETERS_WRAP" value="1" />
   <option name="EXTENDS_LIST_WRAP" value="1" />
diff --git a/testrunner/android_build.py b/testrunner/android_build.py
index 584ef52..a10d43b 100644
--- a/testrunner/android_build.py
+++ b/testrunner/android_build.py
@@ -42,7 +42,8 @@
   # TODO: does this need to be reimplemented to be like gettop() in envsetup.sh
   root_path = os.getenv("ANDROID_BUILD_TOP")
   if root_path is None:
-    logger.Log("Error: ANDROID_BUILD_TOP not defined. Please run envsetup.sh")
+    logger.Log("Error: ANDROID_BUILD_TOP not defined. Please run "
+               "envsetup.sh and lunch/choosecombo")
     raise errors.AbortError
   return root_path
 
@@ -109,7 +110,8 @@
   """
   path = os.getenv("ANDROID_PRODUCT_OUT")
   if path is None:
-    logger.Log("Error: ANDROID_PRODUCT_OUT not defined. Please run envsetup.sh")
+    logger.Log("Error: ANDROID_PRODUCT_OUT not defined. Please run "
+               "envsetup.sh and lunch/choosecombo")
     raise errors.AbortError
   return path
 
diff --git a/tools/emulator/opengl/system/egl/egl.cpp b/tools/emulator/opengl/system/egl/egl.cpp
index ee195ac..b1be38d 100644
--- a/tools/emulator/opengl/system/egl/egl.cpp
+++ b/tools/emulator/opengl/system/egl/egl.cpp
@@ -263,10 +263,9 @@
 
 EGLBoolean egl_window_surface_t::init()
 {
-    if (nativeWindow->dequeueBuffer(nativeWindow, &buffer) != NO_ERROR) {
+    if (nativeWindow->dequeueBuffer_DEPRECATED(nativeWindow, &buffer) != NO_ERROR) {
         setErrorReturn(EGL_BAD_ALLOC, EGL_FALSE);
     }
-    nativeWindow->lockBuffer(nativeWindow, buffer);
 
     DEFINE_AND_VALIDATE_HOST_CONNECTION(EGL_FALSE);
     rcSurface = rcEnc->rcCreateWindowSurface(rcEnc, (uint32_t)config,
@@ -300,7 +299,7 @@
         rcEnc->rcDestroyWindowSurface(rcEnc, rcSurface);
     }
     if (buffer) {
-        nativeWindow->cancelBuffer(nativeWindow, buffer);
+        nativeWindow->cancelBuffer_DEPRECATED(nativeWindow, buffer);
     }
     nativeWindow->common.decRef(&nativeWindow->common);
 }
@@ -316,12 +315,11 @@
 
     rcEnc->rcFlushWindowColorBuffer(rcEnc, rcSurface);
 
-    nativeWindow->queueBuffer(nativeWindow, buffer);
-    if (nativeWindow->dequeueBuffer(nativeWindow, &buffer)) {
+    nativeWindow->queueBuffer_DEPRECATED(nativeWindow, buffer);
+    if (nativeWindow->dequeueBuffer_DEPRECATED(nativeWindow, &buffer)) {
         buffer = NULL;
         setErrorReturn(EGL_BAD_ALLOC, EGL_FALSE);
     }
-    nativeWindow->lockBuffer(nativeWindow, buffer);
 
     rcEnc->rcSetWindowColorBuffer(rcEnc, rcSurface,
             ((cb_handle_t *)(buffer->handle))->hostHandle);
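
The egl.cpp hunks above track an ANativeWindow API change: queueBuffer/dequeueBuffer/cancelBuffer grew fence-based signatures, and the old blocking behavior is kept behind the *_DEPRECATED wrappers, so the dequeued buffer is already usable and the separate lockBuffer() call is dropped. A minimal sketch of the resulting swap path, assuming the JB-era <system/window.h> wrappers and with error handling reduced to a return code:

    #include <system/window.h>

    // Sketch only: rotate to the next window buffer the way the patched
    // eglSwapBuffers path does. The *_DEPRECATED entry points preserve the
    // old blocking semantics, so no explicit lockBuffer() follows the
    // dequeue.
    static int swapNativeBuffer(ANativeWindow* win, ANativeWindowBuffer** buf) {
        win->queueBuffer_DEPRECATED(win, *buf);
        if (win->dequeueBuffer_DEPRECATED(win, buf) != 0) {
            *buf = NULL;   // caller reports EGL_BAD_ALLOC
            return -1;
        }
        return 0;
    }
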
diff --git a/tools/emulator/opengl/system/gralloc/gralloc.cpp b/tools/emulator/opengl/system/gralloc/gralloc.cpp
index 4334835..90781f1 100644
--- a/tools/emulator/opengl/system/gralloc/gralloc.cpp
+++ b/tools/emulator/opengl/system/gralloc/gralloc.cpp
@@ -189,9 +189,20 @@
                 // Raw sensor data cannot be used by HW
                 return -EINVAL;
             }
+            // Not expecting to actually create any GL surfaces for this
             glFormat = GL_LUMINANCE;
             glType = GL_UNSIGNED_SHORT;
             break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            bpp = 1;
+            if (! (sw_read && sw_write) ) {
+                // Blob data cannot be used by HW
+                return -EINVAL;
+            }
+            // Not expecting to actually create any GL surfaces for this
+            glFormat = GL_LUMINANCE;
+            glType = GL_UNSIGNED_BYTE;
+            break;
         default:
             return -EINVAL;
     }
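
The new HAL_PIXEL_FORMAT_BLOB case mirrors the RAW_SENSOR one above it: the buffer is treated as an opaque byte array (bpp = 1), it must be software-read/write only, and the GL format/type pair is a placeholder since no GL surface is expected for it. BLOB is conventionally how the camera HAL hands back compressed JPEG output. A hedged sketch of the size math this implies (helper name hypothetical):

    // Sketch: for a BLOB buffer the width carries the payload byte count
    // and the height is conventionally 1, so the allocation is width bytes.
    static size_t blobAllocSize(uint32_t width, uint32_t height) {
        const uint32_t bpp = 1;   // opaque bytes, as in the gralloc.cpp case
        return (size_t)width * height * bpp;
    }
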
diff --git a/tools/emulator/system/camera/Android.mk b/tools/emulator/system/camera/Android.mk
index c51f621..ee08f94 100755
--- a/tools/emulator/system/camera/Android.mk
+++ b/tools/emulator/system/camera/Android.mk
@@ -33,6 +33,7 @@
 	libjpeg \
 	libskia \
 	libandroid_runtime \
+	libcamera_metadata
 
 LOCAL_C_INCLUDES += external/jpeg \
 	external/skia/include/core/ \
@@ -57,7 +58,11 @@
 	JpegCompressor.cpp \
     EmulatedCamera2.cpp \
 	EmulatedFakeCamera2.cpp \
-	EmulatedQemuCamera2.cpp
+	EmulatedQemuCamera2.cpp \
+	fake-pipeline2/Scene.cpp \
+	fake-pipeline2/Sensor.cpp \
+	fake-pipeline2/JpegCompressor.cpp
+
 
 ifeq ($(TARGET_PRODUCT),vbox_x86)
 LOCAL_MODULE := camera.vbox_x86
diff --git a/tools/emulator/system/camera/EmulatedCamera2.cpp b/tools/emulator/system/camera/EmulatedCamera2.cpp
index f7672f4..599bd16 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera2.cpp
@@ -22,7 +22,7 @@
  * for all camera API calls that are defined by the camera2_device_ops_t API.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera2_Camera"
 #include <cutils/log.h>
 
@@ -48,36 +48,10 @@
     ops = &sDeviceOps;
     priv = this;
 
-    mRequestQueueDstOps.notify_queue_not_empty =
-            EmulatedCamera2::request_queue_notify_queue_not_empty;
-    mRequestQueueDstOps.parent                 = this;
+    mNotifyCb = NULL;
 
-    mRequestQueueDstOps.notify_queue_not_empty =
-            EmulatedCamera2::reprocess_queue_notify_queue_not_empty;
-    mReprocessQueueDstOps.parent               = this;
-
-    mFrameQueueSrcOps.buffer_count = EmulatedCamera2::frame_queue_buffer_count;
-    mFrameQueueSrcOps.dequeue      = EmulatedCamera2::frame_queue_dequeue;
-    mFrameQueueSrcOps.free         = EmulatedCamera2::frame_queue_free;
-    mFrameQueueSrcOps.parent       = this;
-
-    mReprocessStreamOps.dequeue_buffer =
-            EmulatedCamera2::reprocess_stream_dequeue_buffer;
-    mReprocessStreamOps.enqueue_buffer =
-            EmulatedCamera2::reprocess_stream_enqueue_buffer;
-    mReprocessStreamOps.cancel_buffer =
-            EmulatedCamera2::reprocess_stream_cancel_buffer;
-    mReprocessStreamOps.set_buffer_count =
-            EmulatedCamera2::reprocess_stream_set_buffer_count;
-    mReprocessStreamOps.set_crop = EmulatedCamera2::reprocess_stream_set_crop;
-    mReprocessStreamOps.set_timestamp =
-            EmulatedCamera2::reprocess_stream_set_timestamp;
-    mReprocessStreamOps.set_usage = EmulatedCamera2::reprocess_stream_set_usage;
-    mReprocessStreamOps.get_min_undequeued_buffer_count =
-            EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count;
-    mReprocessStreamOps.lock_buffer =
-            EmulatedCamera2::reprocess_stream_lock_buffer;
-    mReprocessStreamOps.parent   = this;
+    mRequestQueueSrc = NULL;
+    mFrameQueueDst = NULL;
 
     mVendorTagOps.get_camera_vendor_section_name =
             EmulatedCamera2::get_camera_vendor_section_name;
@@ -109,6 +83,7 @@
  ***************************************************************************/
 
 status_t EmulatedCamera2::connectCamera(hw_device_t** device) {
+    *device = &common;
     return NO_ERROR;
 }
 
@@ -117,126 +92,85 @@
 }
 
 status_t EmulatedCamera2::getCameraInfo(struct camera_info* info) {
-
     return EmulatedBaseCamera::getCameraInfo(info);
 }
 
 /****************************************************************************
- * Camera API implementation.
+ * Camera Device API implementation.
  * These methods are called from the camera API callback routines.
  ***************************************************************************/
 
 /** Request input queue */
 
-int EmulatedCamera2::setRequestQueueSrcOps(
-    camera2_metadata_queue_src_ops *request_queue_src_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::requestQueueNotifyNotEmpty() {
-    return NO_ERROR;
-}
-
-/** Reprocessing input queue */
-
-int EmulatedCamera2::setReprocessQueueSrcOps(
-    camera2_metadata_queue_src_ops *reprocess_queue_src_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessQueueNotifyNotEmpty() {
-    return NO_ERROR;
-}
-
-/** Frame output queue */
-
-int EmulatedCamera2::setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::frameQueueBufferCount() {
-    return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueDequeue(camera_metadata_t **buffer) {
-    return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueFree(camera_metadata_t *old_buffer) {
-    return NO_ERROR;
-}
-
-/** Notifications to application */
-int EmulatedCamera2::setNotifyCallback(camera2_notify_callback notify_cb) {
-    return NO_ERROR;
+int EmulatedCamera2::requestQueueNotify() {
+    return INVALID_OPERATION;
 }
 
 /** Count of requests in flight */
 int EmulatedCamera2::getInProgressCount() {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
 /** Cancel all captures in flight */
 int EmulatedCamera2::flushCapturesInProgress() {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
-/** Reprocessing input stream management */
-int EmulatedCamera2::reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
-        int *stride) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamEnqueueBuffer(buffer_handle_t* buffer) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamCancelBuffer(buffer_handle_t* buffer) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetBufferCount(int count) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetCrop(int left, int top, int right, int bottom) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetTimestamp(int64_t timestamp) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetUsage(int usage) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetSwapInterval(int interval) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamGetMinUndequeuedBufferCount(int *count) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamLockBuffer(buffer_handle_t *buffer) {
-    return NO_ERROR;
+/** Construct a default request for a given use case */
+int EmulatedCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request) {
+    return INVALID_OPERATION;
 }
 
 /** Output stream creation and management */
 
-int EmulatedCamera2::getStreamSlotCount() {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::allocateStream(uint32_t stream_slot,
+int EmulatedCamera2::allocateStream(
         uint32_t width,
         uint32_t height,
         int format,
-        camera2_stream_ops_t *stream_ops) {
-    return NO_ERROR;
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    return INVALID_OPERATION;
 }
 
-int EmulatedCamera2::releaseStream(uint32_t stream_slot) {
-    return NO_ERROR;
+int EmulatedCamera2::registerStreamBuffers(
+        uint32_t stream_id,
+        int num_buffers,
+        buffer_handle_t *buffers) {
+    return INVALID_OPERATION;
+}
+
+
+int EmulatedCamera2::releaseStream(uint32_t stream_id) {
+    return INVALID_OPERATION;
+}
+
+/** Reprocessing input stream management */
+
+int EmulatedCamera2::allocateReprocessStream(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id,
+        uint32_t *consumer_usage,
+        uint32_t *max_buffers) {
+    return INVALID_OPERATION;
+}
+
+int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
+    return INVALID_OPERATION;
+}
+
+/** 3A triggering */
+
+int EmulatedCamera2::triggerAction(uint32_t trigger_id,
+                                   int ext1, int ext2) {
+    return INVALID_OPERATION;
 }
 
 /** Custom tag query methods */
@@ -253,14 +187,10 @@
     return -1;
 }
 
-/** Shutdown and debug methods */
-
-int EmulatedCamera2::release() {
-    return NO_ERROR;
-}
+/** Debug methods */
 
 int EmulatedCamera2::dump(int fd) {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
 /****************************************************************************
@@ -272,214 +202,117 @@
  * hardware/libhardware/include/hardware/camera2.h for information on each
  * of these callbacks. Implemented in this class, these callbacks simply
  * dispatch the call into an instance of EmulatedCamera2 class defined by the
- * 'camera_device2' parameter.
+ * 'camera_device2' parameter, or set a member value in the same.
  ***************************************************************************/
 
-int EmulatedCamera2::set_request_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops *queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setRequestQueueSrcOps(queue_src_ops);
+EmulatedCamera2* getInstance(const camera2_device_t *d) {
+    const EmulatedCamera2* cec = static_cast<const EmulatedCamera2*>(d);
+    return const_cast<EmulatedCamera2*>(cec);
 }
 
-int EmulatedCamera2::get_request_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops **queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
-        &ec->mRequestQueueDstOps);
+int EmulatedCamera2::set_request_queue_src_ops(const camera2_device_t *d,
+        const camera2_request_queue_src_ops *queue_src_ops) {
+    EmulatedCamera2* ec = getInstance(d);
+    ec->mRequestQueueSrc = queue_src_ops;
     return NO_ERROR;
 }
 
-int EmulatedCamera2::request_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *q) {
-    EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
-    return ec->requestQueueNotifyNotEmpty();
+int EmulatedCamera2::notify_request_queue_not_empty(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->requestQueueNotify();
 }
 
-int EmulatedCamera2::set_reprocess_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops *queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setReprocessQueueSrcOps(queue_src_ops);
-}
-
-int EmulatedCamera2::get_reprocess_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops **queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
-        &ec->mReprocessQueueDstOps);
+int EmulatedCamera2::set_frame_queue_dst_ops(const camera2_device_t *d,
+        const camera2_frame_queue_dst_ops *queue_dst_ops) {
+    EmulatedCamera2* ec = getInstance(d);
+    ec->mFrameQueueDst = queue_dst_ops;
     return NO_ERROR;
 }
 
-int EmulatedCamera2::reprocess_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *q) {
-    EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
-    return ec->reprocessQueueNotifyNotEmpty();
-}
-
-int EmulatedCamera2::set_frame_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops *queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setFrameQueueDstOps(queue_dst_ops);
-}
-
-int EmulatedCamera2::get_frame_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops **queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_src_ops = static_cast<camera2_metadata_queue_src_ops*>(
-        &ec->mFrameQueueSrcOps);
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::frame_queue_buffer_count(camera2_metadata_queue_src_ops *q) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueBufferCount();
-}
-
-int EmulatedCamera2::frame_queue_dequeue(camera2_metadata_queue_src_ops *q,
-        camera_metadata_t **buffer) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueDequeue(buffer);
-}
-
-int EmulatedCamera2::frame_queue_free(camera2_metadata_queue_src_ops *q,
-        camera_metadata_t *old_buffer) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueFree(old_buffer);
-}
-
-int EmulatedCamera2::set_notify_callback(struct camera2_device *d,
-        camera2_notify_callback notify_cb) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setNotifyCallback(notify_cb);
-}
-
-int EmulatedCamera2::get_in_progress_count(struct camera2_device *d) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+int EmulatedCamera2::get_in_progress_count(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
     return ec->getInProgressCount();
 }
 
-int EmulatedCamera2::flush_captures_in_progress(struct camera2_device *d) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+int EmulatedCamera2::flush_captures_in_progress(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
     return ec->flushCapturesInProgress();
 }
 
-int EmulatedCamera2::get_reprocess_stream_ops(camera2_device_t *d,
-        camera2_stream_ops **stream) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *stream = static_cast<camera2_stream_ops*>(&ec->mReprocessStreamOps);
-    return NO_ERROR;
+int EmulatedCamera2::construct_default_request(const camera2_device_t *d,
+        int request_template,
+        camera_metadata_t **request) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->constructDefaultRequest(request_template, request);
 }
 
-int EmulatedCamera2::reprocess_stream_dequeue_buffer(camera2_stream_ops *s,
-        buffer_handle_t** buffer, int *stride) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamDequeueBuffer(buffer, stride);
+int EmulatedCamera2::allocate_stream(const camera2_device_t *d,
+        uint32_t width,
+        uint32_t height,
+        int format,
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateStream(width, height, format, stream_ops,
+            stream_id, format_actual, usage, max_buffers);
 }
 
-int EmulatedCamera2::reprocess_stream_enqueue_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamEnqueueBuffer(buffer);
+int EmulatedCamera2::register_stream_buffers(const camera2_device_t *d,
+        uint32_t stream_id,
+        int num_buffers,
+        buffer_handle_t *buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->registerStreamBuffers(stream_id,
+            num_buffers,
+            buffers);
+}
+int EmulatedCamera2::release_stream(const camera2_device_t *d,
+        uint32_t stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->releaseStream(stream_id);
 }
 
-int EmulatedCamera2::reprocess_stream_cancel_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamCancelBuffer(buffer);
-}
-
-int EmulatedCamera2::reprocess_stream_set_buffer_count(camera2_stream_ops *s,
-        int count) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_set_crop(camera2_stream_ops *s,
-        int left, int top, int right, int bottom) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetCrop(left, top, right, bottom);
-}
-
-int EmulatedCamera2::reprocess_stream_set_timestamp(camera2_stream_ops *s,
-        int64_t timestamp) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetTimestamp(timestamp);
-}
-
-int EmulatedCamera2::reprocess_stream_set_usage(camera2_stream_ops *s,
-        int usage) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetUsage(usage);
-}
-
-int EmulatedCamera2::reprocess_stream_set_swap_interval(camera2_stream_ops *s,
-        int interval) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetSwapInterval(interval);
-}
-
-int EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count(
-        const camera2_stream_ops *s,
-        int *count) {
-    EmulatedCamera2* ec = static_cast<const StreamOps*>(s)->parent;
-    return ec->reprocessStreamGetMinUndequeuedBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_lock_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamLockBuffer(buffer);
-}
-
-int EmulatedCamera2::get_stream_slot_count(struct camera2_device *d) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->getStreamSlotCount();
-}
-
-int EmulatedCamera2::allocate_stream(struct camera2_device *d,
-        uint32_t stream_slot,
+int EmulatedCamera2::allocate_reprocess_stream(const camera2_device_t *d,
         uint32_t width,
         uint32_t height,
         uint32_t format,
-        camera2_stream_ops_t *stream_ops) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->allocateStream(stream_slot, width, height, format, stream_ops);
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id,
+        uint32_t *consumer_usage,
+        uint32_t *max_buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateReprocessStream(width, height, format,
+            reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
 }
 
-int EmulatedCamera2::release_stream(struct camera2_device *d,
-        uint32_t stream_slot) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->releaseStream(stream_slot);
+int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
+        uint32_t stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->releaseReprocessStream(stream_id);
 }
 
-void EmulatedCamera2::release(struct camera2_device *d) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    ec->release();
+int EmulatedCamera2::trigger_action(const camera2_device_t *d,
+        uint32_t trigger_id,
+        int ext1,
+        int ext2) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->triggerAction(trigger_id, ext1, ext2);
 }
 
-int EmulatedCamera2::dump(struct camera2_device *d, int fd) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->dump(fd);
+int EmulatedCamera2::set_notify_callback(const camera2_device_t *d,
+        camera2_notify_callback notify_cb, void* user) {
+    EmulatedCamera2* ec = getInstance(d);
+    ec->mNotifyCb = notify_cb;
+    ec->mNotifyUserPtr = user;
+    return NO_ERROR;
 }
 
-int EmulatedCamera2::close(struct hw_device_t* device) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(
-                reinterpret_cast<struct camera2_device*>(device) );
-    if (ec == NULL) {
-        ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
-        return -EINVAL;
-    }
-    return ec->closeCamera();
-}
-
-int EmulatedCamera2::get_metadata_vendor_tag_ops(struct camera2_device *d,
+int EmulatedCamera2::get_metadata_vendor_tag_ops(const camera2_device_t *d,
         vendor_tag_query_ops_t **ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+    EmulatedCamera2* ec = getInstance(d);
     *ops = static_cast<vendor_tag_query_ops_t*>(
             &ec->mVendorTagOps);
     return NO_ERROR;
@@ -506,22 +339,37 @@
     return ec->getVendorTagType(tag);
 }
 
+int EmulatedCamera2::dump(const camera2_device_t *d, int fd) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->dump(fd);
+}
+
+int EmulatedCamera2::close(struct hw_device_t* device) {
+    EmulatedCamera2* ec =
+            static_cast<EmulatedCamera2*>(
+                reinterpret_cast<camera2_device_t*>(device) );
+    if (ec == NULL) {
+        ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
+        return -EINVAL;
+    }
+    return ec->closeCamera();
+}
+
 camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
     EmulatedCamera2::set_request_queue_src_ops,
-    EmulatedCamera2::get_request_queue_dst_ops,
-    EmulatedCamera2::set_reprocess_queue_src_ops,
-    EmulatedCamera2::get_reprocess_queue_dst_ops,
+    EmulatedCamera2::notify_request_queue_not_empty,
     EmulatedCamera2::set_frame_queue_dst_ops,
-    EmulatedCamera2::get_frame_queue_src_ops,
-    EmulatedCamera2::set_notify_callback,
     EmulatedCamera2::get_in_progress_count,
     EmulatedCamera2::flush_captures_in_progress,
-    EmulatedCamera2::get_reprocess_stream_ops,
-    EmulatedCamera2::get_stream_slot_count,
+    EmulatedCamera2::construct_default_request,
     EmulatedCamera2::allocate_stream,
+    EmulatedCamera2::register_stream_buffers,
     EmulatedCamera2::release_stream,
+    EmulatedCamera2::allocate_reprocess_stream,
+    EmulatedCamera2::release_reprocess_stream,
+    EmulatedCamera2::trigger_action,
+    EmulatedCamera2::set_notify_callback,
     EmulatedCamera2::get_metadata_vendor_tag_ops,
-    EmulatedCamera2::release,
     EmulatedCamera2::dump
 };
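
The rewritten dispatch layer drops the per-queue and per-stream wrapper structs with parent pointers in favor of a single getInstance() cast: every camera2_device_t handed to the static callbacks is the leading sub-object of an EmulatedCamera2, so a static_cast recovers the instance. A minimal sketch of the pattern, assuming only that the C++ class derives from the C struct (names hypothetical):

    #include <hardware/camera2.h>

    struct Device2 : camera2_device_t {        // C struct is the first base
        int inProgressCount();                 // hypothetical instance method

        static Device2* get(const camera2_device_t* d) {
            // Valid because the HAL only ever hands out pointers that were
            // created as Device2 instances.
            return const_cast<Device2*>(static_cast<const Device2*>(d));
        }
        static int get_in_progress_count(const camera2_device_t* d) {
            return get(d)->inProgressCount();  // trampoline into C++
        }
    };
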
 
diff --git a/tools/emulator/system/camera/EmulatedCamera2.h b/tools/emulator/system/camera/EmulatedCamera2.h
index feeadf9..2c813a4 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.h
+++ b/tools/emulator/system/camera/EmulatedCamera2.h
@@ -67,7 +67,7 @@
     virtual status_t Initialize();
 
     /****************************************************************************
-     * Camera API implementation
+     * Camera module API and generic hardware device API implementation
      ***************************************************************************/
 
 public:
@@ -75,7 +75,7 @@
 
     virtual status_t closeCamera();
 
-    virtual status_t getCameraInfo(struct camera_info* info);
+    virtual status_t getCameraInfo(struct camera_info* info) = 0;
 
     /****************************************************************************
      * Camera API implementation.
@@ -83,177 +83,134 @@
      ***************************************************************************/
 
 protected:
-    /** Request input queue */
-
-    int setRequestQueueSrcOps(
-        camera2_metadata_queue_src_ops *request_queue_src_ops);
-
-    int requestQueueNotifyNotEmpty();
-
-    /** Reprocessing input queue */
-
-    int setReprocessQueueSrcOps(
-        camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
-
-    int reprocessQueueNotifyNotEmpty();
-
-    /** Frame output queue */
-
-    int setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops);
-
-    int frameQueueBufferCount();
-    int frameQueueDequeue(camera_metadata_t **buffer);
-    int frameQueueFree(camera_metadata_t *old_buffer);
-
-    /** Notifications to application */
-    int setNotifyCallback(camera2_notify_callback notify_cb);
+    /** Request input queue notification */
+    virtual int requestQueueNotify();
 
     /** Count of requests in flight */
-    int getInProgressCount();
+    virtual int getInProgressCount();
 
     /** Cancel all captures in flight */
-    int flushCapturesInProgress();
+    virtual int flushCapturesInProgress();
 
-    /** Reprocessing input stream management */
-    int reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
-            int *stride);
-
-    int reprocessStreamEnqueueBuffer(buffer_handle_t* buffer);
-
-    int reprocessStreamCancelBuffer(buffer_handle_t* buffer);
-
-    int reprocessStreamSetBufferCount(int count);
-
-    int reprocessStreamSetCrop(int left, int top, int right, int bottom);
-
-    int reprocessStreamSetTimestamp(int64_t timestamp);
-
-    int reprocessStreamSetUsage(int usage);
-
-    int reprocessStreamSetSwapInterval(int interval);
-
-    int reprocessStreamGetMinUndequeuedBufferCount(int *count);
-
-    int reprocessStreamLockBuffer(buffer_handle_t *buffer);
+    virtual int constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request);
 
     /** Output stream creation and management */
-
-    int getStreamSlotCount();
-
-    int allocateStream(uint32_t stream_slot,
+    virtual int allocateStream(
             uint32_t width,
             uint32_t height,
             int format,
-            camera2_stream_ops_t *stream_ops);
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
 
-    int releaseStream(uint32_t stream_slot);
+    virtual int registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
+
+    virtual int releaseStream(uint32_t stream_id);
+
+    /** Input stream creation and management */
+    virtual int allocateReprocessStream(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id,
+            uint32_t *consumer_usage,
+            uint32_t *max_buffers);
+
+    virtual int releaseReprocessStream(uint32_t stream_id);
+
+    /** 3A action triggering */
+    virtual int triggerAction(uint32_t trigger_id,
+            int ext1, int ext2);
 
     /** Custom tag definitions */
-    const char* getVendorSectionName(uint32_t tag);
-    const char* getVendorTagName(uint32_t tag);
-    int         getVendorTagType(uint32_t tag);
+    virtual const char* getVendorSectionName(uint32_t tag);
+    virtual const char* getVendorTagName(uint32_t tag);
+    virtual int         getVendorTagType(uint32_t tag);
 
-    /** Shutdown and debug methods */
+    /** Debug methods */
 
-    int release();
-
-    int dump(int fd);
-
-    int close();
+    virtual int dump(int fd);
 
     /****************************************************************************
      * Camera API callbacks as defined by camera2_device_ops structure.  See
      * hardware/libhardware/include/hardware/camera2.h for information on each
      * of these callbacks. Implemented in this class, these callbacks simply
-     * dispatch the call into an instance of EmulatedCamera2 class defined in the
-     * 'camera_device2' parameter.
+     * dispatch the call into an instance of EmulatedCamera2 class defined in
+     * the 'camera_device2' parameter.
      ***************************************************************************/
 
 private:
     /** Input request queue */
-    static int set_request_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops *queue_src_ops);
-    static int get_request_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops **queue_dst_ops);
-    // for get_request_queue_dst_ops
-    static int request_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *);
-
-    /** Input reprocess queue */
-    static int set_reprocess_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
-    static int get_reprocess_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops **queue_dst_ops);
-    // for reprocess_queue_dst_ops
-    static int reprocess_queue_notify_queue_not_empty(
-            camera2_metadata_queue_dst_ops *);
+    static int set_request_queue_src_ops(const camera2_device_t *,
+            const camera2_request_queue_src_ops *queue_src_ops);
+    static int notify_request_queue_not_empty(const camera2_device_t *);
 
     /** Output frame queue */
-    static int set_frame_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops *queue_dst_ops);
-    static int get_frame_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops **queue_src_ops);
-    // for get_frame_queue_src_ops
-    static int frame_queue_buffer_count(camera2_metadata_queue_src_ops *);
-    static int frame_queue_dequeue(camera2_metadata_queue_src_ops *,
-            camera_metadata_t **buffer);
-    static int frame_queue_free(camera2_metadata_queue_src_ops *,
-            camera_metadata_t *old_buffer);
-
-    /** Notifications to application */
-    static int set_notify_callback(camera2_device_t *,
-            camera2_notify_callback notify_cb);
+    static int set_frame_queue_dst_ops(const camera2_device_t *,
+            const camera2_frame_queue_dst_ops *queue_dst_ops);
 
     /** In-progress request management */
-    static int get_in_progress_count(camera2_device_t *);
+    static int get_in_progress_count(const camera2_device_t *);
 
-    static int flush_captures_in_progress(camera2_device_t *);
+    static int flush_captures_in_progress(const camera2_device_t *);
 
-    /** Input reprocessing stream */
-    static int get_reprocess_stream_ops(camera2_device_t *,
-            camera2_stream_ops_t **stream);
-    // for get_reprocess_stream_ops
-    static int reprocess_stream_dequeue_buffer(camera2_stream_ops *,
-            buffer_handle_t** buffer, int *stride);
-    static int reprocess_stream_enqueue_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
-    static int reprocess_stream_cancel_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
-    static int reprocess_stream_set_buffer_count(camera2_stream_ops *,
-            int count);
-    static int reprocess_stream_set_crop(camera2_stream_ops *,
-            int left, int top, int right, int bottom);
-    static int reprocess_stream_set_timestamp(camera2_stream_ops *,
-            int64_t timestamp);
-    static int reprocess_stream_set_usage(camera2_stream_ops *,
-            int usage);
-    static int reprocess_stream_set_swap_interval(camera2_stream_ops *,
-            int interval);
-    static int reprocess_stream_get_min_undequeued_buffer_count(
-            const camera2_stream_ops *,
-            int *count);
-    static int reprocess_stream_lock_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
+    /** Request template creation */
+    static int construct_default_request(const camera2_device_t *,
+            int request_template,
+            camera_metadata_t **request);
 
-    /** Output stream allocation and management */
+    /** Stream management */
+    static int allocate_stream(const camera2_device_t *,
+            uint32_t width,
+            uint32_t height,
+            int format,
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
 
-    static int get_stream_slot_count(camera2_device_t *);
+    static int register_stream_buffers(const camera2_device_t *,
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
 
-    static int allocate_stream(camera2_device_t *,
-            uint32_t stream_slot,
+    static int release_stream(const camera2_device_t *,
+            uint32_t stream_id);
+
+    static int allocate_reprocess_stream(const camera2_device_t *,
             uint32_t width,
             uint32_t height,
             uint32_t format,
-            camera2_stream_ops_t *stream_ops);
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id,
+            uint32_t *consumer_usage,
+            uint32_t *max_buffers);
 
-    static int release_stream(camera2_device_t *,
-            uint32_t stream_slot);
+    static int release_reprocess_stream(const camera2_device_t *,
+            uint32_t stream_id);
 
-    static void release(camera2_device_t *);
+    /** 3A triggers */
+    static int trigger_action(const camera2_device_t *,
+            uint32_t trigger_id,
+            int ext1,
+            int ext2);
+
+    /** Notifications to application */
+    static int set_notify_callback(const camera2_device_t *,
+            camera2_notify_callback notify_cb,
+            void *user);
 
     /** Vendor metadata registration */
-
-    static int get_metadata_vendor_tag_ops(camera2_device_t *,
+    static int get_metadata_vendor_tag_ops(const camera2_device_t *,
             vendor_tag_query_ops_t **ops);
     // for get_metadata_vendor_tag_ops
     static const char* get_camera_vendor_section_name(
@@ -266,38 +223,30 @@
             const vendor_tag_query_ops_t *,
             uint32_t tag);
 
-    static int dump(camera2_device_t *, int fd);
+    static int dump(const camera2_device_t *, int fd);
 
+    /** For hw_device_t ops */
     static int close(struct hw_device_t* device);
 
     /****************************************************************************
-     * Data members
+     * Data members shared with implementations
      ***************************************************************************/
-
-  private:
-    static camera2_device_ops_t sDeviceOps;
-
-    struct QueueDstOps : public camera2_metadata_queue_dst_ops {
-        EmulatedCamera2 *parent;
-    };
-
-    struct QueueSrcOps : public camera2_metadata_queue_src_ops {
-        EmulatedCamera2 *parent;
-    };
-
-    struct StreamOps : public camera2_stream_ops {
-        EmulatedCamera2 *parent;
-    };
+  protected:
+    const camera2_request_queue_src_ops *mRequestQueueSrc;
+    const camera2_frame_queue_dst_ops *mFrameQueueDst;
+    camera2_notify_callback mNotifyCb;
+    void* mNotifyUserPtr;
 
     struct TagOps : public vendor_tag_query_ops {
         EmulatedCamera2 *parent;
     };
-
-    QueueDstOps mRequestQueueDstOps;
-    QueueDstOps mReprocessQueueDstOps;
-    QueueSrcOps mFrameQueueSrcOps;
-    StreamOps   mReprocessStreamOps;
     TagOps      mVendorTagOps;
+
+    /****************************************************************************
+     * Data members
+     ***************************************************************************/
+  private:
+    static camera2_device_ops_t sDeviceOps;
 };
 
 }; /* namespace android */
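
With the base class now stubbing every device-API virtual to INVALID_OPERATION, a concrete emulated camera overrides only what it supports and reads the shared mRequestQueueSrc/mFrameQueueDst/mNotifyCb members that the static callbacks fill in. A sketch of the extension point (subclass name hypothetical, base constructor signature assumed):

    class SketchCamera2 : public EmulatedCamera2 {
      public:
        SketchCamera2(int cameraId, struct hw_module_t* module)
            : EmulatedCamera2(cameraId, module) {}  // assumed base ctor

        virtual status_t getCameraInfo(struct camera_info* info) {
            // Required: the diff makes this pure virtual in EmulatedCamera2.
            return EmulatedBaseCamera::getCameraInfo(info);
        }

      protected:
        virtual int getInProgressCount() {
            // A real implementation would count requests queued via
            // mRequestQueueSrc; the base class returns INVALID_OPERATION.
            return 0;
        }
    };
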
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.cpp b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
index 2960751..84248ca 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
@@ -19,7 +19,7 @@
  * available for emulation.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera_Factory"
 #include <cutils/log.h>
 #include <cutils/properties.h>
@@ -44,6 +44,7 @@
           mFakeCameraNum(0),
           mConstructedOK(false)
 {
+    status_t res;
     /* Connect to the factory service in the emulator, and create Qemu cameras. */
     if (mQemuClient.connectClient(NULL) == NO_ERROR) {
         /* Connection has succeeded. Create emulated cameras for each camera
@@ -75,11 +76,13 @@
         switch (getBackCameraHalVersion()) {
             case 1:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera(camera_id, true,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             case 2:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera2(camera_id, true,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             default:
                 ALOGE("%s: Unknown back camera hal version requested: %d", __FUNCTION__,
@@ -88,12 +91,15 @@
         if (mEmulatedCameras[camera_id] != NULL) {
             ALOGV("%s: Back camera device version is %d", __FUNCTION__,
                     getBackCameraHalVersion());
-            if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
+            res = mEmulatedCameras[camera_id]->Initialize();
+            if (res != NO_ERROR) {
+                ALOGE("%s: Unable to intialize back camera %d: %s (%d)",
+                        __FUNCTION__, camera_id, strerror(-res), res);
                 delete mEmulatedCameras[camera_id];
-                mEmulatedCameras--;
+                mEmulatedCameraNum--;
             }
         } else {
-            mEmulatedCameras--;
+            mEmulatedCameraNum--;
             ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
         }
     }
@@ -121,25 +127,31 @@
         switch (getFrontCameraHalVersion()) {
             case 1:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera(camera_id, false,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             case 2:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera2(camera_id, false,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             default:
-                ALOGE("%s: Unknown front camera hal version requested: %d", __FUNCTION__,
+                ALOGE("%s: Unknown front camera hal version requested: %d",
+                        __FUNCTION__,
                         getFrontCameraHalVersion());
         }
         if (mEmulatedCameras[camera_id] != NULL) {
             ALOGV("%s: Front camera device version is %d", __FUNCTION__,
                     getFrontCameraHalVersion());
-            if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
+            res = mEmulatedCameras[camera_id]->Initialize();
+            if (res != NO_ERROR) {
+                ALOGE("%s: Unable to intialize front camera %d: %s (%d)",
+                        __FUNCTION__, camera_id, strerror(-res), res);
                 delete mEmulatedCameras[camera_id];
-                mEmulatedCameras--;
+                mEmulatedCameraNum--;
             }
         } else {
-            mEmulatedCameras--;
+            mEmulatedCameraNum--;
             ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
         }
     }
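
Besides the extra logging, the important fixes in this file are (a) constructing the back camera with facingBack = true instead of false, and (b) the failure path, where the old code decremented the mEmulatedCameras array pointer when the camera count mEmulatedCameraNum was intended, leaving the pointer one slot before its allocation. Condensed from the hunks above:

    // Corrected bookkeeping when a camera fails to initialize:
    status_t res = mEmulatedCameras[camera_id]->Initialize();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to initialize camera %d: %s (%d)",
                __FUNCTION__, camera_id, strerror(-res), res);
        delete mEmulatedCameras[camera_id];
        mEmulatedCameraNum--;   // decrement the count, not the array pointer
    }
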
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index aa62244..2e46d1e 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -19,15 +19,53 @@
  * functionality of an advanced fake camera.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera_FakeCamera2"
-#include <cutils/log.h>
-#include <cutils/properties.h>
+#include <utils/Log.h>
+
 #include "EmulatedFakeCamera2.h"
 #include "EmulatedCameraFactory.h"
+#include <ui/Rect.h>
+#include <ui/GraphicBufferMapper.h>
 
 namespace android {
 
+const uint32_t EmulatedFakeCamera2::kAvailableFormats[5] = {
+        HAL_PIXEL_FORMAT_RAW_SENSOR,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_YV12,
+        HAL_PIXEL_FORMAT_YCrCb_420_SP
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
+    640, 480
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizes[2] = {
+    640, 480
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableJpegSizes[2] = {
+    640, 480
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+
 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
         bool facingBack,
         struct hw_module_t* module)
@@ -38,17 +76,1578 @@
             facingBack ? "back" : "front");
 }
 
-EmulatedFakeCamera2::~EmulatedFakeCamera2()
-{
+EmulatedFakeCamera2::~EmulatedFakeCamera2() {
+    if (mCameraInfo != NULL) {
+        free_camera_metadata(mCameraInfo);
+    }
 }
 
 /****************************************************************************
  * Public API overrides
  ***************************************************************************/
 
-status_t EmulatedFakeCamera2::Initialize()
-{
+status_t EmulatedFakeCamera2::Initialize() {
+    status_t res;
+
+    res = constructStaticInfo(&mCameraInfo, true);
+    if (res != OK) {
+        ALOGE("%s: Unable to allocate static info: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    res = constructStaticInfo(&mCameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Unable to fill in static info: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    if (res != OK) return res;
+
+    mNextStreamId = 0;
+    mRawStreamCount = 0;
+    mProcessedStreamCount = 0;
+    mJpegStreamCount = 0;
+
     return NO_ERROR;
 }
 
+/****************************************************************************
+ * Camera module API overrides
+ ***************************************************************************/
+
+status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
+    status_t res;
+    ALOGV("%s", __FUNCTION__);
+
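+    // Construct the fake pipeline components, then start them: sensor first,
+    // followed by the configure and readout threads that feed and drain it.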
+    mConfigureThread = new ConfigureThread(this);
+    mReadoutThread = new ReadoutThread(this);
+    mSensor = new Sensor(this);
+    mJpegCompressor = new JpegCompressor(this);
+
+    mNextStreamId = 0;
+
+    res = mSensor->startUp();
+    if (res != NO_ERROR) return res;
+
+    res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
+    if (res != NO_ERROR) return res;
+
+    res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
+    if (res != NO_ERROR) return res;
+
+    return EmulatedCamera2::connectCamera(device);
+}
+
+status_t EmulatedFakeCamera2::closeCamera() {
+    Mutex::Autolock l(mMutex);
+
+    status_t res;
+    ALOGV("%s", __FUNCTION__);
+
+    res = mSensor->shutDown();
+    if (res != NO_ERROR) {
+        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
+        return res;
+    }
+
+    mConfigureThread->requestExit();
+    mReadoutThread->requestExit();
+    mJpegCompressor->cancel();
+
+    mConfigureThread->join();
+    mReadoutThread->join();
+
+    ALOGV("%s exit", __FUNCTION__);
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
+    info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+    info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
+    return EmulatedCamera2::getCameraInfo(info);
+}
+
+/****************************************************************************
+ * Camera device API overrides
+ ***************************************************************************/
+
+/** Request input queue */
+
+int EmulatedFakeCamera2::requestQueueNotify() {
+    ALOGV("Request queue notification received");
+
+    ALOG_ASSERT(mRequestQueueSrc != NULL,
+            "%s: Request queue src not set, but received queue notification!",
+            __FUNCTION__);
+    ALOG_ASSERT(mFrameQueueDst != NULL,
+            "%s: Frame queue dst not set, but received queue notification!",
+            __FUNCTION__);
+    ALOG_ASSERT(mStreams.size() != 0,
+            "%s: No streams allocated, but received queue notification!",
+            __FUNCTION__);
+    return mConfigureThread->newRequestAvailable();
+}
+
+int EmulatedFakeCamera2::getInProgressCount() {
+    Mutex::Autolock l(mMutex);
+
+    int requestCount = 0;
+    requestCount += mConfigureThread->getInProgressCount();
+    requestCount += mReadoutThread->getInProgressCount();
+    requestCount += mJpegCompressor->isBusy() ? 1 : 0;
+
+    return requestCount;
+}
+
+int EmulatedFakeCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request) {
+
+    if (request == NULL) return BAD_VALUE;
+    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
+        return BAD_VALUE;
+    }
+    status_t res;
+    // Pass 1, calculate size and allocate
+    res = constructDefaultRequest(request_template,
+            request,
+            true);
+    if (res != OK) {
+        return res;
+    }
+    // Pass 2, build request
+    res = constructDefaultRequest(request_template,
+            request,
+            false);
+    if (res != OK) {
+        ALOGE("Unable to populate new request for template %d",
+                request_template);
+    }
+
+    return res;
+}
+
+int EmulatedFakeCamera2::allocateStream(
+        uint32_t width,
+        uint32_t height,
+        int format,
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    Mutex::Autolock l(mMutex);
+
+    if (format != CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) {
+        unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
+        unsigned int formatIdx = 0;
+        for (; formatIdx < numFormats; formatIdx++) {
+            if (format == (int)kAvailableFormats[formatIdx]) break;
+        }
+        if (formatIdx == numFormats) {
+            ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
+            return BAD_VALUE;
+        }
+    } else {
+        // Emulator's opaque format is RGBA
+        format = HAL_PIXEL_FORMAT_RGBA_8888;
+    }
+
+    const uint32_t *availableSizes;
+    size_t availableSizeCount;
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            availableSizes = kAvailableRawSizes;
+            availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            availableSizes = kAvailableJpegSizes;
+            availableSizeCount = sizeof(kAvailableJpegSizes)/sizeof(uint32_t);
+            break;
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+            availableSizes = kAvailableProcessedSizes;
+            availableSizeCount = sizeof(kAvailableProcessedSizes)/sizeof(uint32_t);
+            break;
+        default:
+            ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
+            return BAD_VALUE;
+    }
+
+    // availableSizes holds flattened (width, height) pairs, so the pair
+    // count is half the element count.
+    unsigned int resIdx = 0;
+    for (; resIdx < availableSizeCount / 2; resIdx++) {
+        if (availableSizes[resIdx * 2] == width &&
+                availableSizes[resIdx * 2 + 1] == height) break;
+    }
+    if (resIdx == availableSizeCount / 2) {
+        ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
+                format, width, height);
+        return BAD_VALUE;
+    }
+
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            if (mRawStreamCount >= kMaxRawStreamCount) {
+                ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
+                        __FUNCTION__, mRawStreamCount);
+                return INVALID_OPERATION;
+            }
+            mRawStreamCount++;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (mJpegStreamCount >= kMaxJpegStreamCount) {
+                ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
+                        __FUNCTION__, mJpegStreamCount);
+                return INVALID_OPERATION;
+            }
+            mJpegStreamCount++;
+            break;
+        default:
+            if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
+                ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
+                        __FUNCTION__, mProcessedStreamCount);
+                return INVALID_OPERATION;
+            }
+            mProcessedStreamCount++;
+    }
+
+    Stream newStream;
+    newStream.ops = stream_ops;
+    newStream.width = width;
+    newStream.height = height;
+    newStream.format = format;
+    // TODO: Query stride from gralloc
+    newStream.stride = width;
+
+    mStreams.add(mNextStreamId, newStream);
+
+    *stream_id = mNextStreamId;
+    if (format_actual) *format_actual = format;
+    *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
+    *max_buffers = 4;
+
+    ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
+            *stream_id, width, height, format, *usage, *max_buffers);
+
+    mNextStreamId++;
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers) {
+    // Emulator doesn't need to register these with V4L2, etc.
+    ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
+            stream_id, num_buffers);
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    if (isStreamInUse(stream_id)) {
+        ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
+                stream_id);
+        return BAD_VALUE;
+    }
+
+    switch(mStreams.valueAt(streamIndex).format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            mRawStreamCount--;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            mJpegStreamCount--;
+            break;
+        default:
+            mProcessedStreamCount--;
+            break;
+    }
+
+    mStreams.removeItemsAt(streamIndex);
+
+    return NO_ERROR;
+}
+
+/** Custom tag definitions */
+
+// Emulator camera metadata sections
+enum {
+    EMULATOR_SCENE = VENDOR_SECTION,
+    END_EMULATOR_SECTIONS
+};
+
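+// Vendor tags pack the section number in the high 16 bits and the tag index
+// within that section in the low 16 bits.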
+enum {
+    EMULATOR_SCENE_START = EMULATOR_SCENE << 16,
+};
+
+// Emulator camera metadata tags
+enum {
+    // Hour of day to use for lighting calculations (0-23). Default: 12
+    EMULATOR_SCENE_HOUROFDAY = EMULATOR_SCENE_START,
+    EMULATOR_SCENE_END
+};
+
+unsigned int emulator_metadata_section_bounds[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION][2] = {
+    { EMULATOR_SCENE_START, EMULATOR_SCENE_END }
+};
+
+const char *emulator_metadata_section_names[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION] = {
+    "com.android.emulator.scene"
+};
+
+typedef struct emulator_tag_info {
+    const char *tag_name;
+    uint8_t     tag_type;
+} emulator_tag_info_t;
+
+emulator_tag_info_t emulator_scene[EMULATOR_SCENE_END - EMULATOR_SCENE_START] = {
+    { "hourOfDay", TYPE_INT32 }
+};
+
+emulator_tag_info_t *tag_info[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION] = {
+    emulator_scene
+};
+
+const char* EmulatedFakeCamera2::getVendorSectionName(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return NULL;
+    return emulator_metadata_section_names[section - VENDOR_SECTION];
+}
+
+const char* EmulatedFakeCamera2::getVendorTagName(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return NULL;
+    uint32_t section_index = section - VENDOR_SECTION;
+    if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+        return NULL;
+    }
+    uint32_t tag_index = tag & 0xFFFF;
+    return tag_info[section_index][tag_index].tag_name;
+}
+
+int EmulatedFakeCamera2::getVendorTagType(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section >= END_EMULATOR_SECTIONS) return -1;
+    uint32_t section_index = section - VENDOR_SECTION;
+    if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+        return -1;
+    }
+    uint32_t tag_index = tag & 0xFFFF;
+    return tag_info[section_index][tag_index].tag_type;
+}
+
+/** Shutdown and debug methods */
+
+int EmulatedFakeCamera2::dump(int fd) {
+    String8 result;
+
+    result.appendFormat("    Camera HAL device: EmulatedFakeCamera2\n");
+    result.appendFormat("      Streams:\n");
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        int id = mStreams.keyAt(i);
+        const Stream& s = mStreams.valueAt(i);
+        result.appendFormat(
+            "         Stream %d: %d x %d, format 0x%x, stride %d\n",
+            id, s.width, s.height, s.format, s.stride);
+    }
+
+    write(fd, result.string(), result.size());
+
+    return NO_ERROR;
+}
+
+void EmulatedFakeCamera2::signalError() {
+    // TODO: Let parent know so we can shut down cleanly
+    ALOGE("Worker thread is signaling a serious error");
+}
+
+/** Pipeline control worker thread methods */
+
+EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent) {
+    mRunning = false;
+}
+
+EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
+    Mutex::Autolock lock(mInputMutex);
+
+    ALOGV("Starting up ConfigureThread");
+    mRequest = NULL;
+    mActive  = false;
+    mRunning = true;
+
+    mInputSignal.signal();
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
+    Mutex::Autolock lock(mInputMutex);
+    if (!mRunning) {
+        ALOGV("Waiting for configure thread to start");
+        mInputSignal.wait(mInputMutex);
+    }
+    return OK;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
+    waitUntilRunning();
+
+    Mutex::Autolock lock(mInputMutex);
+
+    mActive = true;
+    mInputSignal.signal();
+
+    return OK;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInternalsMutex);
+
+    if (mNextBuffers == NULL) return false;
+    for (size_t i=0; i < mNextBuffers->size(); i++) {
+        if ((*mNextBuffers)[i].streamId == (int)id) return true;
+    }
+    return false;
+}
+
+int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
+    Mutex::Autolock lock(mInternalsMutex);
+    return mNextBuffers == NULL ? 0 : 1;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
+    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+    status_t res;
+
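+    // Each pass waits at most kWaitPerLoop so the thread can notice
+    // requestExit() promptly.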
+    // Check if we're currently processing or just waiting
+    {
+        Mutex::Autolock lock(mInputMutex);
+        if (!mActive) {
+            // Inactive, keep waiting until we've been signaled
+            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+            if (res != NO_ERROR && res != TIMED_OUT) {
+                ALOGE("%s: Error waiting for input requests: %d",
+                        __FUNCTION__, res);
+                return false;
+            }
+            if (!mActive) return true;
+            ALOGV("New request available");
+        }
+        // Active
+    }
+    if (mRequest == NULL) {
+        Mutex::Autolock il(mInternalsMutex);
+
+        ALOGV("Getting next request");
+        res = mParent->mRequestQueueSrc->dequeue_request(
+            mParent->mRequestQueueSrc,
+            &mRequest);
+        if (res != NO_ERROR) {
+            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
+            mParent->signalError();
+            return false;
+        }
+        if (mRequest == NULL) {
+            ALOGV("Request queue empty, going inactive");
+            // No requests available, go into inactive mode
+            Mutex::Autolock lock(mInputMutex);
+            mActive = false;
+            return true;
+        }
+        // Get necessary parameters for sensor config
+
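+        // Sorting lets find_camera_metadata_entry() binary-search the tags
+        // queried below.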
+        sort_camera_metadata(mRequest);
+
+        camera_metadata_entry_t streams;
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_REQUEST_OUTPUT_STREAMS,
+                &streams);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading output stream tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+
+        mNextBuffers = new Buffers;
+        mNextNeedsJpeg = false;
+        ALOGV("Setting up buffers for capture");
+        for (size_t i = 0; i < streams.count; i++) {
+            const Stream &s = mParent->getStreamInfo(streams.data.u8[i]);
+            StreamBuffer b;
+            b.streamId = streams.data.u8[i];
+            b.width  = s.width;
+            b.height = s.height;
+            b.format = s.format;
+            b.stride = s.stride;
+            mNextBuffers->push_back(b);
+            ALOGV("  Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
+                    i, b.streamId, b.width, b.height, b.format, b.stride);
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                mNextNeedsJpeg = true;
+            }
+        }
+
+        camera_metadata_entry_t e;
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_REQUEST_FRAME_COUNT,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame count tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameNumber = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading exposure time tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextExposureTime = *e.data.i64;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_FRAME_DURATION,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameDuration = *e.data.i64;
+
+        if (mNextFrameDuration <
+                mNextExposureTime + Sensor::kMinVerticalBlank) {
+            mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
+        }
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_SENSITIVITY,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextSensitivity = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                EMULATOR_SCENE_HOUROFDAY,
+                &e);
+        if (res == NO_ERROR) {
+            ALOGV("Setting hour: %d", *e.data.i32);
+            mParent->mSensor->getScene().setHour(*e.data.i32);
+        }
+
+        // Start waiting on sensor or JPEG block
+        if (mNextNeedsJpeg) {
+            ALOGV("Waiting for JPEG compressor");
+        } else {
+            ALOGV("Waiting for sensor");
+        }
+    }
+
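+    // A capture that includes a BLOB stream must wait for the single JPEG
+    // compressor to go idle, since compression runs asynchronously.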
+    if (mNextNeedsJpeg) {
+        bool jpegDone;
+        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
+        if (!jpegDone) return true;
+
+        ALOGV("Waiting for sensor");
+        mNextNeedsJpeg = false;
+    }
+    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
+
+    if (!vsync) return true;
+
+    Mutex::Autolock il(mInternalsMutex);
+    ALOGV("Configuring sensor for frame %d", mNextFrameNumber);
+    mParent->mSensor->setExposureTime(mNextExposureTime);
+    mParent->mSensor->setFrameDuration(mNextFrameDuration);
+    mParent->mSensor->setSensitivity(mNextSensitivity);
+
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
+
+        Stream s = mParent->getStreamInfo(b.streamId);
+
+        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+        if (res != NO_ERROR || b.buffer == NULL) {
+            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                    __FUNCTION__, b.streamId, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+
+        /* Lock the buffer from the perspective of the graphics mapper */
+        const Rect rect(s.width, s.height);
+
+        res = GraphicBufferMapper::get().lock(*(b.buffer),
+                GRALLOC_USAGE_SW_WRITE_OFTEN,
+                rect, (void**)&(b.img) );
+
+        if (res != NO_ERROR) {
+            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            s.ops->cancel_buffer(s.ops,
+                    b.buffer);
+            mParent->signalError();
+            return false;
+        }
+    }
+
+    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
+    mParent->mSensor->setDestinationBuffers(mNextBuffers);
+
+    mRequest = NULL;
+    mNextBuffers = NULL;
+
+    return true;
+}
+
+EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent),
+        mRunning(false),
+        mActive(false),
+        mRequest(NULL)
+{
+    mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
+    mInFlightHead = 0;
+    mInFlightTail = 0;
+}
+
+EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
+    delete[] mInFlightQueue;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
+    Mutex::Autolock lock(mInputMutex);
+    ALOGV("Starting up ReadoutThread");
+    mRunning = true;
+    mInputSignal.signal();
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
+    Mutex::Autolock lock(mInputMutex);
+    if (!mRunning) {
+        ALOGV("Waiting for readout thread to start");
+        mInputSignal.wait(mInputMutex);
+    }
+    return OK;
+}
+
+void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
+        camera_metadata_t *request,
+        Buffers *buffers) {
+    Mutex::Autolock lock(mInputMutex);
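+    // The in-flight queue is a fixed-size ring buffer; it is full when
+    // advancing the tail would land on the head.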
+    if ( (mInFlightTail + 1) % kInFlightQueueSize == mInFlightHead) {
+        ALOGE("In flight queue full, dropping captures");
+        mParent->signalError();
+        return;
+    }
+    mInFlightQueue[mInFlightTail].request = request;
+    mInFlightQueue[mInFlightTail].buffers = buffers;
+    mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
+
+    if (!mActive) {
+        mActive = true;
+        mInputSignal.signal();
+    }
+}
+
+bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInputMutex);
+
+    size_t i = mInFlightHead;
+    while (i != mInFlightTail) {
+        for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
+            if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
+                return true;
+        }
+        i = (i + 1) % kInFlightQueueSize;
+    }
+
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    if (mBuffers != NULL) {
+        for (i = 0; i < mBuffers->size(); i++) {
+            if ( (*mBuffers)[i].streamId == (int)id) return true;
+        }
+    }
+
+    return false;
+}
+
+int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
+    Mutex::Autolock lock(mInputMutex);
+    Mutex::Autolock iLock(mInternalsMutex);
+
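+    // Modular head-to-tail distance counts queued captures; add one more if
+    // a request is currently being read out.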
+    int requestCount =
+            ((mInFlightTail + kInFlightQueueSize) - mInFlightHead)
+            % kInFlightQueueSize;
+    requestCount += (mBuffers == NULL) ? 0 : 1;
+
+    return requestCount;
+}
+
+bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
+    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+    status_t res;
+
+    // Check if we're currently processing or just waiting
+    {
+        Mutex::Autolock lock(mInputMutex);
+        if (!mActive) {
+            // Inactive, keep waiting until we've been signaled
+            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+            if (res != NO_ERROR && res != TIMED_OUT) {
+                ALOGE("%s: Error waiting for capture requests: %d",
+                        __FUNCTION__, res);
+                mParent->signalError();
+                return false;
+            }
+            if (!mActive) return true;
+        }
+        // Active, see if we need a new request
+        if (mRequest == NULL) {
+            if (mInFlightHead == mInFlightTail) {
+                // Go inactive
+                ALOGV("Waiting for sensor data");
+                mActive = false;
+                return true;
+            } else {
+                Mutex::Autolock iLock(mInternalsMutex);
+                mRequest = mInFlightQueue[mInFlightHead].request;
+                mBuffers  = mInFlightQueue[mInFlightHead].buffers;
+                mInFlightQueue[mInFlightHead].request = NULL;
+                mInFlightQueue[mInFlightHead].buffers = NULL;
+                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
+                ALOGV("Ready to read out request %p, %zu buffers",
+                        mRequest, mBuffers->size());
+            }
+        }
+    }
+
+    // Active with request, wait on sensor to complete
+
+    nsecs_t captureTime;
+
+    bool gotFrame;
+    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+            &captureTime);
+
+    if (!gotFrame) return true;
+
+    // Got sensor data, construct frame and send it out
+    ALOGV("Readout: Constructing metadata and frames");
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    camera_metadata_entry_t metadataMode;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_METADATA_MODE,
+            &metadataMode);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading metadata mode tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    if (*metadataMode.data.u8 == ANDROID_REQUEST_METADATA_FULL) {
+        ALOGV("Metadata requested, constructing");
+
+        camera_metadata_t *frame = NULL;
+
+        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
+        size_t frame_data    = get_camera_metadata_data_count(mRequest);
+
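+        // Reserve room for the sensor timestamp and hour-of-day entries
+        // appended below.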
+        frame_entries += 2;
+        frame_data += 8;
+
+        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
+                frame_entries, frame_data, &frame);
+
+        if (res != NO_ERROR || frame == NULL) {
+            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+
+        res = append_camera_metadata(frame, mRequest);
+        if (res != NO_ERROR) {
+            ALOGE("Unable to append request metadata");
+        }
+
+        add_camera_metadata_entry(frame,
+                ANDROID_SENSOR_TIMESTAMP,
+                &captureTime,
+                1);
+
+        int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
+        camera_metadata_entry_t requestedHour;
+        res = find_camera_metadata_entry(frame,
+                EMULATOR_SCENE_HOUROFDAY,
+                &requestedHour);
+        if (res == NAME_NOT_FOUND) {
+            ALOGV("Adding vendor tag");
+            res = add_camera_metadata_entry(frame,
+                    EMULATOR_SCENE_HOUROFDAY,
+                    &hourOfDay, 1);
+            if (res != NO_ERROR) {
+                ALOGE("Unable to add vendor tag");
+            }
+        } else if (res == OK) {
+            ALOGV("Replacing value in vendor tag");
+            *requestedHour.data.i32 = hourOfDay;
+        } else {
+            ALOGE("Error looking up vendor tag");
+        }
+
+        // TODO: Collect all final values used from sensor in addition to timestamp
+
+        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
+                frame);
+    }
+
+    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
+    if (res != NO_ERROR) {
+        ALOGE("%s: Unable to return request buffer to queue: %d",
+                __FUNCTION__, res);
+        mParent->signalError();
+        return false;
+    }
+    mRequest = NULL;
+
+    int compressedBufferIndex = -1;
+    ALOGV("Processing %zu buffers", mBuffers->size());
+    for (size_t i = 0; i < mBuffers->size(); i++) {
+        const StreamBuffer &b = (*mBuffers)[i];
+        ALOGV("  Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+        if (b.streamId >= 0) {
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                // Assumes only one BLOB buffer type per capture
+                compressedBufferIndex = i;
+            } else {
+                ALOGV("Sending image buffer %d to output stream %d",
+                        i, b.streamId);
+                GraphicBufferMapper::get().unlock(*(b.buffer));
+                res = mParent->getStreamInfo(b.streamId).ops->enqueue_buffer(
+                    mParent->getStreamInfo(b.streamId).ops,
+                    captureTime, b.buffer);
+                if (res != OK) {
+                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
+                            strerror(-res), res);
+                    mParent->signalError();
+                }
+            }
+        }
+    }
+    if (compressedBufferIndex == -1) {
+        delete mBuffers;
+        mBuffers = NULL;
+    } else {
+        ALOGV("Starting JPEG compression for buffer %d, stream %d",
+                compressedBufferIndex,
+                (*mBuffers)[compressedBufferIndex].streamId);
+        mParent->mJpegCompressor->start(mBuffers, captureTime);
+        mBuffers = NULL;
+    }
+
+    return true;
+}
+
+/** Private methods */
+
+status_t EmulatedFakeCamera2::constructStaticInfo(
+        camera_metadata_t **info,
+        bool sizeRequest) const {
+
+    size_t entryCount = 0;
+    size_t dataCount = 0;
+    status_t ret;
+
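+// ADD_OR_SIZE appends the entry when building the metadata, or just tallies
+// entry/data counts when sizing; on failure it returns from the caller.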
+#define ADD_OR_SIZE( tag, data, count ) \
+    if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
+            tag, data, count) ) != OK ) return ret
+
+    // android.lens
+
+    static const float minFocusDistance = 0;
+    ADD_OR_SIZE(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE,
+            &minFocusDistance, 1);
+    ADD_OR_SIZE(ANDROID_LENS_HYPERFOCAL_DISTANCE,
+            &minFocusDistance, 1);
+
+    static const float focalLength = 3.30f; // mm
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS,
+            &focalLength, 1);
+    static const float aperture = 2.8f;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_APERTURES,
+            &aperture, 1);
+    static const float filterDensity = 0;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FILTER_DENSITY,
+            &filterDensity, 1);
+    static const uint8_t availableOpticalStabilization =
+            ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_OPTICAL_STABILIZATION,
+            &availableOpticalStabilization, 1);
+
+    static const int32_t lensShadingMapSize[] = {1, 1};
+    ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP_SIZE, lensShadingMapSize,
+            sizeof(lensShadingMapSize)/sizeof(int32_t));
+
+    static const float lensShadingMap[3 * 1 * 1 ] =
+            { 1.f, 1.f, 1.f };
+    ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP, lensShadingMap,
+            sizeof(lensShadingMap)/sizeof(float));
+
+    // Identity transform
+    static const int32_t geometricCorrectionMapSize[] = {2, 2};
+    ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP_SIZE,
+            geometricCorrectionMapSize,
+            sizeof(geometricCorrectionMapSize)/sizeof(int32_t));
+
+    static const float geometricCorrectionMap[2 * 3 * 2 * 2] = {
+            0.f, 0.f,  0.f, 0.f,  0.f, 0.f,
+            1.f, 0.f,  1.f, 0.f,  1.f, 0.f,
+            0.f, 1.f,  0.f, 1.f,  0.f, 1.f,
+            1.f, 1.f,  1.f, 1.f,  1.f, 1.f};
+    ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP,
+            geometricCorrectionMap,
+            sizeof(geometricCorrectionMap)/sizeof(float));
+
+    int32_t lensFacing = mFacingBack ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
+
+    float lensPosition[3];
+    if (mFacingBack) {
+        // Back-facing camera is center-top on device
+        lensPosition[0] = 0;
+        lensPosition[1] = 20;
+        lensPosition[2] = -5;
+    } else {
+        // Front-facing camera is center-right on device
+        lensPosition[0] = 20;
+        lensPosition[1] = 20;
+        lensPosition[2] = 0;
+    }
+    ADD_OR_SIZE(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
+            sizeof(float));
+
+    // android.sensor
+
+    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME_RANGE,
+            Sensor::kExposureTimeRange, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_MAX_FRAME_DURATION,
+            &Sensor::kFrameDurationRange[1], 1);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_AVAILABLE_SENSITIVITIES,
+            Sensor::kAvailableSensitivities,
+            sizeof(Sensor::kAvailableSensitivities)
+            /sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SENSOR_COLOR_FILTER_ARRANGEMENT,
+            &Sensor::kColorFilterArrangement, 1);
+
+    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
+    ADD_OR_SIZE(ANDROID_SENSOR_PHYSICAL_SIZE,
+            sensorPhysicalSize, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_PIXEL_ARRAY_SIZE,
+            Sensor::kResolution, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE,
+            Sensor::kResolution, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_WHITE_LEVEL,
+            &Sensor::kMaxRawValue, 1);
+
+    static const int32_t blackLevelPattern[4] = {
+            Sensor::kBlackLevel, Sensor::kBlackLevel,
+            Sensor::kBlackLevel, Sensor::kBlackLevel
+    };
+    ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+            blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
+
+    //TODO: sensor color calibration fields
+
+    // android.flash
+    static const uint8_t flashAvailable = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_AVAILABLE, &flashAvailable, 1);
+
+    static const int64_t flashChargeDuration = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_CHARGE_DURATION, &flashChargeDuration, 1);
+
+    // android.tonemap
+
+    static const int32_t tonemapCurvePoints = 128;
+    ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
+
+    // android.scaler
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
+            kAvailableFormats,
+            sizeof(kAvailableFormats)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
+            kAvailableRawSizes,
+            sizeof(kAvailableRawSizes)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
+            kAvailableRawMinDurations,
+            sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+            kAvailableProcessedSizes,
+            sizeof(kAvailableProcessedSizes)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
+            kAvailableProcessedMinDurations,
+            sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
+            kAvailableJpegSizes,
+            sizeof(kAvailableJpegSizes)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
+            kAvailableJpegMinDurations,
+            sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
+
+    static const float maxZoom = 10;
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_ZOOM,
+            &maxZoom, 1);
+
+    // android.jpeg
+
+    static const int32_t jpegThumbnailSizes[] = {
+            160, 120,
+            320, 240,
+            640, 480
+    };
+    ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+            jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
+
+    static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
+    ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
+    // android.stats
+
+    static const uint8_t availableFaceDetectModes[] = {
+            ANDROID_STATS_FACE_DETECTION_OFF
+    };
+    ADD_OR_SIZE(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES,
+            availableFaceDetectModes,
+            sizeof(availableFaceDetectModes));
+
+    static const int32_t maxFaceCount = 0;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_FACE_COUNT,
+            &maxFaceCount, 1);
+
+    static const int32_t histogramSize = 64;
+    ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_BUCKET_COUNT,
+            &histogramSize, 1);
+
+    static const int32_t maxHistogramCount = 1000;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_HISTOGRAM_COUNT,
+            &maxHistogramCount, 1);
+
+    static const int32_t sharpnessMapSize[2] = {64, 64};
+    ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_SIZE,
+            sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
+
+    static const int32_t maxSharpnessMapValue = 1000;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_SHARPNESS_MAP_VALUE,
+            &maxSharpnessMapValue, 1);
+
+    // android.control
+
+    static const uint8_t availableSceneModes[] = {
+            ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+            availableSceneModes, sizeof(availableSceneModes));
+
+    static const uint8_t availableEffects[] = {
+            ANDROID_CONTROL_EFFECT_OFF
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+            availableEffects, sizeof(availableEffects));
+
+    int32_t max3aRegions = 0;
+    ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
+            &max3aRegions, 1);
+
+    static const uint8_t availableAeModes[] = {
+            ANDROID_CONTROL_AE_OFF,
+            ANDROID_CONTROL_AE_ON
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+            availableAeModes, sizeof(availableAeModes));
+
+    static const camera_metadata_rational exposureCompensationStep = {
+            1, 3
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP,
+            &exposureCompensationStep, 1);
+
+    int32_t exposureCompensationRange[] = {-9, 9};
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE,
+            exposureCompensationRange,
+            sizeof(exposureCompensationRange)/sizeof(int32_t));
+
+    static const int32_t availableTargetFpsRanges[] = {
+            5, 30
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            availableTargetFpsRanges,
+            sizeof(availableTargetFpsRanges)/sizeof(int32_t));
+
+    static const uint8_t availableAntibandingModes[] = {
+            ANDROID_CONTROL_AE_ANTIBANDING_OFF,
+            ANDROID_CONTROL_AE_ANTIBANDING_AUTO
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+            availableAntibandingModes, sizeof(availableAntibandingModes));
+
+    static const uint8_t availableAwbModes[] = {
+            ANDROID_CONTROL_AWB_OFF,
+            ANDROID_CONTROL_AWB_AUTO,
+            ANDROID_CONTROL_AWB_INCANDESCENT,
+            ANDROID_CONTROL_AWB_FLUORESCENT,
+            ANDROID_CONTROL_AWB_DAYLIGHT,
+            ANDROID_CONTROL_AWB_SHADE
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+            availableAwbModes, sizeof(availableAwbModes));
+
+    static const uint8_t availableAfModes[] = {
+            ANDROID_CONTROL_AF_OFF
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+            availableAfModes, sizeof(availableAfModes));
+
+    static const uint8_t availableVstabModes[] = {
+            ANDROID_CONTROL_VIDEO_STABILIZATION_OFF
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+            availableVstabModes, sizeof(availableVstabModes));
+
+#undef ADD_OR_SIZE
+    /** Allocate metadata if sizing */
+    if (sizeRequest) {
+        ALOGV("Allocating %zu entries, %zu extra bytes for "
+                "static camera info",
+                entryCount, dataCount);
+        *info = allocate_camera_metadata(entryCount, dataCount);
+        if (*info == NULL) {
+            ALOGE("Unable to allocate camera static info "
+                    "(%zu entries, %zu bytes extra data)",
+                    entryCount, dataCount);
+            return NO_MEMORY;
+        }
+    }
+    return OK;
+}
+
+status_t EmulatedFakeCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request,
+        bool sizeRequest) const {
+
+    size_t entryCount = 0;
+    size_t dataCount = 0;
+    status_t ret;
+
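+// Same sizing/building helper as in constructStaticInfo().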
+#define ADD_OR_SIZE( tag, data, count ) \
+    if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
+            tag, data, count) ) != OK ) return ret
+
+    static const int64_t USEC = 1000LL;
+    static const int64_t MSEC = USEC * 1000LL;
+    static const int64_t SEC = MSEC * 1000LL;
+
+    /** android.request */
+
+    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_NONE;
+    ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
+
+    static const int32_t id = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
+
+    static const int32_t frameCount = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
+
+    // OUTPUT_STREAMS set by user
+    entryCount += 1;
+    dataCount += 5; // TODO: Should be maximum stream number
+
+    /** android.lens */
+
+    static const float focusDistance = 0;
+    ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
+
+    static const float aperture = 2.8f;
+    ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);
+
+    static const float focalLength = 5.0f;
+    ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
+
+    static const float filterDensity = 0;
+    ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
+
+    static const uint8_t opticalStabilizationMode =
+            ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+            &opticalStabilizationMode, 1);
+
+    // FOCUS_RANGE set only in frame
+
+    /** android.sensor */
+
+    static const int64_t exposureTime = 10 * MSEC;
+    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
+
+    static const int64_t frameDuration = 33333333L; // 1/30 s
+    ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+
+    static const int32_t sensitivity = 100;
+    ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
+
+    // TIMESTAMP set only in frame
+
+    /** android.flash */
+
+    static const uint8_t flashMode = ANDROID_FLASH_OFF;
+    ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);
+
+    static const uint8_t flashPower = 10;
+    ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
+
+    static const int64_t firingTime = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
+
+    /** Processing block modes */
+    uint8_t hotPixelMode = 0;
+    uint8_t demosaicMode = 0;
+    uint8_t noiseMode = 0;
+    uint8_t shadingMode = 0;
+    uint8_t geometricMode = 0;
+    uint8_t colorMode = 0;
+    uint8_t tonemapMode = 0;
+    uint8_t edgeMode = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      default:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
+    ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
+    ADD_OR_SIZE(ANDROID_NOISE_MODE, &noiseMode, 1);
+    ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
+    ADD_OR_SIZE(ANDROID_GEOMETRIC_MODE, &geometricMode, 1);
+    ADD_OR_SIZE(ANDROID_COLOR_MODE, &colorMode, 1);
+    ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
+    ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);
+
+    /** android.noise */
+    static const uint8_t noiseStrength = 5;
+    ADD_OR_SIZE(ANDROID_NOISE_STRENGTH, &noiseStrength, 1);
+
+    /** android.color */
+    static const float colorTransform[9] = {
+        1.0f, 0.f, 0.f,
+        0.f, 1.f, 0.f,
+        0.f, 0.f, 1.f
+    };
+    ADD_OR_SIZE(ANDROID_COLOR_TRANSFORM, colorTransform, 9);
+
+    /** android.tonemap */
+    static const float tonemapCurve[4] = {
+        0.f, 0.f,
+        1.f, 1.f
+    };
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
+
+    /** android.edge */
+    static const uint8_t edgeStrength = 5;
+    ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
+
+    /** android.scaler */
+    static const int32_t cropRegion[3] = {
+        0, 0, Sensor::kResolution[0]
+    };
+    ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
+
+    /** android.jpeg */
+    static const int32_t jpegQuality = 80;
+    ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
+
+    static const int32_t thumbnailSize[2] = {
+        640, 480
+    };
+    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
+
+    static const int32_t thumbnailQuality = 80;
+    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
+
+    static const double gpsCoordinates[2] = {
+        0, 0
+    };
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
+
+    static const uint8_t gpsProcessingMethod[32] = "None";
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
+
+    static const int64_t gpsTimestamp = 0;
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
+
+    static const int32_t jpegOrientation = 0;
+    ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+
+    /** android.stats */
+
+    static const uint8_t faceDetectMode = ANDROID_STATS_FACE_DETECTION_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_FACE_DETECT_MODE, &faceDetectMode, 1);
+
+    static const uint8_t histogramMode = ANDROID_STATS_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_MODE, &histogramMode, 1);
+
+    static const uint8_t sharpnessMapMode = ANDROID_STATS_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
+
+    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
+    // sharpnessMap only in frames
+
+    /** android.control */
+
+    uint8_t controlIntent = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        controlIntent = ANDROID_CONTROL_INTENT_PREVIEW;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        controlIntent = ANDROID_CONTROL_INTENT_STILL_CAPTURE;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        controlIntent = ANDROID_CONTROL_INTENT_VIDEO_RECORD;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        controlIntent = ANDROID_CONTROL_INTENT_VIDEO_SNAPSHOT;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        controlIntent = ANDROID_CONTROL_INTENT_ZERO_SHUTTER_LAG;
+        break;
+      default:
+        controlIntent = ANDROID_CONTROL_INTENT_CUSTOM;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
+
+    static const uint8_t controlMode = ANDROID_CONTROL_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);
+
+    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_OFF;
+    ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
+
+    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+    ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
+
+    static const uint8_t aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+
+    static const int32_t controlRegions[5] = {
+        0, 0, Sensor::kResolution[0], Sensor::kResolution[1], 1000
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
+
+    static const int32_t aeExpCompensation = 0;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION, &aeExpCompensation, 1);
+
+    static const int32_t aeTargetFpsRange[2] = {
+        10, 30
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
+
+    static const uint8_t aeAntibandingMode =
+            ANDROID_CONTROL_AE_ANTIBANDING_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
+
+    static const uint8_t awbMode =
+            ANDROID_CONTROL_AWB_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
+
+    uint8_t afMode = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_PICTURE;
+        break;
+      default:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);
+
+    ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
+
+    static const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
+
+    // aeState, awbState, afState only in frame
+
+    /** Allocate metadata if sizing */
+    if (sizeRequest) {
+        ALOGV("Allocating %zu entries, %zu extra bytes for "
+                "request template type %d",
+                entryCount, dataCount, request_template);
+        *request = allocate_camera_metadata(entryCount, dataCount);
+        if (*request == NULL) {
+            ALOGE("Unable to allocate new request template type %d "
+                    "(%zu entries, %zu bytes extra data)", request_template,
+                    entryCount, dataCount);
+            return NO_MEMORY;
+        }
+    }
+    return OK;
+#undef ADD_OR_SIZE
+}
+
+status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
+        bool sizeRequest,
+        size_t *entryCount,
+        size_t *dataCount,
+        uint32_t tag,
+        const void *entryData,
+        size_t entryDataCount) {
+    status_t res;
+    if (!sizeRequest) {
+        return add_camera_metadata_entry(request, tag, entryData,
+                entryDataCount);
+    } else {
+        int type = get_camera_metadata_tag_type(tag);
+        if (type < 0 ) return BAD_VALUE;
+        (*entryCount)++;
+        (*dataCount) += calculate_camera_metadata_entry_data_size(type,
+                entryDataCount);
+        return OK;
+    }
+}
+
+bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
+    // Assumes mMutex is locked; otherwise new requests could enter
+    // configureThread while readoutThread is being checked
+
+    // Check the stages in pipeline order (configure, readout, JPEG) so a
+    // request moving downstream between checks is still caught.
+    if (mConfigureThread->isStreamInUse(id) ||
+            mReadoutThread->isStreamInUse(id) ||
+            mJpegCompressor->isStreamInUse(id) ) {
+        ALOGE("%s: Stream %d is in use in active requests!",
+                __FUNCTION__, id);
+        return true;
+    }
+    return false;
+}
+
+const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
+    Mutex::Autolock lock(mMutex);
+
+    return mStreams.valueFor(streamId);
+}
+
 };  /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index 89b12d3..9a0b676 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -24,6 +24,14 @@
  */
 
 #include "EmulatedCamera2.h"
+#include "fake-pipeline2/Base.h"
+#include "fake-pipeline2/Sensor.h"
+#include "fake-pipeline2/JpegCompressor.h"
+#include <utils/Condition.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Thread.h>
 
 namespace android {
 
@@ -44,22 +52,238 @@
 
 public:
     /* Initializes EmulatedFakeCamera2 instance. */
-     status_t Initialize();
+    status_t Initialize();
 
     /****************************************************************************
-     * EmulatedCamera abstract API implementation.
+     * Camera Module API and generic hardware device API implementation
+     ***************************************************************************/
+public:
+
+    virtual status_t connectCamera(hw_device_t** device);
+
+    virtual status_t closeCamera();
+
+    virtual status_t getCameraInfo(struct camera_info *info);
+
+    /****************************************************************************
+     * EmulatedCamera2 abstract API implementation.
+     ***************************************************************************/
+protected:
+    /** Request input queue */
+
+    virtual int requestQueueNotify();
+
+    /** Count of requests in flight */
+    virtual int getInProgressCount();
+
+    /** Cancel all captures in flight */
+    //virtual int flushCapturesInProgress();
+
+    /** Construct default request */
+    virtual int constructDefaultRequest(
+            int request_template,
+            camera_metadata_t **request);
+
+    virtual int allocateStream(
+            uint32_t width,
+            uint32_t height,
+            int format,
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
+
+    virtual int registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
+
+    virtual int releaseStream(uint32_t stream_id);
+
+    // virtual int allocateReprocessStream(
+    //         uint32_t width,
+    //         uint32_t height,
+    //         uint32_t format,
+    //         const camera2_stream_ops_t *stream_ops,
+    //         uint32_t *stream_id,
+    //         uint32_t *format_actual,
+    //         uint32_t *usage,
+    //         uint32_t *max_buffers);
+
+    // virtual int releaseReprocessStream(uint32_t stream_id);
+
+    // virtual int triggerAction(uint32_t trigger_id,
+    //        int ext1,
+    //        int ext2);
+
+    /** Custom tag definitions */
+    virtual const char* getVendorSectionName(uint32_t tag);
+    virtual const char* getVendorTagName(uint32_t tag);
+    virtual int         getVendorTagType(uint32_t tag);
+
+    /** Debug methods */
+
+    virtual int dump(int fd);
+
+public:
+    /****************************************************************************
+     * Utility methods called by configure/readout threads and pipeline
      ***************************************************************************/
 
-protected:
+    // Get information about a given stream. Will lock mMutex
+    const Stream &getStreamInfo(uint32_t streamId);
+
+    // Notifies rest of camera subsystem of serious error
+    void signalError();
+
+private:
+    /****************************************************************************
+     * Utility methods
+     ***************************************************************************/
+    /** Construct static camera metadata, two-pass */
+    status_t constructStaticInfo(
+            camera_metadata_t **info,
+            bool sizeRequest) const;
+
+    /** Two-pass implementation of constructDefaultRequest */
+    status_t constructDefaultRequest(
+            int request_template,
+            camera_metadata_t **request,
+            bool sizeRequest) const;
+    /** Helper function for constructDefaultRequest */
+    static status_t addOrSize(camera_metadata_t *request,
+            bool sizeRequest,
+            size_t *entryCount,
+            size_t *dataCount,
+            uint32_t tag,
+            const void *entry_data,
+            size_t entry_count);
+
+    /** Determine if the stream id is listed in any currently-in-flight
+     * requests. Assumes mMutex is locked */
+    bool isStreamInUse(uint32_t streamId);
 
     /****************************************************************************
-     * Data memebers.
+     * Pipeline controller threads
+     ***************************************************************************/
+
+    class ConfigureThread: public Thread {
+      public:
+        ConfigureThread(EmulatedFakeCamera2 *parent);
+        ~ConfigureThread();
+
+        status_t waitUntilRunning();
+        status_t newRequestAvailable();
+        status_t readyToRun();
+
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
+      private:
+        EmulatedFakeCamera2 *mParent;
+
+        bool mRunning;
+        bool threadLoop();
+
+        Mutex mInputMutex; // Protects mActive
+        Condition mInputSignal;
+        bool mActive; // Whether we're waiting for input requests or actively
+                      // working on them
+
+        camera_metadata_t *mRequest;
+
+        Mutex mInternalsMutex; // Lock before accessing below members.
+        bool    mNextNeedsJpeg;
+        int32_t mNextFrameNumber;
+        int64_t mNextExposureTime;
+        int64_t mNextFrameDuration;
+        int32_t mNextSensitivity;
+        Buffers *mNextBuffers;
+    };
+
+    class ReadoutThread: public Thread {
+      public:
+        ReadoutThread(EmulatedFakeCamera2 *parent);
+        ~ReadoutThread();
+
+        status_t readyToRun();
+
+        // Input
+        status_t waitUntilRunning();
+        void setNextCapture(camera_metadata_t *request,
+                Buffers *buffers);
+
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
+      private:
+        EmulatedFakeCamera2 *mParent;
+
+        bool mRunning;
+        bool threadLoop();
+
+        // Inputs
+        Mutex mInputMutex; // Protects mActive, mInFlightQueue
+        Condition mInputSignal;
+        bool mActive;
+
+        static const int kInFlightQueueSize = 4;
+        struct InFlightQueue {
+            camera_metadata_t *request;
+            Buffers *buffers;
+        } *mInFlightQueue;
+
+        size_t mInFlightHead;
+        size_t mInFlightTail;
+
+        // Internals
+        Mutex mInternalsMutex;
+        camera_metadata_t *mRequest;
+        Buffers *mBuffers;
+
+    };
+
+    /****************************************************************************
+     * Static configuration information
+     ***************************************************************************/
+private:
+    static const uint32_t kMaxRawStreamCount = 1;
+    static const uint32_t kMaxProcessedStreamCount = 3;
+    static const uint32_t kMaxJpegStreamCount = 1;
+    static const uint32_t kAvailableFormats[];
+    static const uint32_t kAvailableRawSizes[];
+    static const uint64_t kAvailableRawMinDurations[];
+    static const uint32_t kAvailableProcessedSizes[];
+    static const uint64_t kAvailableProcessedMinDurations[];
+    static const uint32_t kAvailableJpegSizes[];
+    static const uint64_t kAvailableJpegMinDurations[];
+
+    /****************************************************************************
+     * Data members.
      ***************************************************************************/
 
 protected:
     /* Facing back (true) or front (false) switch. */
-    bool                        mFacingBack;
+    bool mFacingBack;
 
+private:
+    /** Mutex for calls through camera2 device interface */
+    Mutex mMutex;
+
+    /** Stream manipulation */
+    uint32_t mNextStreamId;
+    uint32_t mRawStreamCount;
+    uint32_t mProcessedStreamCount;
+    uint32_t mJpegStreamCount;
+
+    KeyedVector<uint32_t, Stream> mStreams;
+
+    /** Simulated hardware interfaces */
+    sp<Sensor> mSensor;
+    sp<JpegCompressor> mJpegCompressor;
+
+    /** Pipeline control threads */
+    sp<ConfigureThread> mConfigureThread;
+    sp<ReadoutThread>   mReadoutThread;
 };
 
 }; /* namespace android */
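
An aside on the two-pass metadata pattern declared above: constructStaticInfo
and constructDefaultRequest run once with sizeRequest=true to count entries and
data bytes, then a second time to fill the allocated buffer. A minimal sketch of
how the addOrSize helper could implement that split, assuming the
camera_metadata C API (get_camera_metadata_tag_type,
calculate_camera_metadata_entry_data_size, add_camera_metadata_entry); an
illustration, not part of the patch:

    static status_t addOrSize(camera_metadata_t *request, bool sizeRequest,
            size_t *entryCount, size_t *dataCount, uint32_t tag,
            const void *entryData, size_t numEntries) {
        if (sizeRequest) {
            // First pass: only accumulate the space this entry will need
            (*entryCount)++;
            *dataCount += calculate_camera_metadata_entry_data_size(
                    get_camera_metadata_tag_type(tag), numEntries);
            return OK;
        }
        // Second pass: the buffer exists, append the entry for real
        return add_camera_metadata_entry(request, tag, entryData, numEntries);
    }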
diff --git a/tools/emulator/system/camera/fake-pipeline2/Base.h b/tools/emulator/system/camera/fake-pipeline2/Base.h
new file mode 100644
index 0000000..7f6be16
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Base.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This file includes various basic structures that are needed by multiple parts
+ * of the fake camera 2 implementation.
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_BASE_H
+#define HW_EMULATOR_CAMERA2_BASE_H
+
+#include <system/window.h>
+#include <hardware/camera2.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+
+/* Internal structure for passing buffers across threads */
+struct StreamBuffer {
+    int streamId;
+    uint32_t width, height;
+    uint32_t format;
+    uint32_t stride;
+    buffer_handle_t *buffer;
+    uint8_t *img;
+};
+typedef Vector<StreamBuffer> Buffers;
+
+struct Stream {
+    const camera2_stream_ops_t *ops;
+    uint32_t width, height;
+    uint32_t format;
+    uint32_t stride;
+};
+
+} // namespace android
+
+#endif
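
StreamBuffer is deliberately plain data: the pipeline threads fill one per
output stream and hand the whole Buffers vector across thread boundaries by
pointer. A hypothetical snippet of assembling the per-frame list (field values
are illustrative only):

    Buffers *buffers = new Buffers();
    StreamBuffer b;
    b.streamId = 0;                        // destination stream
    b.width  = 640;
    b.height = 480;
    b.format = HAL_PIXEL_FORMAT_RGBA_8888;
    b.stride = 640;                        // in pixels (see Sensor readout)
    b.buffer = NULL;                       // gralloc handle once locked
    b.img    = NULL;                       // CPU-visible image pointer
    buffers->push_back(b);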
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
new file mode 100644
index 0000000..76fbb94
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera2_JpegCompressor"
+
+#include <utils/Log.h>
+#include <ui/GraphicBufferMapper.h>
+
+#include "JpegCompressor.h"
+#include "../EmulatedFakeCamera2.h"
+
+namespace android {
+
+JpegCompressor::JpegCompressor(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mIsBusy(false),
+        mParent(parent),
+        mBuffers(NULL),
+        mCaptureTime(0) {
+}
+
+JpegCompressor::~JpegCompressor() {
+    Mutex::Autolock lock(mMutex);
+}
+
+status_t JpegCompressor::start(Buffers *buffers,
+        nsecs_t captureTime) {
+    Mutex::Autolock lock(mMutex);
+    {
+        Mutex::Autolock busyLock(mBusyMutex);
+
+        if (mIsBusy) {
+            ALOGE("%s: Already processing a buffer!", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+
+        mIsBusy = true;
+
+        mBuffers = buffers;
+        mCaptureTime = captureTime;
+    }
+
+    status_t res;
+    res = run("EmulatedFakeCamera2::JpegCompressor");
+    if (res != OK) {
+        ALOGE("%s: Unable to start up compression thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        delete mBuffers;
+        mBuffers = NULL;
+        // Don't stay marked busy if the thread never started
+        Mutex::Autolock busyLock(mBusyMutex);
+        mIsBusy = false;
+    }
+    return res;
+}
+
+status_t JpegCompressor::cancel() {
+    requestExitAndWait();
+    return OK;
+}
+
+status_t JpegCompressor::readyToRun() {
+    return OK;
+}
+
+bool JpegCompressor::threadLoop() {
+    Mutex::Autolock lock(mMutex);
+    ALOGV("%s: Starting compression thread", __FUNCTION__);
+
+    // Find source and target buffers
+
+    // Reset result flags; these are members, also read by cleanUp()
+    mFoundJpeg = false;
+    mFoundAux = false;
+    for (size_t i = 0; i < mBuffers->size(); i++) {
+        const StreamBuffer &b = (*mBuffers)[i];
+        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+            mJpegBuffer = b;
+            mFoundJpeg = true;
+        } else if (b.streamId == -1) {
+            mAuxBuffer = b;
+            mFoundAux = true;
+        }
+        if (mFoundJpeg && mFoundAux) break;
+    }
+    if (!mFoundJpeg || !mFoundAux) {
+        ALOGE("%s: Unable to find buffers for JPEG source/destination",
+                __FUNCTION__);
+        cleanUp();
+        return false;
+    }
+
+    // Set up error management
+
+    mJpegErrorInfo = NULL;
+    JpegError error;
+    error.parent = this;
+
+    mCInfo.err = jpeg_std_error(&error);
+    mCInfo.err->error_exit = jpegErrorHandler;
+
+    jpeg_create_compress(&mCInfo);
+    if (checkError("Error initializing compression")) return false;
+
+    // Route compressed data straight to output stream buffer
+
+    JpegDestination jpegDestMgr;
+    jpegDestMgr.parent = this;
+    jpegDestMgr.init_destination = jpegInitDestination;
+    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
+    jpegDestMgr.term_destination = jpegTermDestination;
+
+    mCInfo.dest = &jpegDestMgr;
+
+    // Set up compression parameters
+
+    mCInfo.image_width = mAuxBuffer.width;
+    mCInfo.image_height = mAuxBuffer.height;
+    mCInfo.input_components = 3;
+    mCInfo.in_color_space = JCS_RGB;
+
+    jpeg_set_defaults(&mCInfo);
+    if (checkError("Error configuring defaults")) return false;
+
+    // Do compression
+
+    jpeg_start_compress(&mCInfo, TRUE);
+    if (checkError("Error starting compression")) return false;
+
+    size_t rowStride = mAuxBuffer.stride * 3;
+    const size_t kChunkSize = 32;
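+    // Note: feeding fixed 32-row chunks assumes image_height is a multiple of
+    // kChunkSize (true for the 640x480 sensor); otherwise the final chunk
+    // would index past the end of the auxiliary buffer.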
+    while (mCInfo.next_scanline < mCInfo.image_height) {
+        JSAMPROW chunk[kChunkSize];
+        for (size_t i = 0 ; i < kChunkSize; i++) {
+            chunk[i] = (JSAMPROW)
+                    (mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
+        }
+        jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
+        if (checkError("Error while compressing")) return false;
+        if (exitPending()) {
+            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
+            cleanUp();
+            return false;
+        }
+    }
+
+    jpeg_finish_compress(&mCInfo);
+    if (checkError("Error while finishing compression")) return false;
+
+    // Write to JPEG output stream
+
+    ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
+          mJpegBuffer.streamId);
+
+    GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
+    status_t res;
+    const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
+    res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
+    if (res != OK) {
+        ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
+                __FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
+        mParent->signalError();
+    }
+
+    // All done
+
+    cleanUp();
+
+    return false;
+}
+
+bool JpegCompressor::isBusy() {
+    Mutex::Autolock busyLock(mBusyMutex);
+    return mIsBusy;
+}
+
+bool JpegCompressor::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mBusyMutex);
+
+    if (mBuffers && mIsBusy) {
+        for (size_t i = 0; i < mBuffers->size(); i++) {
+            if ( (*mBuffers)[i].streamId == (int)id ) return true;
+        }
+    }
+    return false;
+}
+
+bool JpegCompressor::waitForDone(nsecs_t timeout) {
+    Mutex::Autolock lock(mBusyMutex);
+    status_t res = OK;
+    if (mIsBusy) {
+        res = mDone.waitRelative(mBusyMutex, timeout);
+    }
+    return (res == OK);
+}
+
+bool JpegCompressor::checkError(const char *msg) {
+    if (mJpegErrorInfo) {
+        char errBuffer[JMSG_LENGTH_MAX];
+        mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
+        ALOGE("%s: %s: %s",
+                __FUNCTION__, msg, errBuffer);
+        cleanUp();
+        mJpegErrorInfo = NULL;
+        return true;
+    }
+    return false;
+}
+
+void JpegCompressor::cleanUp() {
+    jpeg_destroy_compress(&mCInfo);
+    Mutex::Autolock lock(mBusyMutex);
+
+    if (mFoundAux) {
+        delete[] mAuxBuffer.img;
+    }
+    delete mBuffers;
+    mBuffers = NULL;
+
+    mIsBusy = false;
+    mDone.signal();
+}
+
+void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
+    JpegError *error = static_cast<JpegError*>(cinfo->err);
+    error->parent->mJpegErrorInfo = cinfo;
+}
+
+void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
+    JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
+    ALOGV("%s: Setting destination to %p, size %d",
+            __FUNCTION__, dest->parent->mJpegBuffer.img, kMaxJpegSize);
+    dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer.img);
+    dest->free_in_buffer = kMaxJpegSize;
+}
+
+boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
+    ALOGE("%s: JPEG destination buffer overflow!",
+            __FUNCTION__);
+    return true;
+}
+
+void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
+    ALOGV("%s: Done writing JPEG data. %d bytes left in buffer",
+            __FUNCTION__, cinfo->dest->free_in_buffer);
+}
+
+} // namespace android
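
The hand-rolled destination manager above exists because the platform's libjpeg
predates the built-in memory destination. With libjpeg 8 or libjpeg-turbo, the
same route-to-memory compression can be sketched far more briefly (standalone,
not emulator code; jpeg_mem_dest allocates and grows the output buffer):

    #include <cstdlib>
    extern "C" {
    #include <jpeglib.h>
    }

    // Compress packed RGB into a library-allocated buffer; caller free()s it.
    static unsigned char *compressRgbToJpeg(const unsigned char *rgb,
            int width, int height, unsigned long *jpegSize) {
        jpeg_compress_struct cinfo;
        jpeg_error_mgr jerr;
        cinfo.err = jpeg_std_error(&jerr);
        jpeg_create_compress(&cinfo);

        unsigned char *jpegBuf = NULL;  // NULL: let libjpeg allocate
        *jpegSize = 0;
        jpeg_mem_dest(&cinfo, &jpegBuf, jpegSize);

        cinfo.image_width = width;
        cinfo.image_height = height;
        cinfo.input_components = 3;     // packed RGB input
        cinfo.in_color_space = JCS_RGB;
        jpeg_set_defaults(&cinfo);

        jpeg_start_compress(&cinfo, TRUE);
        while (cinfo.next_scanline < cinfo.image_height) {
            JSAMPROW row = const_cast<unsigned char*>(
                    rgb + cinfo.next_scanline * (size_t)width * 3);
            jpeg_write_scanlines(&cinfo, &row, 1);
        }
        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);
        return jpegBuf;
    }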
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h
new file mode 100644
index 0000000..ea2a84f
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * This class simulates a hardware JPEG compressor.  It receives image buffers
+ * in RGBA_8888 format, processes them in a worker thread, and then pushes them
+ * out to their destination stream.
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_JPEG_H
+#define HW_EMULATOR_CAMERA2_JPEG_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+
+#include "Base.h"
+
+#include <stdio.h>
+
+extern "C" {
+#include <jpeglib.h>
+}
+
+namespace android {
+
+class EmulatedFakeCamera2;
+
+class JpegCompressor: private Thread, public virtual RefBase {
+  public:
+
+    JpegCompressor(EmulatedFakeCamera2 *parent);
+    ~JpegCompressor();
+
+    // Start compressing COMPRESSED format buffers; JpegCompressor takes
+    // ownership of the Buffers vector.
+    status_t start(Buffers *buffers,
+            nsecs_t captureTime);
+
+    status_t cancel();
+
+    bool isBusy();
+    bool isStreamInUse(uint32_t id);
+
+    bool waitForDone(nsecs_t timeout);
+
+    // TODO: Measure this
+    static const size_t kMaxJpegSize = 300000;
+
+  private:
+    Mutex mBusyMutex;
+    bool mIsBusy;
+    Condition mDone;
+
+    Mutex mMutex;
+
+    EmulatedFakeCamera2 *mParent;
+
+    Buffers *mBuffers;
+    nsecs_t mCaptureTime;
+
+    StreamBuffer mJpegBuffer, mAuxBuffer;
+    bool mFoundJpeg, mFoundAux;
+
+    jpeg_compress_struct mCInfo;
+
+    struct JpegError : public jpeg_error_mgr {
+        JpegCompressor *parent;
+    };
+    j_common_ptr mJpegErrorInfo;
+
+    struct JpegDestination : public jpeg_destination_mgr {
+        JpegCompressor *parent;
+    };
+
+    static void jpegErrorHandler(j_common_ptr cinfo);
+
+    static void jpegInitDestination(j_compress_ptr cinfo);
+    static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
+    static void jpegTermDestination(j_compress_ptr cinfo);
+
+    bool checkError(const char *msg);
+    void cleanUp();
+
+    /**
+     * Inherited Thread virtual overrides
+     */
+  private:
+    virtual status_t readyToRun();
+    virtual bool threadLoop();
+};
+
+} // namespace android
+
+#endif
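
Per this header's contract, the caller hands over a Buffers vector (ownership
transfers on start) and then polls or blocks with a timeout. A hypothetical call
site, where compressor and buffers come from the surrounding pipeline:

    // Names are illustrative; error handling trimmed for brevity.
    if (!compressor->isBusy()) {
        status_t res = compressor->start(buffers, captureTime); // takes ownership
        if (res != OK) ALOGE("JPEG start failed: %d", res);
    }
    // ... later, bound the wait to one second:
    if (!compressor->waitForDone(1000000000LL)) {  // timeout in ns
        ALOGW("JPEG compression still in progress");
    }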
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.cpp b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
new file mode 100644
index 0000000..ca50350
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
@@ -0,0 +1,459 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera_Scene"
+#include <utils/Log.h>
+#include <stdlib.h>
+
+#include "Scene.h"
+
+// TODO: This should probably be done host-side in OpenGL for speed and better
+// quality
+
+namespace android {
+
+// Define single-letter shortcuts for scene definition, for directly indexing
+// mCurrentColors
+#define G (Scene::GRASS * Scene::NUM_CHANNELS)
+#define S (Scene::GRASS_SHADOW * Scene::NUM_CHANNELS)
+#define H (Scene::HILL * Scene::NUM_CHANNELS)
+#define W (Scene::WALL * Scene::NUM_CHANNELS)
+#define R (Scene::ROOF * Scene::NUM_CHANNELS)
+#define D (Scene::DOOR * Scene::NUM_CHANNELS)
+#define C (Scene::CHIMNEY * Scene::NUM_CHANNELS)
+#define I (Scene::WINDOW * Scene::NUM_CHANNELS)
+#define U (Scene::SUN * Scene::NUM_CHANNELS)
+#define K (Scene::SKY * Scene::NUM_CHANNELS)
+#define M (Scene::MOON * Scene::NUM_CHANNELS)
+
+const int Scene::kSceneWidth = 20;
+const int Scene::kSceneHeight = 20;
+
+const uint8_t Scene::kScene[Scene::kSceneWidth * Scene::kSceneHeight] = {
+    //      5         10        15        20
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K, // 5
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,H,H,H,
+    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,C,C,H,H,H,
+    K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,C,C,H,H,H,
+    H,K,K,K,K,K,H,R,R,R,R,R,R,R,R,R,R,R,R,H, // 10
+    H,K,K,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
+    H,H,H,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
+    H,H,H,K,K,H,H,H,W,W,W,W,W,W,W,W,W,W,H,H,
+    S,S,S,G,G,S,S,S,W,W,W,W,W,W,W,W,W,W,S,S,
+    S,G,G,G,G,S,S,S,W,I,I,W,D,D,W,I,I,W,S,S, // 15
+    G,G,G,G,G,G,S,S,W,I,I,W,D,D,W,I,I,W,S,S,
+    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
+    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
+    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G,
+    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G, // 20
+    //      5         10        15        20
+};
+
+#undef G
+#undef S
+#undef H
+#undef W
+#undef R
+#undef D
+#undef C
+#undef I
+#undef U
+#undef K
+#undef M
+
+Scene::Scene(
+    int sensorWidthPx,
+    int sensorHeightPx,
+    float sensorSensitivity):
+        mSensorWidth(sensorWidthPx),
+        mSensorHeight(sensorHeightPx),
+        mHour(12),
+        mExposureDuration(0.033f),
+        mSensorSensitivity(sensorSensitivity)
+{
+    // Map scene to sensor pixels
+    if (mSensorWidth > mSensorHeight) {
+        mMapDiv = (mSensorWidth / (kSceneWidth + 1) ) + 1;
+    } else {
+        mMapDiv = (mSensorHeight / (kSceneHeight + 1) ) + 1;
+    }
+    mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
+    mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;
+
+    // Assume that sensor filters are sRGB primaries to start
+    mFilterR[0]  =  3.2406f; mFilterR[1]  = -1.5372f; mFilterR[2]  = -0.4986f;
+    mFilterGr[0] = -0.9689f; mFilterGr[1] =  1.8758f; mFilterGr[2] =  0.0415f;
+    mFilterGb[0] = -0.9689f; mFilterGb[1] =  1.8758f; mFilterGb[2] =  0.0415f;
+    mFilterB[0]  =  0.0557f; mFilterB[1]  = -0.2040f; mFilterB[2]  =  1.0570f;
+
+}
+
+Scene::~Scene() {
+}
+
+void Scene::setColorFilterXYZ(
+        float rX, float rY, float rZ,
+        float grX, float grY, float grZ,
+        float gbX, float gbY, float gbZ,
+        float bX, float bY, float bZ) {
+    mFilterR[0]  = rX;  mFilterR[1]  = rY;  mFilterR[2]  = rZ;
+    mFilterGr[0] = grX; mFilterGr[1] = grY; mFilterGr[2] = grZ;
+    mFilterGb[0] = gbX; mFilterGb[1] = gbY; mFilterGb[2] = gbZ;
+    mFilterB[0]  = bX;  mFilterB[1]  = bY;  mFilterB[2]  = bZ;
+}
+
+void Scene::setHour(int hour) {
+    ALOGV("Hour set to: %d", hour);
+    mHour = hour % 24;
+}
+
+int Scene::getHour() {
+    return mHour;
+}
+
+void Scene::setExposureDuration(float seconds) {
+    mExposureDuration = seconds;
+}
+
+void Scene::calculateScene(nsecs_t time) {
+    // Calculate time fractions for interpolation
+    int timeIdx = mHour / kTimeStep;
+    int nextTimeIdx = (timeIdx + 1) % (24 / kTimeStep);
+    const nsecs_t kOneHourInNsec = 1e9 * 60 * 60;
+    nsecs_t timeSinceIdx = (mHour - timeIdx * kTimeStep) * kOneHourInNsec + time;
+    float timeFrac = timeSinceIdx / (float)(kOneHourInNsec * kTimeStep);
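+    // Worked example: mHour = 13 with kTimeStep = 2 gives timeIdx = 6 (the
+    // 12:00 table entry) and nextTimeIdx = 7; with time = 0, timeFrac = 0.5,
+    // an even blend of the two entries.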
+
+    // Determine overall sunlight levels
+    float sunLux =
+            kSunlight[timeIdx] * (1 - timeFrac) +
+            kSunlight[nextTimeIdx] * timeFrac;
+    ALOGV("Sun lux: %f", sunLux);
+
+    float sunShadeLux = sunLux * (kDaylightShadeIllum / kDirectSunIllum);
+
+    // Determine sun/shade illumination chromaticity
+    float currentSunXY[2];
+    float currentShadeXY[2];
+
+    const float *prevSunXY, *nextSunXY;
+    const float *prevShadeXY, *nextShadeXY;
+    if (kSunlight[timeIdx] == kSunsetIllum ||
+            kSunlight[timeIdx] == kTwilightIllum) {
+        prevSunXY = kSunsetXY;
+        prevShadeXY = kSunsetXY;
+    } else {
+        prevSunXY = kDirectSunlightXY;
+        prevShadeXY = kDaylightXY;
+    }
+    if (kSunlight[nextTimeIdx] == kSunsetIllum ||
+            kSunlight[nextTimeIdx] == kTwilightIllum) {
+        nextSunXY = kSunsetXY;
+        nextShadeXY = kSunsetXY;
+    } else {
+        nextSunXY = kDirectSunlightXY;
+        nextShadeXY = kDaylightXY;
+    }
+    currentSunXY[0] = prevSunXY[0] * (1 - timeFrac) +
+            nextSunXY[0] * timeFrac;
+    currentSunXY[1] = prevSunXY[1] * (1 - timeFrac) +
+            nextSunXY[1] * timeFrac;
+
+    currentShadeXY[0] = prevShadeXY[0] * (1 - timeFrac) +
+            nextShadeXY[0] * timeFrac;
+    currentShadeXY[1] = prevShadeXY[1] * (1 - timeFrac) +
+            nextShadeXY[1] * timeFrac;
+
+    ALOGV("Sun XY: %f, %f, Shade XY: %f, %f",
+            currentSunXY[0], currentSunXY[1],
+            currentShadeXY[0], currentShadeXY[1]);
+
+    // Converting for xyY to XYZ:
+    // X = Y / y * x
+    // Y = Y
+    // Z = Y / y * (1 - x - y);
+    float sunXYZ[3] = {
+        sunLux / currentSunXY[1] * currentSunXY[0],
+        sunLux,
+        sunLux / currentSunXY[1] *
+        (1 - currentSunXY[0] - currentSunXY[1])
+    };
+    float sunShadeXYZ[3] = {
+        sunShadeLux / currentShadeXY[1] * currentShadeXY[0],
+        sunShadeLux,
+        sunShadeLux / currentShadeXY[1] *
+        (1 - currentShadeXY[0] - currentShadeXY[1])
+    };
+    ALOGV("Sun XYZ: %f, %f, %f",
+            sunXYZ[0], sunXYZ[1], sunXYZ[2]);
+    ALOGV("Sun shade XYZ: %f, %f, %f",
+            sunShadeXYZ[0], sunShadeXYZ[1], sunShadeXYZ[2]);
+
+    // Determine moonlight levels
+    float moonLux =
+            kMoonlight[timeIdx] * (1 - timeFrac) +
+            kMoonlight[nextTimeIdx] * timeFrac;
+    float moonShadeLux = moonLux * (kDaylightShadeIllum / kDirectSunIllum);
+
+    float moonXYZ[3] = {
+        moonLux / kMoonlightXY[1] * kMoonlightXY[0],
+        moonLux,
+        moonLux / kMoonlightXY[1] *
+        (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+    float moonShadeXYZ[3] = {
+        moonShadeLux / kMoonlightXY[1] * kMoonlightXY[0],
+        moonShadeLux,
+        moonShadeLux / kMoonlightXY[1] *
+        (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+
+    // Determine starlight level
+    const float kClearNightXYZ[3] = {
+        kClearNightIllum / kMoonlightXY[1] * kMoonlightXY[0],
+        kClearNightIllum,
+        kClearNightIllum / kMoonlightXY[1] *
+            (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+
+    // Calculate direct and shaded light
+    float directIllumXYZ[3] = {
+        sunXYZ[0] + moonXYZ[0] + kClearNightXYZ[0],
+        sunXYZ[1] + moonXYZ[1] + kClearNightXYZ[1],
+        sunXYZ[2] + moonXYZ[2] + kClearNightXYZ[2],
+    };
+
+    float shadeIllumXYZ[3] = {
+        kClearNightXYZ[0],
+        kClearNightXYZ[1],
+        kClearNightXYZ[2]
+    };
+
+    shadeIllumXYZ[0] += (mHour < kSunOverhead) ? sunXYZ[0] : sunShadeXYZ[0];
+    shadeIllumXYZ[1] += (mHour < kSunOverhead) ? sunXYZ[1] : sunShadeXYZ[1];
+    shadeIllumXYZ[2] += (mHour < kSunOverhead) ? sunXYZ[2] : sunShadeXYZ[2];
+
+    // Moon up period covers 23->0 transition, shift for simplicity
+    int adjHour = (mHour + 12) % 24;
+    int adjMoonOverhead = (kMoonOverhead + 12 ) % 24;
+    shadeIllumXYZ[0] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[0] : moonShadeXYZ[0];
+    shadeIllumXYZ[1] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[1] : moonShadeXYZ[1];
+    shadeIllumXYZ[2] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[2] : moonShadeXYZ[2];
+
+    ALOGV("Direct XYZ: %f, %f, %f",
+            directIllumXYZ[0],directIllumXYZ[1],directIllumXYZ[2]);
+    ALOGV("Shade XYZ: %f, %f, %f",
+            shadeIllumXYZ[0], shadeIllumXYZ[1], shadeIllumXYZ[2]);
+
+    for (int i = 0; i < NUM_MATERIALS; i++) {
+        // Converting for xyY to XYZ:
+        // X = Y / y * x
+        // Y = Y
+        // Z = Y / y * (1 - x - y);
+        float matXYZ[3] = {
+            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
+              kMaterials_xyY[i][0],
+            kMaterials_xyY[i][2],
+            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
+              (1 - kMaterials_xyY[i][0] - kMaterials_xyY[i][1])
+        };
+
+        if (kMaterialsFlags[i] == 0 || kMaterialsFlags[i] & kSky) {
+            matXYZ[0] *= directIllumXYZ[0];
+            matXYZ[1] *= directIllumXYZ[1];
+            matXYZ[2] *= directIllumXYZ[2];
+        } else if (kMaterialsFlags[i] & kShadowed) {
+            matXYZ[0] *= shadeIllumXYZ[0];
+            matXYZ[1] *= shadeIllumXYZ[1];
+            matXYZ[2] *= shadeIllumXYZ[2];
+        } // else if (kMaterialsFlags[i] & kSelfLit), do nothing
+
+        ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
+        float luxToElectrons = mSensorSensitivity * mExposureDuration /
+                (kAperture * kAperture);
+        mCurrentColors[i*NUM_CHANNELS + 0] =
+                (mFilterR[0] * matXYZ[0] +
+                 mFilterR[1] * matXYZ[1] +
+                 mFilterR[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 1] =
+                (mFilterGr[0] * matXYZ[0] +
+                 mFilterGr[1] * matXYZ[1] +
+                 mFilterGr[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 2] =
+                (mFilterGb[0] * matXYZ[0] +
+                 mFilterGb[1] * matXYZ[1] +
+                 mFilterGb[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 3] =
+                (mFilterB[0] * matXYZ[0] +
+                 mFilterB[1] * matXYZ[1] +
+                 mFilterB[2] * matXYZ[2])
+                * luxToElectrons;
+
+        ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
+                mCurrentColors[i*NUM_CHANNELS + 0],
+                mCurrentColors[i*NUM_CHANNELS + 1],
+                mCurrentColors[i*NUM_CHANNELS + 2],
+                mCurrentColors[i*NUM_CHANNELS + 3]);
+    }
+    // Shake viewpoint
+    mHandshakeX = (rand() % mMapDiv) / 4 - mMapDiv / 8;
+    mHandshakeY = (rand() % mMapDiv) / 4 - mMapDiv / 8;
+    // Set starting pixel
+    setReadoutPixel(0,0);
+}
+
+void Scene::setReadoutPixel(int x, int y) {
+    mCurrentX = x;
+    mCurrentY = y;
+    mSubX = (x + mOffsetX + mHandshakeX) % mMapDiv;
+    mSubY = (y + mOffsetY + mHandshakeY) % mMapDiv;
+    mSceneX = (x + mOffsetX + mHandshakeX) / mMapDiv;
+    mSceneY = (y + mOffsetY + mHandshakeY) / mMapDiv;
+    mSceneIdx = mSceneY * kSceneWidth + mSceneX;
+    mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
+}
+
+const uint32_t* Scene::getPixelElectrons() {
+    const uint32_t *pixel = mCurrentSceneMaterial;
+    mCurrentX++;
+    mSubX++;
+    if (mCurrentX >= mSensorWidth) {
+        mCurrentX = 0;
+        mCurrentY++;
+        if (mCurrentY >= mSensorHeight) mCurrentY = 0;
+        setReadoutPixel(mCurrentX, mCurrentY);
+    } else if (mSubX > mMapDiv) {
+        mSceneIdx++;
+        mSceneX++;
+        mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
+        mSubX = 0;
+    }
+    return pixel;
+}
+
+// RGB->YUV, Jpeg standard
+const float Scene::kRgb2Yuv[12] = {
+       0.299f,    0.587f,    0.114f,    0.f,
+    -0.16874f, -0.33126f,      0.5f, -128.f,
+         0.5f, -0.41869f, -0.08131f, -128.f,
+};
+
+// Aperture of imaging lens
+const float Scene::kAperture = 2.8;
+
+// Sun illumination levels through the day
+const float Scene::kSunlight[24/kTimeStep] =
+{
+    0, // 00:00
+    0,
+    0,
+    kTwilightIllum, // 06:00
+    kDirectSunIllum,
+    kDirectSunIllum,
+    kDirectSunIllum, // 12:00
+    kDirectSunIllum,
+    kDirectSunIllum,
+    kSunsetIllum, // 18:00
+    kTwilightIllum,
+    0
+};
+
+// Moon illumination levels through the day
+const float Scene::kMoonlight[24/kTimeStep] =
+{
+    kFullMoonIllum, // 00:00
+    kFullMoonIllum,
+    0,
+    0, // 06:00
+    0,
+    0,
+    0, // 12:00
+    0,
+    0,
+    0, // 18:00
+    0,
+    kFullMoonIllum
+};
+
+const int Scene::kSunOverhead = 12;
+const int Scene::kMoonOverhead = 0;
+
+// Used for sun illumination levels
+const float Scene::kDirectSunIllum     = 100000;
+const float Scene::kSunsetIllum        = 400;
+const float Scene::kTwilightIllum      = 4;
+// Used for moon illumination levels
+const float Scene::kFullMoonIllum      = 1;
+// Other illumination levels
+const float Scene::kDaylightShadeIllum = 20000;
+const float Scene::kClearNightIllum    = 2e-3;
+const float Scene::kStarIllum          = 2e-6;
+const float Scene::kLivingRoomIllum    = 50;
+
+const float Scene::kIncandescentXY[2]   = { 0.44757f, 0.40745f};
+const float Scene::kDirectSunlightXY[2] = { 0.34842f, 0.35161f};
+const float Scene::kDaylightXY[2]       = { 0.31271f, 0.32902f};
+const float Scene::kNoonSkyXY[2]        = { 0.346f,   0.359f};
+const float Scene::kMoonlightXY[2]      = { 0.34842f, 0.35161f};
+const float Scene::kSunsetXY[2]         = { 0.527f,   0.413f};
+
+const uint8_t Scene::kSelfLit  = 0x01;
+const uint8_t Scene::kShadowed = 0x02;
+const uint8_t Scene::kSky      = 0x04;
+
+// For non-self-lit materials, the Y component is normalized with 1=full
+// reflectance; for self-lit materials, it's the constant illuminance in lux.
+const float Scene::kMaterials_xyY[Scene::NUM_MATERIALS][3] = {
+    { 0.3688f, 0.4501f, .1329f }, // GRASS
+    { 0.3688f, 0.4501f, .1329f }, // GRASS_SHADOW
+    { 0.3986f, 0.5002f, .4440f }, // HILL
+    { 0.3262f, 0.5040f, .2297f }, // WALL
+    { 0.4336f, 0.3787f, .1029f }, // ROOF
+    { 0.3316f, 0.2544f, .0639f }, // DOOR
+    { 0.3425f, 0.3577f, .0887f }, // CHIMNEY
+    { kIncandescentXY[0], kIncandescentXY[1], kLivingRoomIllum }, // WINDOW
+    { kDirectSunlightXY[0], kDirectSunlightXY[1], kDirectSunIllum }, // SUN
+    { kNoonSkyXY[0], kNoonSkyXY[1], kDaylightShadeIllum / kDirectSunIllum }, // SKY
+    { kMoonlightXY[0], kMoonlightXY[1], kFullMoonIllum } // MOON
+};
+
+const uint8_t Scene::kMaterialsFlags[Scene::NUM_MATERIALS] = {
+    0,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kSelfLit,
+    kSelfLit,
+    kSky,
+    kSelfLit,
+};
+
+} // namespace android
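
The xyY-to-XYZ conversion used three times above is simple enough to factor
out; a sketch of the pure math, matching the in-line comments:

    // Convert CIE xyY (chromaticity x, y plus luminance Y) to XYZ.
    static void xyYToXYZ(float x, float y, float Y, float outXYZ[3]) {
        outXYZ[0] = Y / y * x;              // X
        outXYZ[1] = Y;                      // Y
        outXYZ[2] = Y / y * (1 - x - y);    // Z
    }

    // e.g. sunXYZ above becomes:
    // xyYToXYZ(currentSunXY[0], currentSunXY[1], sunLux, sunXYZ);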
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.h b/tools/emulator/system/camera/fake-pipeline2/Scene.h
new file mode 100644
index 0000000..687e427
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The Scene class implements a simple physical simulation of a scene, using the
+ * CIE 1931 colorspace to represent light in physical units (lux).
+ *
+ * It's fairly approximate, but does provide a scene with realistic widely
+ * variable illumination levels and colors over time.
+ *
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SCENE_H
+#define HW_EMULATOR_CAMERA2_SCENE_H
+
+#include "utils/Timers.h"
+
+namespace android {
+
+class Scene {
+  public:
+    Scene(int sensorWidthPx,
+            int sensorHeightPx,
+            float sensorSensitivity);
+    ~Scene();
+
+    // Set the filter coefficients for the red, green, and blue filters on the
+    // sensor. Used as an optimization to pre-calculate various illuminance
+    // values. Two different green filters can be provided, to account for
+    // possible cross-talk on a Bayer sensor. Must be called before
+    // calculateScene.
+    void setColorFilterXYZ(
+        float rX, float rY, float rZ,
+        float grX, float grY, float grZ,
+        float gbX, float gbY, float gbZ,
+        float bX, float bY, float bZ);
+
+    // Set time of day (24-hour clock). This controls the general light levels
+    // in the scene. Must be called before calculateScene
+    void setHour(int hour);
+    // Get current hour
+    int getHour();
+
+    // Set the duration of exposure for determining luminous exposure.
+    // Must be called before calculateScene
+    void setExposureDuration(float seconds);
+
+    // Calculate scene information for current hour and the time offset since
+    // the hour. Must be called at least once before calling getPixelElectrons.
+    // Resets pixel readout location to 0,0
+    void calculateScene(nsecs_t time);
+
+    // Set sensor pixel readout location.
+    void setReadoutPixel(int x, int y);
+
+    // Get sensor response in physical units (electrons) for light hitting the
+    // current readout pixel, after passing through color filters. The readout
+    // pixel will be auto-incremented. The returned array can be indexed with
+    // ColorChannels.
+    const uint32_t* getPixelElectrons();
+
+    enum ColorChannels {
+        R = 0,
+        Gr,
+        Gb,
+        B,
+        Y,
+        Cb,
+        Cr,
+        NUM_CHANNELS
+    };
+
+  private:
+    // Sensor color filtering coefficients in XYZ
+    float mFilterR[3];
+    float mFilterGr[3];
+    float mFilterGb[3];
+    float mFilterB[3];
+
+    int mOffsetX, mOffsetY;
+    int mMapDiv;
+
+    int mHandshakeX, mHandshakeY;
+
+    int mSensorWidth;
+    int mSensorHeight;
+    int mCurrentX;
+    int mCurrentY;
+    int mSubX;
+    int mSubY;
+    int mSceneX;
+    int mSceneY;
+    int mSceneIdx;
+    uint32_t *mCurrentSceneMaterial;
+
+    int mHour;
+    float mExposureDuration;
+    float mSensorSensitivity;
+
+    enum Materials {
+        GRASS = 0,
+        GRASS_SHADOW,
+        HILL,
+        WALL,
+        ROOF,
+        DOOR,
+        CHIMNEY,
+        WINDOW,
+        SUN,
+        SKY,
+        MOON,
+        NUM_MATERIALS
+    };
+
+    uint32_t mCurrentColors[NUM_MATERIALS*NUM_CHANNELS];
+
+    /**
+     * Constants for scene definition. These are various degrees of approximate.
+     */
+
+    // RGB->YUV conversion
+    static const float kRgb2Yuv[12];
+
+    // Aperture of imaging lens
+    static const float kAperture;
+
+    // Sun, moon illuminance levels in 2-hour increments. These don't match any
+    // real day anywhere.
+    static const uint32_t kTimeStep = 2;
+    static const float kSunlight[];
+    static const float kMoonlight[];
+    static const int kSunOverhead;
+    static const int kMoonOverhead;
+
+    // Illumination levels for various conditions, in lux
+    static const float kDirectSunIllum;
+    static const float kDaylightShadeIllum;
+    static const float kSunsetIllum;
+    static const float kTwilightIllum;
+    static const float kFullMoonIllum;
+    static const float kClearNightIllum;
+    static const float kStarIllum;
+    static const float kLivingRoomIllum;
+
+    // Chromaticity of various illumination sources
+    static const float kIncandescentXY[2];
+    static const float kDirectSunlightXY[2];
+    static const float kDaylightXY[2];
+    static const float kNoonSkyXY[2];
+    static const float kMoonlightXY[2];
+    static const float kSunsetXY[2];
+
+    static const uint8_t kSelfLit;
+    static const uint8_t kShadowed;
+    static const uint8_t kSky;
+
+    static const float kMaterials_xyY[NUM_MATERIALS][3];
+    static const uint8_t kMaterialsFlags[NUM_MATERIALS];
+
+    static const int kSceneWidth;
+    static const int kSceneHeight;
+    static const uint8_t kScene[];
+};
+
+} // namespace android
+
+#endif // HW_EMULATOR_CAMERA2_SCENE_H
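
A hypothetical driver for this class, following the call order the comments
require (hour and exposure set before calculateScene, readout afterwards); the
sensitivity value is illustrative, and the constructor's default sRGB filters
stand in for setColorFilterXYZ:

    Scene scene(640, 480, 385.f /* electrons per lux-second */);
    scene.setHour(18);                  // sunset light levels
    scene.setExposureDuration(0.033f);  // ~1/30 s
    scene.calculateScene(0);            // 0 ns past the hour; resets readout
    scene.setReadoutPixel(0, 0);
    for (int i = 0; i < 640 * 480; i++) {
        const uint32_t *e = scene.getPixelElectrons(); // auto-increments
        uint32_t red = e[Scene::R];     // electrons through the red filter
        (void)red;                      // consume pixel
    }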
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
new file mode 100644
index 0000000..29898b8
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -0,0 +1,442 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+#define LOG_TAG "EmulatedCamera2_Sensor"
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+
+#include "../EmulatedFakeCamera2.h"
+#include "Sensor.h"
+#include <cmath>
+#include <cstdlib>
+#include "system/camera_metadata.h"
+
+namespace android {
+
+const unsigned int Sensor::kResolution[2]  = {640, 480};
+
+const nsecs_t Sensor::kExposureTimeRange[2] =
+    {1000L, 30000000000L}; // 1 us - 30 sec
+const nsecs_t Sensor::kFrameDurationRange[2] =
+    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
+const nsecs_t Sensor::kMinVerticalBlank = 10000L;
+
+const uint8_t Sensor::kColorFilterArrangement = ANDROID_SENSOR_RGGB;
+
+// Output image data characteristics
+const uint32_t Sensor::kMaxRawValue = 4000;
+const uint32_t Sensor::kBlackLevel  = 1000;
+
+// Sensor sensitivity
+const float Sensor::kSaturationVoltage      = 0.520f;
+const uint32_t Sensor::kSaturationElectrons = 2000;
+const float Sensor::kVoltsPerLuxSecond      = 0.100f;
+
+const float Sensor::kElectronsPerLuxSecond =
+        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
+        * Sensor::kVoltsPerLuxSecond;
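+// Evaluates to 2000 / 0.520 * 0.100, about 385 electrons per lux-second.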
+
+const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
+            Sensor::kSaturationElectrons;
+
+const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
+const float Sensor::kReadNoiseStddevAfterGain =  2.100; // in digital counts
+const float Sensor::kReadNoiseVarBeforeGain =
+            Sensor::kReadNoiseStddevBeforeGain *
+            Sensor::kReadNoiseStddevBeforeGain;
+const float Sensor::kReadNoiseVarAfterGain =
+            Sensor::kReadNoiseStddevAfterGain *
+            Sensor::kReadNoiseStddevAfterGain;
+
+// While each row has to read out, reset, and then expose, the (reset +
+// expose) sequence can be overlapped by other row readouts, so the final
+// minimum frame duration is purely a function of row readout time, at least
+// if there's a reasonable number of rows.
+const nsecs_t Sensor::kRowReadoutTime =
+            Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
+
+const uint32_t Sensor::kAvailableSensitivities[5] =
+    {100, 200, 400, 800, 1600};
+const uint32_t Sensor::kDefaultSensitivity = 100;
+
+/** A few utility functions for math, normal distributions */
+
+// Take advantage of IEEE floating-point format to calculate an approximate
+// square root. Accurate to within +-3.6%
+float sqrtf_approx(float r) {
+    // Modifier is based on IEEE floating-point representation; the
+    // manipulations boil down to finding approximate log2, dividing by two, and
+    // then inverting the log2. A bias is added to make the relative error
+    // symmetric about the real answer.
+    const int32_t modifier = 0x1FBB4000;
+
+    int32_t r_i = *(int32_t*)(&r);
+    r_i = (r_i >> 1) + modifier;
+
+    return *(float*)(&r_i);
+}
+
+
+
+Sensor::Sensor(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent),
+        mGotVSync(false),
+        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
+        mFrameDuration(kFrameDurationRange[0]),
+        mGainFactor(kDefaultSensitivity),
+        mNextBuffers(NULL),
+        mCapturedBuffers(NULL),
+        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
+{
+
+}
+
+Sensor::~Sensor() {
+    shutDown();
+}
+
+status_t Sensor::startUp() {
+    ALOGV("%s: E", __FUNCTION__);
+
+    int res;
+    mCapturedBuffers = NULL;
+    res = run("EmulatedFakeCamera2::Sensor",
+            ANDROID_PRIORITY_URGENT_DISPLAY);
+
+    if (res != OK) {
+        ALOGE("Unable to start up sensor capture thread: %d", res);
+    }
+    return res;
+}
+
+status_t Sensor::shutDown() {
+    ALOGV("%s: E", __FUNCTION__);
+
+    int res;
+    res = requestExitAndWait();
+    if (res != OK) {
+        ALOGE("Unable to shut down sensor capture thread: %d", res);
+    }
+    return res;
+}
+
+Scene &Sensor::getScene() {
+    return mScene;
+}
+
+void Sensor::setExposureTime(uint64_t ns) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Exposure set to %f", ns/1000000.f);
+    mExposureTime = ns;
+}
+
+void Sensor::setFrameDuration(uint64_t ns) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Frame duration set to %f", ns/1000000.f);
+    mFrameDuration = ns;
+}
+
+void Sensor::setSensitivity(uint32_t gain) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Gain set to %d", gain);
+    mGainFactor = gain;
+}
+
+void Sensor::setDestinationBuffers(Buffers *buffers) {
+    Mutex::Autolock lock(mControlMutex);
+    mNextBuffers = buffers;
+}
+
+bool Sensor::waitForVSync(nsecs_t reltime) {
+    int res;
+    Mutex::Autolock lock(mControlMutex);
+
+    mGotVSync = false;
+    res = mVSync.waitRelative(mControlMutex, reltime);
+    if (res != OK && res != TIMED_OUT) {
+        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
+        return false;
+    }
+    return mGotVSync;
+}
+
+bool Sensor::waitForNewFrame(nsecs_t reltime,
+        nsecs_t *captureTime) {
+    Mutex::Autolock lock(mReadoutMutex);
+    if (mCapturedBuffers == NULL) {
+        int res;
+        res = mReadoutComplete.waitRelative(mReadoutMutex, reltime);
+        if (res == TIMED_OUT) {
+            return false;
+        } else if (res != OK || mCapturedBuffers == NULL) {
+            ALOGE("Error waiting for sensor readout signal: %d", res);
+            return false;
+        }
+    }
+    *captureTime = mCaptureTime;
+    mCapturedBuffers = NULL;
+    return true;
+}
+
+status_t Sensor::readyToRun() {
+    ALOGV("Starting up sensor thread");
+    mStartupTime = systemTime();
+    mNextCaptureTime = 0;
+    mNextCapturedBuffers = NULL;
+    return OK;
+}
+
+bool Sensor::threadLoop() {
+    /**
+     * Sensor capture operation main loop.
+     *
+     * Stages are out-of-order relative to a single frame's processing, but
+     * in-order in time.
+     */
+
+    /**
+     * Stage 1: Read in latest control parameters
+     */
+    uint64_t exposureDuration;
+    uint64_t frameDuration;
+    uint32_t gain;
+    Buffers *nextBuffers;
+    {
+        Mutex::Autolock lock(mControlMutex);
+        exposureDuration = mExposureTime;
+        frameDuration    = mFrameDuration;
+        gain             = mGainFactor;
+        nextBuffers      = mNextBuffers;
+        // Don't reuse a buffer set
+        mNextBuffers = NULL;
+
+        // Signal VSync for start of readout
+        ALOGVV("Sensor VSync");
+        mGotVSync = true;
+        mVSync.signal();
+    }
+
+    /**
+     * Stage 3: Read out latest captured image
+     */
+
+    Buffers *capturedBuffers = NULL;
+    nsecs_t captureTime = 0;
+
+    nsecs_t startRealTime  = systemTime();
+    nsecs_t simulatedTime    = startRealTime - mStartupTime;
+    nsecs_t frameEndRealTime = startRealTime + frameDuration;
+    nsecs_t frameReadoutEndRealTime = startRealTime +
+            kRowReadoutTime * kResolution[1];
+
+    if (mNextCapturedBuffers != NULL) {
+        ALOGVV("Sensor starting readout");
+        // Pretend we're doing readout now; will signal once enough time has elapsed
+        capturedBuffers = mNextCapturedBuffers;
+        captureTime    = mNextCaptureTime;
+    }
+    simulatedTime += kRowReadoutTime + kMinVerticalBlank;
+
+    // TODO: Move this signal to another thread to simulate readout
+    // time properly
+    if (capturedBuffers != NULL) {
+        ALOGVV("Sensor readout complete");
+        Mutex::Autolock lock(mReadoutMutex);
+        mCapturedBuffers = capturedBuffers;
+        mCaptureTime = captureTime;
+        mReadoutComplete.signal();
+        capturedBuffers = NULL;
+    }
+
+    /**
+     * Stage 2: Capture new image
+     */
+
+    mNextCaptureTime = simulatedTime;
+    mNextCapturedBuffers = nextBuffers;
+
+    if (mNextCapturedBuffers != NULL) {
+        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
+                (float)exposureDuration/1e6, gain);
+        mScene.setExposureDuration((float)exposureDuration/1e9);
+        mScene.calculateScene(mNextCaptureTime);
+        for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
+            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
+            ALOGVV("Sensor capturing buffer %d: stream %d,"
+                    " %d x %d, format %x, stride %d, buf %p, img %p",
+                    i, b.streamId, b.width, b.height, b.format, b.stride,
+                    b.buffer, b.img);
+            switch(b.format) {
+                case HAL_PIXEL_FORMAT_RAW_SENSOR:
+                    captureRaw(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_RGBA_8888:
+                    captureRGBA(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    // Add auxiliary buffer of the right size
+                    // Assumes only one BLOB (JPEG) buffer in
+                    // mNextCapturedBuffers
+                    StreamBuffer bAux;
+                    bAux.streamId = -1;
+                    bAux.width = b.width;
+                    bAux.height = b.height;
+                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
+                    bAux.stride = b.width;
+                    bAux.buffer = NULL;
+                    // TODO: Reuse these
+                    bAux.img = new uint8_t[b.width * b.height * 3];
+                    captureRGB(bAux.img, gain, b.stride);
+                    mNextCapturedBuffers->push_back(bAux);
+                    break;
+                case HAL_PIXEL_FORMAT_YV12:
+                case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+                    // TODO:
+                    ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
+                    break;
+                default:
+                    ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
+                            b.format);
+                    break;
+            }
+        }
+    }
+
+    ALOGVV("Sensor vertical blanking interval");
+    nsecs_t workDoneRealTime = systemTime();
+    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
+    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
+        timespec t;
+        t.tv_sec = (frameEndRealTime - workDoneRealTime)  / 1000000000L;
+        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
+
+        int ret;
+        do {
+            ret = nanosleep(&t, &t);
+        } while (ret != 0);
+    }
+    nsecs_t endRealTime = systemTime();
+    ALOGVV("Frame cycle took %d ms, target %d ms",
+            (int)((endRealTime - startRealTime)/1000000),
+            (int)(frameDuration / 1000000));
+    return true;
+}
+
+void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    float noiseVarGain =  totalGain * totalGain;
+    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
+            + kReadNoiseVarAfterGain;
+
+    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
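+    // Even rows read {R, Gr}, odd rows {Gb, B}; (y & 1) picks the row pair
+    // and (x & 1) the column within it, matching an RGGB mosaic.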
+    mScene.setReadoutPixel(0,0);
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        int *bayerRow = bayerSelect + (y & 0x1) * 2;
+        uint16_t *px = (uint16_t*)img + y * stride;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t electronCount;
+            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
+
+            // TODO: Better pixel saturation curve?
+            electronCount = (electronCount < kSaturationElectrons) ?
+                    electronCount : kSaturationElectrons;
+
+            // TODO: Better A/D saturation curve?
+            uint16_t rawCount = electronCount * totalGain;
+            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
+
+            // Calculate noise value
+            // TODO: Use more-correct Gaussian instead of uniform noise
+            float photonNoiseVar = electronCount * noiseVarGain;
+            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
+            // Scaled to roughly match gaussian/uniform noise stddev
+            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
+
+            rawCount += kBlackLevel;
+            rawCount += noiseStddev * noiseSample;
+
+            *px++ = rawCount;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("Raw sensor image captured");
+}
+
+void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
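+    // e.g. at ISO 100, totalGain = kMaxRawValue / kSaturationElectrons = 2,
+    // so scale64x = 64 * 2 * 255 / 4000 = 8, i.e. 8/64 of a count per electron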
+    mScene.setReadoutPixel(0,0);
+
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        uint8_t *px = img + y * stride * 4;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+            *px++ = 255;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("RGBA sensor image captured");
+}
+
+void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    mScene.setReadoutPixel(0,0);
+
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        uint8_t *px = img + y * stride * 3;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("RGB sensor image captured");
+}
+
+} // namespace android
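
The sqrtf_approx trick above halves the float's exponent through integer
arithmetic. A standalone harness to sanity-check the stated +-3.6% bound
against libm (assumes nothing from the emulator):

    #include <cmath>
    #include <cstdio>
    #include <cstring>
    #include <cstdint>

    static float sqrtf_approx(float r) {
        const int32_t modifier = 0x1FBB4000;
        int32_t r_i;
        std::memcpy(&r_i, &r, sizeof(r_i));  // well-defined type pun
        r_i = (r_i >> 1) + modifier;         // halve exponent, bias mantissa
        std::memcpy(&r, &r_i, sizeof(r));
        return r;
    }

    int main() {
        for (float v = 0.5f; v < 4000.f; v *= 3.f) {
            float err = sqrtf_approx(v) / std::sqrt(v) - 1.f;
            std::printf("sqrt(%8.1f): relative error %+.4f\n", v, err);
        }
        return 0;  // expect every error within a few percent
    }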
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.h b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
new file mode 100644
index 0000000..a666cc3
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This class is a simple simulation of a typical CMOS cellphone imager chip,
+ * which outputs 12-bit Bayer-mosaic raw images.
+ *
+ * The sensor is abstracted as operating as a pipeline 3 stages deep;
+ * conceptually, each frame to be captured goes through these three stages. The
+ * processing step for the sensor is marked off by vertical sync signals, which
+ * indicate the start of readout of the oldest frame. The interval between
+ * processing steps depends on the frame duration of the frame currently being
+ * captured. The stages are 1) configure, 2) capture, and 3) readout. During
+ * configuration, the sensor's registers for settings such as exposure time,
+ * frame duration, and gain are set for the next frame to be captured. In stage
+ * 2, the image data for the frame is actually captured by the sensor. Finally,
+ * in stage 3, the just-captured data is read out and sent to the rest of the
+ * system.
+ *
+ * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
+ * sensor are exposed earlier in time than larger-numbered rows, with the time
+ * offset between each row being equal to the row readout time.
+ *
+ * The characteristics of this sensor don't correspond to any actual sensor,
+ * but are not far off typical sensors.
+ *
+ * Example timing diagram, with three frames:
+ *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
+ *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
+ * Legend:
+ *   C = update sensor registers for frame
+ *   v = row in reset (vertical blanking interval)
+ *   E = row capturing image data
+ *   R = row being read out
+ *   | = vertical sync signal
+ *time(ms)|   0          55        105       155            230     270
+ * Frame 0|   :configure : capture : readout :              :       :
+ *  Row # | ..|CCCC______|_________|_________|              :       :
+ *      0 |   :\          \vvvvvEEEER         \             :       :
+ *    500 |   : \          \vvvvvEEEER         \            :       :
+ *   1000 |   :  \          \vvvvvEEEER         \           :       :
+ *   1500 |   :   \          \vvvvvEEEER         \          :       :
+ *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
+ * Frame 1|   :           configure  capture      readout   :       :
+ *  Row # |   :          |CCCC_____|_________|______________|       :
+ *      0 |   :          :\         \vvvvvEEEER              \      :
+ *    500 |   :          : \         \vvvvvEEEER              \     :
+ *   1000 |   :          :  \         \vvvvvEEEER              \    :
+ *   1500 |   :          :   \         \vvvvvEEEER              \   :
+ *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
+ * Frame 2|   :          :          configure     capture    readout:
+ *  Row # |   :          :         |CCCC_____|______________|_______|...
+ *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
+ *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
+ *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
+ *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
+ *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
+#define HW_EMULATOR_CAMERA2_SENSOR_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+
+#include "Scene.h"
+#include "Base.h"
+
+namespace android {
+
+class EmulatedFakeCamera2;
+
+class Sensor: private Thread, public virtual RefBase {
+  public:
+
+    Sensor(EmulatedFakeCamera2 *parent);
+    ~Sensor();
+
+    /*
+     * Power control
+     */
+
+    status_t startUp();
+    status_t shutDown();
+
+    /*
+     * Access to scene
+     */
+    Scene &getScene();
+
+    /*
+     * Controls that can be updated every frame
+     */
+
+    void setExposureTime(uint64_t ns);
+    void setFrameDuration(uint64_t ns);
+    void setSensitivity(uint32_t gain);
+    // Buffer must be at least stride*height*2 bytes in size
+    void setDestinationBuffers(Buffers *buffers);
+
+    /*
+     * Controls that cause reconfiguration delay
+     */
+
+    void setBinning(int horizontalFactor, int verticalFactor);
+
+    /*
+     * Synchronizing with sensor operation (vertical sync)
+     */
+
+    // Wait until the sensor outputs its next vertical sync signal, meaning it
+    // is starting readout of its latest frame of data. Returns true if vertical
+    // sync is signaled, false if the wait timed out.
+    bool waitForVSync(nsecs_t reltime);
+
+    // Wait until a new frame has been read out, and then return the time
+    // capture started.  May return immediately if a new frame has been pushed
+    // since the last wait for a new frame. Returns true if new frame is
+    // returned, false if timed out.
+    bool waitForNewFrame(nsecs_t reltime,
+            nsecs_t *captureTime);
+
+    /**
+     * Static sensor characteristics
+     */
+    static const unsigned int kResolution[2];
+
+    static const nsecs_t kExposureTimeRange[2];
+    static const nsecs_t kFrameDurationRange[2];
+    static const nsecs_t kMinVerticalBlank;
+
+    static const uint8_t kColorFilterArrangement;
+
+    // Output image data characteristics
+    static const uint32_t kMaxRawValue;
+    static const uint32_t kBlackLevel;
+
+    // Sensor sensitivity, approximate
+    static const float kSaturationVoltage;
+    static const uint32_t kSaturationElectrons;
+    static const float kVoltsPerLuxSecond;
+    static const float kElectronsPerLuxSecond;
+
+    static const float kBaseGainFactor;
+
+    static const float kReadNoiseStddevBeforeGain; // In electrons
+    static const float kReadNoiseStddevAfterGain;  // In raw digital units
+    static const float kReadNoiseVarBeforeGain;
+    static const float kReadNoiseVarAfterGain;
+
+    // While each row has to read out, reset, and then expose, the (reset +
+    // expose) sequence can be overlapped by other row readouts, so the final
+    // minimum frame duration is purely a function of row readout time, at least
+    // if there's a reasonable number of rows.
+    static const nsecs_t kRowReadoutTime;
+
+    static const uint32_t kAvailableSensitivities[5];
+    static const uint32_t kDefaultSensitivity;
+
+  private:
+    EmulatedFakeCamera2 *mParent;
+
+    Mutex mControlMutex; // Lock before accessing control parameters
+    // Start of control parameters
+    Condition mVSync;
+    bool      mGotVSync;
+    uint64_t  mExposureTime;
+    uint64_t  mFrameDuration;
+    uint32_t  mGainFactor;
+    Buffers  *mNextBuffers;
+
+    // End of control parameters
+
+    Mutex mReadoutMutex; // Lock before accessing readout variables
+    // Start of readout variables
+    Condition mReadoutComplete;
+    Buffers  *mCapturedBuffers;
+    nsecs_t   mCaptureTime;
+    // End of readout variables
+
+    // Time of sensor startup, used for simulation zero-time point
+    nsecs_t mStartupTime;
+
+    /**
+     * Inherited Thread virtual overrides, and members only used by the
+     * processing thread
+     */
+  private:
+    virtual status_t readyToRun();
+
+    virtual bool threadLoop();
+
+    nsecs_t mNextCaptureTime;
+    Buffers *mNextCapturedBuffers;
+
+    Scene mScene;
+
+    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
+
+};
+
+} // namespace android
+
+#endif // HW_EMULATOR_CAMERA2_SENSOR_H
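
The header above is the whole control surface: per-frame controls are latched
before the vertical sync of the frame they should apply to, and consumers
synchronize on waitForVSync()/waitForNewFrame(). A hypothetical caller (the
real driver of this class is EmulatedFakeCamera2; the timeouts and control
values here are illustrative only) might run one frame like this:

    #include "Sensor.h"

    using namespace android;

    void runOneFrame(Sensor &sensor, Buffers *buffers) {
        // Stage 1 (configure): latched for the next frame to be captured
        sensor.setExposureTime(20 * 1000000LL);   // 20 ms
        sensor.setFrameDuration(50 * 1000000LL);  // 50 ms
        sensor.setSensitivity(Sensor::kDefaultSensitivity);
        sensor.setDestinationBuffers(buffers);

        // Vertical sync: readout of the oldest in-flight frame is starting
        if (!sensor.waitForVSync(100 * 1000000LL)) return; // timed out

        // Stage 3 (readout): block until the frame lands, grab its timestamp
        nsecs_t captureTime;
        if (sensor.waitForNewFrame(100 * 1000000LL, &captureTime)) {
            // buffers now hold image data exposed starting at captureTime
        }
    }
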
diff --git a/tools/idegen/README b/tools/idegen/README
index 1f773d8..02bb593 100644
--- a/tools/idegen/README
+++ b/tools/idegen/README
@@ -7,10 +7,10 @@
     If this is your first time using IDEGen...
 
         IDEA needs a lot of memory. Add "-Xms748m -Xmx748m" to your VM options
-        in "IDEA_HOME/bin/idea.vmoptions" on Linux or 
+        in "IDEA_HOME/bin/idea.vmoptions" on Linux or
         "IntelliJ IDEA.app/Contents/Info.plist" on OS X.
 
-        Create a JDK configuration named "1.5 (No Libraries)" by adding a new
+        Create a JDK configuration named "1.6 (No Libraries)" by adding a new
         JDK like you normally would and then removing all of the jar entries
         under the "Classpath" tab. This will ensure that you only get access to
         Android's core libraries and not those from your desktop VM.
@@ -18,13 +18,13 @@
     From the project's root directory...
 
         Repeat these steps after each sync...
-        
+
         1) make (to produce generated .java source)
         2) development/tools/idegen/idegen.sh
         3) Open android.ipr in IntelliJ. If you already have the project open,
            hit the sync button in IntelliJ, and it will automatically detect the
            updated configuration.
-        
+
         If you get unexpected compilation errors from IntelliJ, try running
         "Build -> Rebuild Project". Sometimes IntelliJ gets confused after the
         project changes significantly.
@@ -53,7 +53,7 @@
 
 Excluding source roots and jars
 
-    IDEGen keeps an exclusion list in the "excluded-paths" file. This file 
+    IDEGen keeps an exclusion list in the "excluded-paths" file. This file
     has one regular expression per line that matches paths (relative to the
     project root) that should be excluded from the IDE configuration. We
     use Java's regular expression syntax (see java.util.regex.Pattern).
@@ -62,7 +62,7 @@
     "excluded-paths" file in the project's root directory. For example, you
     might exclude all apps except the Browser in your IDE configuration with
     this regular expression: "^packages/apps/(?!Browser)".
-    
+
 Controlling source root ordering (Eclipse)
 
     You may want some source roots to come before others in Eclipse. Simply
@@ -77,4 +77,4 @@
     For example, if you want your application's source root to come first,
     you might add an expression like "^packages/apps/MyApp/src$" to the top
     of the "path-precedence" file.  To make source roots under ./out come last,
-    add "^(?!out/)" (which matches all paths that don't start with "out/").
\ No newline at end of file
+    add "^(?!out/)" (which matches all paths that don't start with "out/").
diff --git a/tools/idegen/excluded-paths b/tools/idegen/excluded-paths
index 35280ad..9122c30 100644
--- a/tools/idegen/excluded-paths
+++ b/tools/idegen/excluded-paths
@@ -62,3 +62,6 @@
 # This directory contains only an R.java file which is the same as the one in
 # Camera_intermediates.
 ^out/target/common/obj/APPS/CameraTests_intermediates$
+
+# Exclude all prebuilts jars.
+^prebuilts/.*\.jar$
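
Unlike the lookahead patterns above, this new entry is anchored at both ends,
so it selects exactly the .jar files under prebuilts/ and nothing else. A
small illustration with made-up paths (hypothetical test code):

    #include <cassert>
    #include <regex>
    #include <string>

    int main() {
        std::regex prebuiltJars("^prebuilts/.*\\.jar$");
        assert( std::regex_match(std::string("prebuilts/sdk/14/android.jar"),
                                 prebuiltJars));
        assert(!std::regex_match(std::string("prebuilts/sdk/NOTICE"),
                                 prebuiltJars));
        assert(!std::regex_match(std::string("out/prebuilts/foo.jar"),
                                 prebuiltJars));
        return 0;
    }
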
diff --git a/tools/idegen/idegen.ipr b/tools/idegen/idegen.ipr
index 00cf4fd..75771b5 100644
--- a/tools/idegen/idegen.ipr
+++ b/tools/idegen/idegen.ipr
@@ -135,7 +135,7 @@
     <option name="GENERATE_NO_WARNINGS" value="false" />
     <option name="DEPRECATION" value="true" />
     <option name="ADDITIONAL_OPTIONS_STRING" value="" />
-    <option name="MAXIMUM_HEAP_SIZE" value="128" />
+    <option name="MAXIMUM_HEAP_SIZE" value="800" />
   </component>
   <component name="JavadocGenerationManager">
     <option name="OUTPUT_DIRECTORY" />
@@ -298,7 +298,7 @@
       <module fileurl="file://$PROJECT_DIR$/idegen.iml" filepath="$PROJECT_DIR$/idegen.iml" />
     </modules>
   </component>
-  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.5" project-jdk-type="JavaSDK">
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6 (no libraries)" project-jdk-type="JavaSDK">
     <output url="file://$PROJECT_DIR$/classes" />
   </component>
   <component name="RmicSettings">
diff --git a/tools/idegen/templates/android.ipr b/tools/idegen/templates/android.ipr
index d6aba4b..f857d4a 100644
--- a/tools/idegen/templates/android.ipr
+++ b/tools/idegen/templates/android.ipr
@@ -26,7 +26,6 @@
         <option name="PLACE_ASSIGNMENT_SIGN_ON_NEXT_LINE" value="true" />
       </value>
     </option>
-    <option name="USE_PER_PROJECT_SETTINGS" value="true" />
   </component>
   <component name="CompilerConfiguration">
     <option name="DEFAULT_COMPILER" value="Javac" />
@@ -122,7 +121,7 @@
     <option name="GENERATE_NO_WARNINGS" value="false" />
     <option name="DEPRECATION" value="false" />
     <option name="ADDITIONAL_OPTIONS_STRING" value="-Xlint:all,-deprecation,-serial" />
-    <option name="MAXIMUM_HEAP_SIZE" value="512" />
+    <option name="MAXIMUM_HEAP_SIZE" value="800" />
   </component>
   <component name="JavadocGenerationManager">
     <option name="OUTPUT_DIRECTORY" />
@@ -282,7 +281,7 @@
       <module fileurl="file://$PROJECT_DIR$/android.iml" filepath="$PROJECT_DIR$/android.iml" />
     </modules>
   </component>
-  <component name="ProjectRootManager" version="2" assert-keyword="true" jdk-15="true" project-jdk-name="1.5 (No Libraries)" project-jdk-type="JavaSDK">
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6 (No Libraries)" project-jdk-type="JavaSDK">
     <output url="file:///tmp/intellij$PROJECT_DIR$/classes" />
   </component>
   <component name="RmicSettings">