am f78ddc64: am d333a0d1: Merge "Update/Copy fenv.h"

* commit 'f78ddc649987b9f0e7d6a7328d54cd31099976ff':
  Update/Copy fenv.h
diff --git a/apps/Development/AndroidManifest.xml b/apps/Development/AndroidManifest.xml
index f2bf60c..32738dd 100644
--- a/apps/Development/AndroidManifest.xml
+++ b/apps/Development/AndroidManifest.xml
@@ -38,6 +38,7 @@
     <uses-permission android:name="android.permission.USE_CREDENTIALS" />
     <uses-permission android:name="android.permission.WAKE_LOCK" />
     <uses-permission android:name="android.permission.WRITE_SETTINGS" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
     <uses-permission android:name="com.google.android.googleapps.permission.ACCESS_GOOGLE_PASSWORD" />
     <uses-permission android:name="com.google.android.googleapps.permission.GOOGLE_AUTH" />
     <uses-permission android:name="com.google.android.googleapps.permission.GOOGLE_AUTH.ALL_SERVICES" />
@@ -169,6 +170,13 @@
         </receiver>
         <service android:name="BadBehaviorActivity$BadService" />
 
+        <activity android:name="CacheAbuser" android:label="Cache Abuser">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.TEST" />
+            </intent-filter>
+        </activity>
+
         <activity android:name="ConfigurationViewer" android:label="Configuration">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
diff --git a/apps/Development/res/layout/bad_behavior.xml b/apps/Development/res/layout/bad_behavior.xml
index ce10ebb..6415da6 100644
--- a/apps/Development/res/layout/bad_behavior.xml
+++ b/apps/Development/res/layout/bad_behavior.xml
@@ -21,7 +21,7 @@
     <LinearLayout
         android:orientation="vertical"
         android:layout_width="match_parent"
-        android:layout_height="match_parent">
+        android:layout_height="wrap_content">
 
         <Button android:id="@+id/bad_behavior_crash_main"
                 android:layout_width="match_parent"
diff --git a/apps/Development/res/layout/cache_abuser.xml b/apps/Development/res/layout/cache_abuser.xml
new file mode 100644
index 0000000..8e212a9
--- /dev/null
+++ b/apps/Development/res/layout/cache_abuser.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+    <LinearLayout
+        android:orientation="vertical"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content">
+
+        <Button android:id="@+id/start_internal_abuse"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/cache_abuser_start_internal_abuse" />
+
+        <Button android:id="@+id/start_slow_internal_abuse"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/cache_abuser_start_slow_internal_abuse" />
+
+        <Button android:id="@+id/start_external_abuse"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/cache_abuser_start_external_abuse" />
+
+        <Button android:id="@+id/start_slow_external_abuse"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/cache_abuser_start_slow_external_abuse" />
+
+        <Button android:id="@+id/stop_abuse"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:text="@string/cache_abuser_stop_abuse" />
+
+    </LinearLayout>
+
+</ScrollView>
diff --git a/apps/Development/res/values/strings.xml b/apps/Development/res/values/strings.xml
index 0f4763c..1087931 100644
--- a/apps/Development/res/values/strings.xml
+++ b/apps/Development/res/values/strings.xml
@@ -220,6 +220,13 @@
     <string name="bad_behavior_anr_system_label">System ANR (in ActivityManager)</string>
     <string name="bad_behavior_wedge_system_label">Wedge system (5 minute system ANR)</string>
 
+    <!-- CacheAbuser -->
+    <string name="cache_abuser_start_internal_abuse">Quickly abuse internal cache</string>
+    <string name="cache_abuser_start_slow_internal_abuse">Slowly abuse internal cache</string>
+    <string name="cache_abuser_start_external_abuse">Quickly abuse external cache</string>
+    <string name="cache_abuser_start_slow_external_abuse">Slowly abuse external cache</string>
+    <string name="cache_abuser_stop_abuse">Stop cache abuse</string>
+
     <!-- MediaScannerActivity -->
     <string name="scancard">Scan SD card</string>
     <string name="numsongs"># of albums</string>
diff --git a/apps/Development/src/com/android/development/CacheAbuser.java b/apps/Development/src/com/android/development/CacheAbuser.java
new file mode 100644
index 0000000..489018f
--- /dev/null
+++ b/apps/Development/src/com/android/development/CacheAbuser.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.development;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import android.app.Activity;
+import android.app.ActivityManagerNative;
+import android.app.IActivityController;
+import android.app.IActivityManager;
+import android.app.Service;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.os.IPowerManager;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.Log;
+import android.view.View;
+import android.widget.Button;
+
+public class CacheAbuser extends Activity {
+    Button mStartInternalAbuse;
+    Button mStartSlowInternalAbuse;
+    Button mStartExternalAbuse;
+    Button mStartSlowExternalAbuse;
+    Button mStopAbuse;
+
+    AsyncTask<Void, Void, Void> mInternalAbuseTask;
+    AsyncTask<Void, Void, Void> mExternalAbuseTask;
+
+    static class AbuseTask extends AsyncTask<Void, Void, Void> {
+        final File mBaseDir;
+        final boolean mQuick;
+        final byte[] mBuffer;
+
+        AbuseTask(File cacheDir, boolean quick) {
+            File dir = new File(cacheDir, quick ? "quick" : "slow");
+            mBaseDir = new File(dir, Long.toString(System.currentTimeMillis()));
+            mQuick = quick;
+            mBuffer = quick ? new byte[1024*1024] : new byte[1024];
+        }
+
+        @Override
+        protected Void doInBackground(Void... params) {
+            long num = 0;
+            while (!isCancelled()) {
+                long dir1num = num/100;
+                long dir2num = num%100;
+                File dir = new File(mBaseDir, Long.toString(dir1num));
+                File file = new File(dir, Long.toString(dir2num));
+                FileOutputStream fos = null;
+                try {
+                    dir.mkdirs();
+                    fos = new FileOutputStream(file, false);
+                    fos.write(mBuffer);
+                } catch (IOException e) {
+                    Log.w("CacheAbuser", "Write failed to " + file + ": " + e);
+                    try {
+                        wait(5*1000);
+                    } catch (InterruptedException e1) {
+                    }
+                } finally {
+                    try {
+                        if (fos != null) {
+                            fos.close();
+                        }
+                    } catch (IOException e) {
+                    }
+                }
+                num++;
+            }
+            return null;
+        }
+    }
+
+    @Override
+    public void onCreate(Bundle icicle) {
+        super.onCreate(icicle);
+
+        setContentView(R.layout.cache_abuser);
+
+        mStartInternalAbuse = (Button) findViewById(R.id.start_internal_abuse);
+        mStartInternalAbuse.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                if (mInternalAbuseTask == null) {
+                    mInternalAbuseTask = new AbuseTask(getCacheDir(), true);
+                    mInternalAbuseTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+                    updateButtonState();
+                }
+            }
+        });
+
+        mStartSlowInternalAbuse = (Button) findViewById(R.id.start_slow_internal_abuse);
+        mStartSlowInternalAbuse.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                if (mInternalAbuseTask == null) {
+                    mInternalAbuseTask = new AbuseTask(getCacheDir(), false);
+                    mInternalAbuseTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+                    updateButtonState();
+                }
+            }
+        });
+
+        mStartExternalAbuse = (Button) findViewById(R.id.start_external_abuse);
+        mStartExternalAbuse.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                if (mExternalAbuseTask == null) {
+                    mExternalAbuseTask = new AbuseTask(getExternalCacheDir(), true);
+                    mExternalAbuseTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+                    updateButtonState();
+                }
+            }
+        });
+
+        mStartSlowExternalAbuse = (Button) findViewById(R.id.start_slow_external_abuse);
+        mStartSlowExternalAbuse.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                if (mExternalAbuseTask == null) {
+                    mExternalAbuseTask = new AbuseTask(getExternalCacheDir(), false);
+                    mExternalAbuseTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+                    updateButtonState();
+                }
+            }
+        });
+
+        mStopAbuse = (Button) findViewById(R.id.stop_abuse);
+        mStopAbuse.setOnClickListener(new View.OnClickListener() {
+            public void onClick(View v) {
+                stopAbuse();
+            }
+        });
+
+        updateButtonState();
+    }
+
+    @Override
+    public void onStart() {
+        super.onStart();
+        updateButtonState();
+    }
+
+    @Override
+    public void onStop() {
+        super.onStop();
+        stopAbuse();
+    }
+
+    void stopAbuse() {
+        if (mInternalAbuseTask != null) {
+            mInternalAbuseTask.cancel(false);
+            mInternalAbuseTask = null;
+        }
+        if (mExternalAbuseTask != null) {
+            mExternalAbuseTask.cancel(false);
+            mExternalAbuseTask = null;
+        }
+        updateButtonState();
+    }
+
+    void updateButtonState() {
+        mStartInternalAbuse.setEnabled(mInternalAbuseTask == null);
+        mStartSlowInternalAbuse.setEnabled(mInternalAbuseTask == null);
+        mStartExternalAbuse.setEnabled(mExternalAbuseTask == null);
+        mStartSlowExternalAbuse.setEnabled(mExternalAbuseTask == null);
+        mStopAbuse.setEnabled(mInternalAbuseTask != null
+                || mExternalAbuseTask != null);
+    }
+}
diff --git a/apps/Fallback/res/values-fa/strings.xml b/apps/Fallback/res/values-fa/strings.xml
index 6f6c60f..7c0256b 100644
--- a/apps/Fallback/res/values-fa/strings.xml
+++ b/apps/Fallback/res/values-fa/strings.xml
@@ -18,5 +18,5 @@
     xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
     <string name="appTitle" msgid="161410001913116606">"بازگشت"</string>
     <string name="title" msgid="8156274565006125136">"عملکرد پشتیبانی نشده"</string>
-    <string name="error" msgid="6539615832923362301">"آن عملکرد در حال حاضر پشتیبانی نمی شود."</string>
+    <string name="error" msgid="6539615832923362301">"آن عملکرد در حال حاضر پشتیبانی نمی‌شود."</string>
 </resources>
diff --git a/cmds/monkey/src/com/android/commands/monkey/Monkey.java b/cmds/monkey/src/com/android/commands/monkey/Monkey.java
index fcf0893..706dd0f 100644
--- a/cmds/monkey/src/com/android/commands/monkey/Monkey.java
+++ b/cmds/monkey/src/com/android/commands/monkey/Monkey.java
@@ -32,7 +32,7 @@
 import android.os.StrictMode;
 import android.os.SystemClock;
 import android.os.SystemProperties;
-import android.os.UserId;
+import android.os.UserHandle;
 import android.view.IWindowManager;
 import android.view.Surface;
 
@@ -953,7 +953,7 @@
                     intent.addCategory(category);
                 }
                 List<ResolveInfo> mainApps = mPm.queryIntentActivities(intent, null, 0,
-                        UserId.myUserId());
+                        UserHandle.myUserId());
                 if (mainApps == null || mainApps.size() == 0) {
                     System.err.println("// Warning: no activities found for category " + category);
                     continue;
diff --git a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetwork.java b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetwork.java
index 99e7c07..cae6416 100644
--- a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetwork.java
+++ b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetwork.java
@@ -437,7 +437,7 @@
         IPowerManager pm =
                 IPowerManager.Stub.asInterface(ServiceManager.getService(Context.POWER_SERVICE));
         try {
-            pm.userActivityWithForce(SystemClock.uptimeMillis(), true, true);
+            pm.wakeUp(SystemClock.uptimeMillis());
         } catch (RemoteException e) {
             Log.e(TAG, "Got remote exception", e);
             return false;
diff --git a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
index 590f406..ddb83da 100644
--- a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
+++ b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
@@ -24,7 +24,7 @@
 import android.graphics.Rect;
 import android.os.RemoteException;
 import android.os.ServiceManager;
-import android.os.UserId;
+import android.os.UserHandle;
 import android.view.accessibility.AccessibilityEvent;
 import android.view.accessibility.AccessibilityNodeInfo;
 
@@ -140,7 +140,7 @@
         try {
             AccessibilityNodeInfo node = event.getSource();
             String packageName = node.getPackageName().toString();
-            ApplicationInfo appInfo = sPm.getApplicationInfo(packageName, 0, UserId.myUserId());
+            ApplicationInfo appInfo = sPm.getApplicationInfo(packageName, 0, UserHandle.myUserId());
             Class<?> klass;
             klass = getIdClass(packageName, appInfo.sourceDir);
             return klass.getField(stringId).getInt(null);
@@ -195,7 +195,7 @@
             String packageName = node.getPackageName().toString();
             try{
                 Class<?> klass;
-                ApplicationInfo appInfo = sPm.getApplicationInfo(packageName, 0, UserId.myUserId());
+                ApplicationInfo appInfo = sPm.getApplicationInfo(packageName, 0, UserHandle.myUserId());
                 klass = getIdClass(packageName, appInfo.sourceDir);
                 StringBuilder fieldBuilder = new StringBuilder();
                 Field[] fields = klass.getFields();
diff --git a/data/etc/apns-conf.xml b/data/etc/apns-conf.xml
deleted file mode 100644
index 2fe90d9..0000000
--- a/data/etc/apns-conf.xml
+++ /dev/null
@@ -1,265 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-/*
-** Copyright 2006, Google Inc.
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
--->
-
-<!-- use empty string to specify no proxy or port -->
-<!-- This version must agree with that in apps/common/res/apns.xml -->
-<apns version="7">
-    <apn carrier="T-Mobile US"
-         mcc="310"
-         mnc="260"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-    />
-
-    <apn carrier="T-Mobile US 250"
-         mcc="310"
-         mnc="250"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 660"
-         mcc="310"
-         mnc="660"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 230"
-         mcc="310"
-         mnc="230"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 310"
-         mcc="310"
-         mnc="310"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 580"
-         mcc="310"
-         mnc="580"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 240"
-         mcc="310"
-         mnc="240"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 800"
-         mcc="310"
-         mnc="800"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 210"
-         mcc="310"
-         mnc="210"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 160"
-         mcc="310"
-         mnc="160"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 270"
-         mcc="310"
-         mnc="270"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 200"
-         mcc="310"
-         mnc="200"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 220"
-         mcc="310"
-         mnc="220"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <apn carrier="T-Mobile US 490"
-         mcc="310"
-         mnc="490"
-         apn="epc.tmobile.com"
-         user="none"
-         server="*"
-         password="none"
-         mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
-            />
-
-    <!-- T-Mobile Europe -->
-    <apn carrier="T-Mobile UK"
-         mcc="234"
-         mnc="30"
-         apn="general.t-mobile.uk"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="149.254.201.135"
-         mmsport="8080"
-         mmsc="http://mmsc.t-mobile.co.uk:8002"
-    />
-
-    <apn carrier="T-Mobile D"
-         mcc="262"
-         mnc="01"
-         apn="internet.t-mobile"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="172.028.023.131"
-         mmsport="8008"
-         mmsc="http://mms.t-mobile.de/servlets/mms"
-    />
-
-    <apn carrier="T-Mobile A"
-         mcc="232"
-         mnc="03"
-         apn="gprsinternet"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="010.012.000.020"
-         mmsport="80"
-         mmsc="http://mmsc.t-mobile.at/servlets/mms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile A MMS"
-         mcc="232"
-         mnc="03"
-         apn="gprsmms"
-         user="t-mobile"
-         password="tm"
-         server="*"
-         mmsproxy="010.012.000.020"
-         mmsport="80"
-         mmsc="http://mmsc.t-mobile.at/servlets/mms"
-         type="mms"
-    />
-
-    <apn carrier="T-Mobile CZ"
-         mcc="230"
-         mnc="01"
-         apn="internet.t-mobile.cz"
-         user="wap"
-         password="wap"
-         server="*"
-         mmsproxy="010.000.000.010"
-         mmsport="80"
-         mmsc="http://mms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile CZ MMS"
-         mcc="230"
-         mnc="01"
-         apn="mms.t-mobile.cz"
-         user="mms"
-         password="mms"
-         server="*"
-         mmsproxy="010.000.000.010"
-         mmsport="80"
-         mmsc="http://mms"
-         type="mms"
-    />
-
-    <apn carrier="T-Mobile NL"
-         mcc="204"
-         mnc="16"
-         apn="internet"
-         user="*"
-         password="*"
-         server="*"
-         mmsproxy="010.010.010.011"
-         mmsport="8080"
-         mmsc="http://t-mobilemms"
-         type="default,supl"
-    />
-
-    <apn carrier="T-Mobile NL MMS"
-         mcc="204"
-         mnc="16"
-         apn="mms"
-         user="tmobilemms"
-         password="tmobilemms"
-         server="*"
-         mmsproxy="010.010.010.011"
-         mmsport="8080"
-         mmsc="http://t-mobilemms"
-         type="mms"
-    />
-</apns>
diff --git a/data/etc/apns-conf_sdk.xml b/data/etc/apns-conf_sdk.xml
deleted file mode 100644
index 0e9cf7d..0000000
--- a/data/etc/apns-conf_sdk.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2008 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-  
-          http://www.apache.org/licenses/LICENSE-2.0
-  
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-
-<!-- This file contains fake APNs that are necessary for the emulator
-     to talk to the network.  It should only be installed for SDK builds.
-
-     This file is not installed by the local Android.mk, it's installed using
-     a PRODUCT_COPY_FILES line in the sdk section of the toplevel Makefile.
--->
-
-<!-- use empty string to specify no proxy or port -->
-<!-- This version must agree with that in apps/common/res/apns.xml -->
-<apns version="7">
-    <apn carrier="Android"
-        mcc="310"
-        mnc="995"
-        apn="internet"
-        user="*"
-        server="*"
-        password="*"
-        mmsc="null"
-    />
-    <apn carrier="TelKila"
-        mcc="310"
-        mnc="260"
-        apn="internet"
-        user="*"
-        server="*"
-        password="*"
-        mmsc="null"
-    />
-</apns>
diff --git a/data/etc/vold.conf b/data/etc/vold.conf
deleted file mode 100644
index 7888936..0000000
--- a/data/etc/vold.conf
+++ /dev/null
@@ -1,10 +0,0 @@
-## vold configuration file for the emulator/SDK
-
-volume_sdcard {
-    ## This is the direct uevent device path to the SD slot on the device
-    emu_media_path /devices/platform/goldfish_mmc.0/mmc_host/mmc0
-
-    media_type     mmc
-    mount_point    /sdcard
-    ums_path       /devices/platform/usb_mass_storage/lun0
-}
diff --git a/ide/eclipse/.classpath b/ide/eclipse/.classpath
index 67d2bc6..d2d651d 100644
--- a/ide/eclipse/.classpath
+++ b/ide/eclipse/.classpath
@@ -1,22 +1,22 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="packages/apps/Bluetooth/src"/>
+	<classpathentry kind="src" path="packages/apps/Camera/src"/>
 	<classpathentry kind="src" path="packages/apps/Browser/src"/>
 	<classpathentry kind="src" path="packages/apps/Calendar/src"/>
 	<classpathentry kind="src" path="packages/apps/Calculator/src"/>
-	<classpathentry kind="src" path="packages/apps/Camera/src"/>
 	<classpathentry kind="src" path="packages/apps/CertInstaller/src"/>
 	<classpathentry kind="src" path="packages/apps/Contacts/src"/>
 	<classpathentry kind="src" path="packages/apps/DeskClock/src"/>
 	<classpathentry kind="src" path="packages/apps/Email/src"/>
 	<classpathentry kind="src" path="packages/apps/Email/emailcommon/src"/>
-	<classpathentry kind="src" path="packages/apps/Exchange/exchange2/src"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/src"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/src_pd"/>
 	<classpathentry kind="src" path="packages/apps/Gallery2/gallerycommon/src"/>
 	<classpathentry kind="src" path="packages/apps/HTMLViewer/src"/>
 	<classpathentry kind="src" path="packages/apps/Launcher2/src"/>
 	<classpathentry kind="src" path="packages/apps/Mms/src"/>
+	<classpathentry kind="src" path="packages/apps/Nfc/src"/>
 	<classpathentry kind="src" path="packages/apps/PackageInstaller/src"/>
 	<classpathentry kind="src" path="packages/apps/Phone/src"/>
 	<classpathentry kind="src" path="packages/apps/QuickSearchBox/src"/>
@@ -24,6 +24,7 @@
 	<classpathentry kind="src" path="packages/apps/Settings/src"/>
 	<classpathentry kind="src" path="packages/apps/SoundRecorder/src"/>
 	<classpathentry kind="src" path="packages/apps/Stk/src"/>
+	<classpathentry kind="src" path="packages/apps/UnifiedEmail/src"/>
 	<classpathentry kind="src" path="packages/apps/VoiceDialer/src"/>
 	<classpathentry kind="src" path="packages/providers/CalendarProvider/src"/>
 	<classpathentry kind="src" path="packages/providers/ContactsProvider/src"/>
@@ -41,12 +42,15 @@
 	<classpathentry kind="src" path="frameworks/base/icu4j/java"/>
 	<classpathentry kind="src" path="frameworks/base/keystore/java"/>
 	<classpathentry kind="src" path="frameworks/base/location/java"/>
+	<classpathentry kind="src" path="frameworks/base/location/lib/java"/>
 	<classpathentry kind="src" path="frameworks/base/media/java"/>
 	<classpathentry kind="src" path="frameworks/base/media/mca/effect/java"/>
 	<classpathentry kind="src" path="frameworks/base/media/mca/filterfw/java"/>
 	<classpathentry kind="src" path="frameworks/base/media/mca/filterpacks/java"/>
+	<classpathentry kind="src" path="frameworks/base/nfc-extras/java"/>
 	<classpathentry kind="src" path="frameworks/base/obex"/>
 	<classpathentry kind="src" path="frameworks/base/opengl/java"/>
+	<classpathentry kind="src" path="frameworks/base/packages/FusedLocation/src"/>
 	<classpathentry kind="src" path="frameworks/base/packages/SettingsProvider/src"/>
 	<classpathentry kind="src" path="frameworks/base/packages/SystemUI/src"/>
 	<classpathentry kind="src" path="frameworks/base/policy/src"/>
@@ -59,6 +63,7 @@
 	<classpathentry kind="src" path="frameworks/ex/carousel/java"/>
 	<classpathentry kind="src" path="frameworks/ex/chips/src"/>
 	<classpathentry kind="src" path="frameworks/ex/common/java"/>
+	<classpathentry kind="src" path="frameworks/ex/photoviewer/src"/>
 	<classpathentry kind="src" path="frameworks/ex/variablespeed/src"/>
 	<classpathentry kind="src" path="frameworks/opt/calendar/src"/>
 	<classpathentry kind="src" path="frameworks/opt/telephony/src/java"/>
@@ -95,8 +100,7 @@
 	<classpathentry kind="src" path="out/target/common/obj/APPS/Phone_intermediates/src/src"/>
 	<classpathentry kind="src" path="out/target/common/obj/APPS/QuickSearchBox_intermediates/src/src"/>
 	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/android-common-carousel_intermediates/src/renderscript/src"/>
-	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/com.android.emailcommon_intermediates/src/src"/>
-	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/filterfw_intermediates/classes-jarjar.jar"/>
+	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/com.android.emailcommon2_intermediates/src/src"/>
 	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/core/java"/>
 	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/keystore/java"/>
 	<classpathentry kind="src" path="out/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/location/java"/>
@@ -112,7 +116,6 @@
 	<classpathentry kind="src" path="external/libphonenumber/java/src"/>
 	<classpathentry kind="src" path="external/nist-sip/java"/>
 	<classpathentry kind="src" path="external/tagsoup/src"/>
-	<classpathentry excluding="src/" kind="src" path="out/target/common/obj/JAVA_LIBRARIES/com.android.emailcommon_intermediates/src"/>
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/bouncycastle_intermediates/classes-jarjar.jar"/>
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/android-support-v4_intermediates/javalib.jar"/>
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/core-junit_intermediates/classes.jar"/>
@@ -120,5 +123,6 @@
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/android-common_intermediates/javalib.jar"/>
 	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/guava_intermediates/javalib.jar"/>
 	<classpathentry kind="lib" path="packages/apps/Calculator/arity-2.1.2.jar"/>
+	<classpathentry kind="lib" path="out/target/common/obj/JAVA_LIBRARIES/junit-runner_intermediates/javalib.jar"/>
 	<classpathentry kind="output" path="out/target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates/classes"/>
 </classpath>
diff --git a/ide/eclipse/android-include-paths.xml b/ide/eclipse/android-include-paths.xml
index bb038a2..0e60642 100644
--- a/ide/eclipse/android-include-paths.xml
+++ b/ide/eclipse/android-include-paths.xml
@@ -4,8 +4,7 @@
 <language name="holder for library settings">
 
 </language>
-<language name="GNU C++">
-<includepath>${ProjDirPath}/prebuilt/darwin-x86/toolchain/arm-linux-androideabi-4.4.x/lib/gcc/arm-linux-androideabi/4.4.3/include</includepath>
+<language name="Assembly">
 <includepath>${ProjDirPath}/system/core/include/arch/linux-arm</includepath>
 <includepath>${ProjDirPath}/system/core/include</includepath>
 <includepath>${ProjDirPath}/bionic/libc/arch-arm/include</includepath>
@@ -17,21 +16,41 @@
 <includepath>${ProjDirPath}/bionic/libm/include</includepath>
 <includepath>${ProjDirPath}/bionic/libm/include/arm</includepath>
 <includepath>${ProjDirPath}/bionic/libthread_db/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include/nativehelper</includepath>
 <includepath>${ProjDirPath}/hardware/libhardware/include</includepath>
 <includepath>${ProjDirPath}/hardware/libhardware_legacy/include</includepath>
 <includepath>${ProjDirPath}/hardware/ril/include</includepath>
+<includepath>${ProjDirPath}/frameworks/base/include</includepath>
 <includepath>${ProjDirPath}/frameworks/native/include</includepath>
 <includepath>${ProjDirPath}/frameworks/native/opengl/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/native/include</includepath>
 <includepath>${ProjDirPath}/external/skia/include</includepath>
 <includepath>${ProjDirPath}/external/skia/include/core</includepath>
+<includepath>${ProjDirPath}/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.6/lib/gcc/arm-linux-androideabi/4.6.x-google/include</includepath>
+
+</language>
+<language name="GNU C++">
+<includepath>${ProjDirPath}/system/core/include/arch/linux-arm</includepath>
+<includepath>${ProjDirPath}/system/core/include</includepath>
+<includepath>${ProjDirPath}/bionic/libc/arch-arm/include</includepath>
+<includepath>${ProjDirPath}/bionic/libc/include</includepath>
+<includepath>${ProjDirPath}/bionic/libstdc++/include</includepath>
+<includepath>${ProjDirPath}/bionic/libc/kernel/common</includepath>
+<includepath>${ProjDirPath}/bionic/libc/kernel/common/linux</includepath>
+<includepath>${ProjDirPath}/bionic/libc/kernel/arch-arm</includepath>
+<includepath>${ProjDirPath}/bionic/libm/include</includepath>
+<includepath>${ProjDirPath}/bionic/libm/include/arm</includepath>
+<includepath>${ProjDirPath}/bionic/libthread_db/include</includepath>
+<includepath>${ProjDirPath}/hardware/libhardware/include</includepath>
+<includepath>${ProjDirPath}/hardware/libhardware_legacy/include</includepath>
+<includepath>${ProjDirPath}/hardware/ril/include</includepath>
+<includepath>${ProjDirPath}/frameworks/base/include</includepath>
+<includepath>${ProjDirPath}/frameworks/native/include</includepath>
+<includepath>${ProjDirPath}/frameworks/native/opengl/include</includepath>
+<includepath>${ProjDirPath}/external/skia/include</includepath>
+<includepath>${ProjDirPath}/external/skia/include/core</includepath>
+<includepath>${ProjDirPath}/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.6/lib/gcc/arm-linux-androideabi/4.6.x-google/include</includepath>
 
 </language>
 <language name="GNU C">
-<includepath>${ProjDirPath}/prebuilt/darwin-x86/toolchain/arm-linux-androideabi-4.4.x/lib/gcc/arm-linux-androideabi/4.4.3/include</includepath>
 <includepath>${ProjDirPath}/system/core/include/arch/linux-arm</includepath>
 <includepath>${ProjDirPath}/system/core/include</includepath>
 <includepath>${ProjDirPath}/bionic/libc/arch-arm/include</includepath>
@@ -43,43 +62,15 @@
 <includepath>${ProjDirPath}/bionic/libm/include</includepath>
 <includepath>${ProjDirPath}/bionic/libm/include/arm</includepath>
 <includepath>${ProjDirPath}/bionic/libthread_db/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include/nativehelper</includepath>
 <includepath>${ProjDirPath}/hardware/libhardware/include</includepath>
 <includepath>${ProjDirPath}/hardware/libhardware_legacy/include</includepath>
 <includepath>${ProjDirPath}/hardware/ril/include</includepath>
+<includepath>${ProjDirPath}/frameworks/base/include</includepath>
 <includepath>${ProjDirPath}/frameworks/native/include</includepath>
 <includepath>${ProjDirPath}/frameworks/native/opengl/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/native/include</includepath>
 <includepath>${ProjDirPath}/external/skia/include</includepath>
 <includepath>${ProjDirPath}/external/skia/include/core</includepath>
-
-</language>
-<language name="Assembly">
-<includepath>${ProjDirPath}/prebuilt/darwin-x86/toolchain/arm-linux-androideabi-4.4.x/lib/gcc/arm-linux-androideabi/4.4.3/include</includepath>
-<includepath>${ProjDirPath}/system/core/include/arch/linux-arm</includepath>
-<includepath>${ProjDirPath}/system/core/include</includepath>
-<includepath>${ProjDirPath}/bionic/libc/arch-arm/include</includepath>
-<includepath>${ProjDirPath}/bionic/libc/include</includepath>
-<includepath>${ProjDirPath}/bionic/libstdc++/include</includepath>
-<includepath>${ProjDirPath}/bionic/libc/kernel/common</includepath>
-<includepath>${ProjDirPath}/bionic/libc/kernel/common/linux</includepath>
-<includepath>${ProjDirPath}/bionic/libc/kernel/arch-arm</includepath>
-<includepath>${ProjDirPath}/bionic/libm/include</includepath>
-<includepath>${ProjDirPath}/bionic/libm/include/arm</includepath>
-<includepath>${ProjDirPath}/bionic/libthread_db/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include</includepath>
-<includepath>${ProjDirPath}/dalvik/libnativehelper/include/nativehelper</includepath>
-<includepath>${ProjDirPath}/hardware/libhardware/include</includepath>
-<includepath>${ProjDirPath}/hardware/libhardware_legacy/include</includepath>
-<includepath>${ProjDirPath}/hardware/ril/include</includepath>
-<includepath>${ProjDirPath}/frameworks/native/include</includepath>
-<includepath>${ProjDirPath}/frameworks/native/opengl/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/include</includepath>
-<includepath>${ProjDirPath}/frameworks/base/native/include</includepath>
-<includepath>${ProjDirPath}/external/skia/include</includepath>
-<includepath>${ProjDirPath}/external/skia/include/core</includepath>
+<includepath>${ProjDirPath}/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.6/lib/gcc/arm-linux-androideabi/4.6.x-google/include</includepath>
 
 </language>
 </section>
diff --git a/ide/intellij/codestyles/AndroidStyle.xml b/ide/intellij/codestyles/AndroidStyle.xml
index 113ffca..cd6beb4 100644
--- a/ide/intellij/codestyles/AndroidStyle.xml
+++ b/ide/intellij/codestyles/AndroidStyle.xml
@@ -42,7 +42,7 @@
       <package name="" withSubpackages="true" />
     </value>
   </option>
-  <option name="RIGHT_MARGIN" value="80" />
+  <option name="RIGHT_MARGIN" value="100" />
   <option name="CALL_PARAMETERS_WRAP" value="1" />
   <option name="METHOD_PARAMETERS_WRAP" value="1" />
   <option name="EXTENDS_LIST_WRAP" value="1" />
diff --git a/ndk/platforms/android-9/samples/native-activity/Android.mk b/ndk/platforms/android-9/samples/native-activity/Android.mk
index 73b3d87..a092f41 100644
--- a/ndk/platforms/android-9/samples/native-activity/Android.mk
+++ b/ndk/platforms/android-9/samples/native-activity/Android.mk
@@ -39,7 +39,6 @@
 	jni/main.c \
 	../../../../sources/android/native_app_glue/android_native_app_glue.c
 
-LOCAL_NDK_VERSION := 4
 LOCAL_SDK_VERSION := 8
 
 LOCAL_SHARED_LIBRARIES := liblog libandroid libEGL libGLESv1_CM
diff --git a/samples/ApiDemos/AndroidManifest.xml b/samples/ApiDemos/AndroidManifest.xml
index 54e9723..817b23a 100644
--- a/samples/ApiDemos/AndroidManifest.xml
+++ b/samples/ApiDemos/AndroidManifest.xml
@@ -36,7 +36,7 @@
     <!-- For android.media.audiofx.Visualizer -->
     <uses-permission android:name="android.permission.RECORD_AUDIO" />
 
-    <uses-sdk android:minSdkVersion="4" android:targetSdkVersion="14" />
+    <uses-sdk android:minSdkVersion="4" android:targetSdkVersion="16" />
 
     <!-- We will request access to the camera, saying we require a camera
          of some sort but not one with autofocus capability. -->
@@ -47,7 +47,8 @@
     <application android:name="ApiDemosApplication"
             android:label="@string/activity_sample_code"
             android:icon="@drawable/app_sample_code"
-            android:hardwareAccelerated="true">
+            android:hardwareAccelerated="true"
+            android:supportsRtl="true">
 
         <!-- This is how we can request a library but still allow the app
              to be installed if it doesn't exist. -->
diff --git a/samples/ApiDemos/src/com/example/android/apis/app/ActionBarDisplayOptions.java b/samples/ApiDemos/src/com/example/android/apis/app/ActionBarDisplayOptions.java
index 5585c91..257f0cd 100644
--- a/samples/ApiDemos/src/com/example/android/apis/app/ActionBarDisplayOptions.java
+++ b/samples/ApiDemos/src/com/example/android/apis/app/ActionBarDisplayOptions.java
@@ -93,18 +93,18 @@
             case R.id.cycle_custom_gravity:
                 ActionBar.LayoutParams lp = (ActionBar.LayoutParams) mCustomView.getLayoutParams();
                 int newGravity = 0;
-                switch (lp.gravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
-                    case Gravity.LEFT:
+                switch (lp.gravity & Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK) {
+                    case Gravity.START:
                         newGravity = Gravity.CENTER_HORIZONTAL;
                         break;
                     case Gravity.CENTER_HORIZONTAL:
-                        newGravity = Gravity.RIGHT;
+                        newGravity = Gravity.END;
                         break;
-                    case Gravity.RIGHT:
-                        newGravity = Gravity.LEFT;
+                    case Gravity.END:
+                        newGravity = Gravity.START;
                         break;
                 }
-                lp.gravity = lp.gravity & ~Gravity.HORIZONTAL_GRAVITY_MASK | newGravity;
+                lp.gravity = lp.gravity & ~Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK | newGravity;
                 bar.setCustomView(mCustomView, lp);
                 return;
         }
diff --git a/samples/ApiDemos/src/com/example/android/apis/app/FragmentLayout.java b/samples/ApiDemos/src/com/example/android/apis/app/FragmentLayout.java
index 730f4d4..572173f 100644
--- a/samples/ApiDemos/src/com/example/android/apis/app/FragmentLayout.java
+++ b/samples/ApiDemos/src/com/example/android/apis/app/FragmentLayout.java
@@ -150,7 +150,11 @@
                     // Execute a transaction, replacing any existing fragment
                     // with this one inside the frame.
                     FragmentTransaction ft = getFragmentManager().beginTransaction();
-                    ft.replace(R.id.details, details);
+                    if (index == 0) {
+                        ft.replace(R.id.details, details);
+                    } else {
+                        ft.replace(R.id.a_item, details);
+                    }
                     ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
                     ft.commit();
                 }
diff --git a/samples/RenderScript/HelloCompute/Android.mk b/samples/RenderScript/HelloCompute/Android.mk
index e19f351..27798c4 100644
--- a/samples/RenderScript/HelloCompute/Android.mk
+++ b/samples/RenderScript/HelloCompute/Android.mk
@@ -23,5 +23,6 @@
                    $(call all-renderscript-files-under, src)
 
 LOCAL_PACKAGE_NAME := RsHelloCompute
+LOCAL_SDK_VERSION := 14
 
 include $(BUILD_PACKAGE)
diff --git a/samples/Support4Demos/src/com/example/android/supportv4/app/FragmentLayoutSupport.java b/samples/Support4Demos/src/com/example/android/supportv4/app/FragmentLayoutSupport.java
index 25ec4a3..8be83a6 100644
--- a/samples/Support4Demos/src/com/example/android/supportv4/app/FragmentLayoutSupport.java
+++ b/samples/Support4Demos/src/com/example/android/supportv4/app/FragmentLayoutSupport.java
@@ -153,7 +153,11 @@
                     // Execute a transaction, replacing any existing fragment
                     // with this one inside the frame.
                     FragmentTransaction ft = getFragmentManager().beginTransaction();
-                    ft.replace(R.id.details, details);
+                    if (index == 0) {
+                        ft.replace(R.id.details, details);
+                    } else {
+                        ft.replace(R.id.a_item, details);
+                    }
                     ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
                     ft.commit();
                 }
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/body.png b/samples/WeatherListWidget/res/drawable-hdpi/body.png
deleted file mode 100644
index 17d303f..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/body.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/footer.png b/samples/WeatherListWidget/res/drawable-hdpi/footer.png
deleted file mode 100644
index 43962f7..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/footer.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/header.9.png b/samples/WeatherListWidget/res/drawable-hdpi/header.9.png
deleted file mode 100644
index 5f34768..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/header.9.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/item_bg_dark.png b/samples/WeatherListWidget/res/drawable-hdpi/item_bg_dark.png
deleted file mode 100644
index f5886bd..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/item_bg_dark.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/item_bg_light.png b/samples/WeatherListWidget/res/drawable-hdpi/item_bg_light.png
deleted file mode 100644
index e8b5aaf..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/item_bg_light.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/refresh.png b/samples/WeatherListWidget/res/drawable-hdpi/refresh.png
deleted file mode 100644
index eaec9cb..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/refresh.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/refresh_pressed.png b/samples/WeatherListWidget/res/drawable-hdpi/refresh_pressed.png
deleted file mode 100644
index 34438b7..0000000
--- a/samples/WeatherListWidget/res/drawable-hdpi/refresh_pressed.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-hdpi/sunny.png b/samples/WeatherListWidget/res/drawable-hdpi/sunny.png
new file mode 100644
index 0000000..42785b9
--- /dev/null
+++ b/samples/WeatherListWidget/res/drawable-hdpi/sunny.png
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/body.png b/samples/WeatherListWidget/res/drawable-mdpi/body.png
deleted file mode 100644
index a08d03b..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/body.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/footer.png b/samples/WeatherListWidget/res/drawable-mdpi/footer.png
deleted file mode 100644
index d3960a7..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/footer.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/header.9.png b/samples/WeatherListWidget/res/drawable-mdpi/header.9.png
deleted file mode 100644
index 2372225..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/header.9.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/item_bg_dark.png b/samples/WeatherListWidget/res/drawable-mdpi/item_bg_dark.png
deleted file mode 100644
index a3ac9d7..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/item_bg_dark.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/item_bg_light.png b/samples/WeatherListWidget/res/drawable-mdpi/item_bg_light.png
deleted file mode 100644
index ec6f5aa..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/item_bg_light.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/refresh.png b/samples/WeatherListWidget/res/drawable-mdpi/refresh.png
deleted file mode 100644
index 006bcc5..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/refresh.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/refresh_pressed.png b/samples/WeatherListWidget/res/drawable-mdpi/refresh_pressed.png
deleted file mode 100644
index d8ca9b5..0000000
--- a/samples/WeatherListWidget/res/drawable-mdpi/refresh_pressed.png
+++ /dev/null
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-mdpi/sunny.png b/samples/WeatherListWidget/res/drawable-mdpi/sunny.png
new file mode 100644
index 0000000..9453447
--- /dev/null
+++ b/samples/WeatherListWidget/res/drawable-mdpi/sunny.png
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable-nodpi/preview.png b/samples/WeatherListWidget/res/drawable-nodpi/preview.png
index f0cbdaf..b9c8780 100644
--- a/samples/WeatherListWidget/res/drawable-nodpi/preview.png
+++ b/samples/WeatherListWidget/res/drawable-nodpi/preview.png
Binary files differ
diff --git a/samples/WeatherListWidget/res/drawable/refresh_button.xml b/samples/WeatherListWidget/res/drawable/refresh_button.xml
deleted file mode 100644
index 1c0017e..0000000
--- a/samples/WeatherListWidget/res/drawable/refresh_button.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<selector xmlns:android="http://schemas.android.com/apk/res/android">
-    <item android:state_pressed="true"
-        android:drawable="@drawable/refresh_pressed" /> <!-- pressed -->
-    <item android:drawable="@drawable/refresh" /> <!-- default -->
-</selector>
\ No newline at end of file
diff --git a/samples/WeatherListWidget/res/layout/dark_widget_item.xml b/samples/WeatherListWidget/res/layout/dark_widget_item.xml
deleted file mode 100644
index 1f920a2..0000000
--- a/samples/WeatherListWidget/res/layout/dark_widget_item.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2011 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<TextView xmlns:android="http://schemas.android.com/apk/res/android"
-    android:id="@+id/widget_item"
-    android:layout_width="match_parent"
-    android:layout_height="46dp"
-    android:paddingLeft="25dp"
-    android:gravity="center_vertical"
-    android:background="@drawable/item_bg_dark"
-    android:textColor="#e5e5e1"
-    android:textSize="24sp" />
diff --git a/samples/WeatherListWidget/res/layout/light_widget_item.xml b/samples/WeatherListWidget/res/layout/widget_item.xml
similarity index 89%
rename from samples/WeatherListWidget/res/layout/light_widget_item.xml
rename to samples/WeatherListWidget/res/layout/widget_item.xml
index bb2946f..c0002e5 100644
--- a/samples/WeatherListWidget/res/layout/light_widget_item.xml
+++ b/samples/WeatherListWidget/res/layout/widget_item.xml
@@ -19,6 +19,6 @@
     android:layout_height="46dp"
     android:paddingLeft="25dp"
     android:gravity="center_vertical"
-    android:background="@drawable/item_bg_light"
-    android:textColor="#e5e5e1"
-    android:textSize="24sp" />
+    android:background="#F0F0F0"
+    android:textColor="#232323"
+    android:textSize="20sp" />
diff --git a/samples/WeatherListWidget/res/layout/widget_layout.xml b/samples/WeatherListWidget/res/layout/widget_layout.xml
index 4c58fa7..f3eb6de 100644
--- a/samples/WeatherListWidget/res/layout/widget_layout.xml
+++ b/samples/WeatherListWidget/res/layout/widget_layout.xml
@@ -23,30 +23,37 @@
     android:layout_marginRight="@dimen/widget_margin_right">
     <!-- We define separate margins to allow for flexibility in twiddling the margins
          depending on device form factor and target SDK version. -->
-    <FrameLayout
+
+    <LinearLayout
         android:layout_width="match_parent"
-        android:layout_height="wrap_content">
+        android:layout_height="80dp"
+        android:background="#F8F8F8"
+        android:orientation="horizontal">
         <ImageView
-            android:id="@+id/header"
-            android:layout_width="match_parent"
+            android:id="@+id/city_weather"
+            android:layout_width="wrap_content"
             android:layout_height="wrap_content"
-            android:scaleType="fitXY"
-            android:src="@drawable/header" />
-        <ImageButton
-            android:id="@+id/refresh"
-            android:layout_width="56dp"
-            android:layout_height="39dp"
-            android:layout_gravity="right|top"
-            android:layout_marginRight="15dp"
-            android:layout_marginTop="20dp"
-            android:background="@drawable/refresh_button" />
-    </FrameLayout>
+            android:padding="12dp"
+            android:scaleType="fitStart"
+            android:adjustViewBounds="true"
+            android:src="@drawable/sunny" />
+        <TextView
+            android:id="@+id/city_name"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_gravity="center_vertical"
+            android:text="@string/city_name"
+            android:textAllCaps="true"
+            android:textColor="#232323"
+            android:textSize="24sp" />
+    </LinearLayout>
+
     <FrameLayout
         android:layout_width="match_parent"
         android:layout_height="match_parent"
         android:layout_weight="1"
         android:layout_gravity="center"
-        android:background="@drawable/body">
+        android:background="#F8F8F8">
         <ListView
             android:id="@+id/weather_list"
             android:layout_width="match_parent"
@@ -61,10 +68,16 @@
             android:text="@string/empty_view_text"
             android:textSize="20sp" />
     </FrameLayout>
-    <ImageView
-        android:id="@+id/footer"
+
+    <Button
+        android:id="@+id/refresh"
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
-        android:scaleType="fitXY"
-        android:src="@drawable/footer" />
+        android:padding="12dp"
+        android:gravity="center"
+        android:background="#F8F8F8"
+        android:text="@string/refresh"
+        android:textAllCaps="true"
+        android:textColor="#232323"
+        android:textSize="14sp" />
 </LinearLayout>
diff --git a/samples/WeatherListWidget/res/layout/widget_layout_small.xml b/samples/WeatherListWidget/res/layout/widget_layout_small.xml
new file mode 100644
index 0000000..20227b7
--- /dev/null
+++ b/samples/WeatherListWidget/res/layout/widget_layout_small.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:orientation="vertical"
+    android:layout_marginTop="@dimen/widget_margin_top"
+    android:layout_marginBottom="@dimen/widget_margin_bottom"
+    android:layout_marginLeft="@dimen/widget_margin_left"
+    android:layout_marginRight="@dimen/widget_margin_right">
+
+    <LinearLayout
+        android:layout_width="match_parent"
+        android:layout_height="80dp"
+        android:background="#F8F8F8"
+        android:orientation="horizontal">
+        <ImageView
+            android:id="@+id/city_weather"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:padding="12dp"
+            android:scaleType="fitStart"
+            android:adjustViewBounds="true"
+            android:src="@drawable/sunny" />
+        <TextView
+            android:id="@+id/city_name"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:layout_gravity="center_vertical"
+            android:text="@string/city_name"
+            android:textAllCaps="true"
+            android:textColor="#232323"
+            android:textSize="24sp" />
+    </LinearLayout>
+</LinearLayout>
diff --git a/samples/WeatherListWidget/res/values/strings.xml b/samples/WeatherListWidget/res/values/strings.xml
index 6542545..a3b7e06 100644
--- a/samples/WeatherListWidget/res/values/strings.xml
+++ b/samples/WeatherListWidget/res/values/strings.xml
@@ -15,6 +15,10 @@
 -->
 <resources>
     <string name="empty_view_text">No cities found...</string>
-    <string name="toast_format_string">%1$s says Hi!</string>
-    <string name="item_format_string">%1$d\u00B0 in %2$s</string>
+    <string name="toast_format_string">%1$s!</string>
+    <string name="item_format_string">%1$d\u00B0 on %2$s</string>
+    <string name="header_format_string">%1$d\u00B0 in %2$s</string>
+
+    <string name="refresh">Refresh</string>
+    <string name="city_name">San Francisco</string>
 </resources>
diff --git a/samples/WeatherListWidget/res/xml/widgetinfo.xml b/samples/WeatherListWidget/res/xml/widgetinfo.xml
index e6e9cf3..2e41943 100644
--- a/samples/WeatherListWidget/res/xml/widgetinfo.xml
+++ b/samples/WeatherListWidget/res/xml/widgetinfo.xml
@@ -15,12 +15,12 @@
 -->
 <appwidget-provider
   xmlns:android="http://schemas.android.com/apk/res/android"
-  android:minWidth="250dp"
+  android:minWidth="280dp"
   android:minHeight="180dp"
   android:updatePeriodMillis="1800000"
   android:initialLayout="@layout/widget_layout"
   android:resizeMode="vertical"
-  android:minResizeWidth="250dp"
-  android:minResizeHeight="110dp"
+  android:minResizeWidth="280dp"
+  android:minResizeHeight="70dp"
   android:previewImage="@drawable/preview">
 </appwidget-provider>
diff --git a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherDataProvider.java b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherDataProvider.java
index 92a1cb3..ede0039 100644
--- a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherDataProvider.java
+++ b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherDataProvider.java
@@ -36,12 +36,12 @@
  * data will only be stored in memory.
  */
 class WeatherDataPoint {
-    String city;
+    String day;
     int degrees;
 
-    WeatherDataPoint(String c, int d) {
-        city = c;
-        degrees = d;
+    WeatherDataPoint(String d, int deg) {
+        day = d;
+        degrees = deg;
     }
 }
 
@@ -53,7 +53,7 @@
         Uri.parse("content://com.example.android.weatherlistwidget.provider");
     public static class Columns {
         public static final String ID = "_id";
-        public static final String CITY = "city";
+        public static final String DAY = "day";
         public static final String TEMPERATURE = "temperature";
     }
 
@@ -67,17 +67,20 @@
     @Override
     public boolean onCreate() {
         // We are going to initialize the data provider with some default values
-        sData.add(new WeatherDataPoint("San Francisco", 13));
-        sData.add(new WeatherDataPoint("New York", 1));
-        sData.add(new WeatherDataPoint("Seattle", 7));
-        sData.add(new WeatherDataPoint("Boston", 4));
-        sData.add(new WeatherDataPoint("Miami", 22));
-        sData.add(new WeatherDataPoint("Toronto", -10));
-        sData.add(new WeatherDataPoint("Calgary", -13));
-        sData.add(new WeatherDataPoint("Tokyo", 8));
-        sData.add(new WeatherDataPoint("Kyoto", 11));
-        sData.add(new WeatherDataPoint("London", -1));
-        sData.add(new WeatherDataPoint("Nomanisan", 27));
+        sData.add(new WeatherDataPoint("Monday", 13));
+        sData.add(new WeatherDataPoint("Tuesday", 1));
+        sData.add(new WeatherDataPoint("Wednesday", 7));
+        sData.add(new WeatherDataPoint("Thursday", 4));
+        sData.add(new WeatherDataPoint("Friday", 22));
+        sData.add(new WeatherDataPoint("Saturday", -10));
+        sData.add(new WeatherDataPoint("Sunday", -13));
+        sData.add(new WeatherDataPoint("Monday", 8));
+        sData.add(new WeatherDataPoint("Tuesday", 11));
+        sData.add(new WeatherDataPoint("Wednesday", -1));
+        sData.add(new WeatherDataPoint("Thursday", 27));
+        sData.add(new WeatherDataPoint("Friday", 27));
+        sData.add(new WeatherDataPoint("Saturday", 27));
+        sData.add(new WeatherDataPoint("Sunday", 27));
         return true;
     }
 
@@ -89,17 +92,17 @@
         // In this sample, we only query without any parameters, so we can just return a cursor to
         // all the weather data.
         final MatrixCursor c = new MatrixCursor(
-                new String[]{ Columns.ID, Columns.CITY, Columns.TEMPERATURE });
+                new String[]{ Columns.ID, Columns.DAY, Columns.TEMPERATURE });
         for (int i = 0; i < sData.size(); ++i) {
             final WeatherDataPoint data = sData.get(i);
-            c.addRow(new Object[]{ new Integer(i), data.city, new Integer(data.degrees) });
+            c.addRow(new Object[]{ new Integer(i), data.day, new Integer(data.degrees) });
         }
         return c;
     }
 
     @Override
     public String getType(Uri uri) {
-        return "vnd.android.cursor.dir/vnd.weatherlistwidget.citytemperature";
+        return "vnd.android.cursor.dir/vnd.weatherlistwidget.temperature";
     }
 
     @Override
@@ -123,7 +126,7 @@
         // temperature values.
         final int index = Integer.parseInt(uri.getPathSegments().get(0));
         final MatrixCursor c = new MatrixCursor(
-                new String[]{ Columns.ID, Columns.CITY, Columns.TEMPERATURE });
+                new String[]{ Columns.ID, Columns.DAY, Columns.TEMPERATURE });
         assert(0 <= index && index < sData.size());
         final WeatherDataPoint data = sData.get(index);
         data.degrees = values.getAsInteger(Columns.TEMPERATURE);
@@ -134,4 +137,4 @@
         return 1;
     }
 
-}
\ No newline at end of file
+}
diff --git a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetProvider.java b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetProvider.java
index 2f2b347..ea3f944 100644
--- a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetProvider.java
+++ b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetProvider.java
@@ -28,6 +28,7 @@
 import android.database.Cursor;
 import android.database.ContentObserver;
 import android.net.Uri;
+import android.os.Bundle;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.widget.RemoteViews;
@@ -64,11 +65,15 @@
 public class WeatherWidgetProvider extends AppWidgetProvider {
     public static String CLICK_ACTION = "com.example.android.weatherlistwidget.CLICK";
     public static String REFRESH_ACTION = "com.example.android.weatherlistwidget.REFRESH";
-    public static String EXTRA_CITY_ID = "com.example.android.weatherlistwidget.city";
+    public static String EXTRA_DAY_ID = "com.example.android.weatherlistwidget.day";
 
     private static HandlerThread sWorkerThread;
     private static Handler sWorkerQueue;
     private static WeatherDataProviderObserver sDataObserver;
+    private static final int sMaxDegrees = 96;
+
+    private boolean mIsLargeLayout = true;
+    private int mHeaderWeatherState = 0;
 
     public WeatherWidgetProvider() {
         // Start the worker thread
@@ -77,6 +82,8 @@
         sWorkerQueue = new Handler(sWorkerThread.getLooper());
     }
 
+    // XXX: clear the worker queue if we are destroyed?
+
     @Override
     public void onEnabled(Context context) {
         // Register for external updates to the data to trigger an update of the widget.  When using
@@ -109,7 +116,6 @@
                     final Cursor c = r.query(WeatherDataProvider.CONTENT_URI, null, null, null, 
                             null);
                     final int count = c.getCount();
-                    final int maxDegrees = 96;
 
                     // We disable the data changed observer temporarily since each of the updates
                     // will trigger an onChange() in our data observer.
@@ -118,7 +124,7 @@
                         final Uri uri = ContentUris.withAppendedId(WeatherDataProvider.CONTENT_URI, i);
                         final ContentValues values = new ContentValues();
                         values.put(WeatherDataProvider.Columns.TEMPERATURE,
-                                new Random().nextInt(maxDegrees));
+                                new Random().nextInt(sMaxDegrees));
                         r.update(uri, values, null, null);
                     }
                     r.registerContentObserver(WeatherDataProvider.CONTENT_URI, true, sDataObserver);
@@ -128,29 +134,31 @@
                     mgr.notifyAppWidgetViewDataChanged(mgr.getAppWidgetIds(cn), R.id.weather_list);
                 }
             });
+
+            final int appWidgetId = intent.getIntExtra(AppWidgetManager.EXTRA_APPWIDGET_ID,
+                    AppWidgetManager.INVALID_APPWIDGET_ID);
         } else if (action.equals(CLICK_ACTION)) {
             // Show a toast
             final int appWidgetId = intent.getIntExtra(AppWidgetManager.EXTRA_APPWIDGET_ID,
                     AppWidgetManager.INVALID_APPWIDGET_ID);
-            final String city = intent.getStringExtra(EXTRA_CITY_ID);
+            final String day = intent.getStringExtra(EXTRA_DAY_ID);
             final String formatStr = ctx.getResources().getString(R.string.toast_format_string);
-            Toast.makeText(ctx, String.format(formatStr, city), Toast.LENGTH_SHORT).show();
+            Toast.makeText(ctx, String.format(formatStr, day), Toast.LENGTH_SHORT).show();
         }
 
         super.onReceive(ctx, intent);
     }
 
-    @Override
-    public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
-        // Update each of the widgets with the remote adapter
-        for (int i = 0; i < appWidgetIds.length; ++i) {
+    private RemoteViews buildLayout(Context context, int appWidgetId, boolean largeLayout) {
+        RemoteViews rv;
+        if (largeLayout) {
             // Specify the service to provide data for the collection widget.  Note that we need to
             // embed the appWidgetId via the data otherwise it will be ignored.
             final Intent intent = new Intent(context, WeatherWidgetService.class);
-            intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, appWidgetIds[i]);
+            intent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, appWidgetId);
             intent.setData(Uri.parse(intent.toUri(Intent.URI_INTENT_SCHEME)));
-            final RemoteViews rv = new RemoteViews(context.getPackageName(), R.layout.widget_layout);
-            rv.setRemoteAdapter(appWidgetIds[i], R.id.weather_list, intent);
+            rv = new RemoteViews(context.getPackageName(), R.layout.widget_layout);
+            rv.setRemoteAdapter(appWidgetId, R.id.weather_list, intent);
 
             // Set the empty view to be displayed if the collection is empty.  It must be a sibling
             // view of the collection view.
@@ -161,7 +169,7 @@
             // ignored otherwise.
             final Intent onClickIntent = new Intent(context, WeatherWidgetProvider.class);
             onClickIntent.setAction(WeatherWidgetProvider.CLICK_ACTION);
-            onClickIntent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, appWidgetIds[i]);
+            onClickIntent.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, appWidgetId);
             onClickIntent.setData(Uri.parse(onClickIntent.toUri(Intent.URI_INTENT_SCHEME)));
             final PendingIntent onClickPendingIntent = PendingIntent.getBroadcast(context, 0,
                     onClickIntent, PendingIntent.FLAG_UPDATE_CURRENT);
@@ -174,8 +182,53 @@
                     refreshIntent, PendingIntent.FLAG_UPDATE_CURRENT);
             rv.setOnClickPendingIntent(R.id.refresh, refreshPendingIntent);
 
-            appWidgetManager.updateAppWidget(appWidgetIds[i], rv);
+            // Restore the minimal header
+            rv.setTextViewText(R.id.city_name, context.getString(R.string.city_name));
+        } else {
+            rv = new RemoteViews(context.getPackageName(), R.layout.widget_layout_small);
+
+            // Update the header to reflect the weather for "today"
+            Cursor c = context.getContentResolver().query(WeatherDataProvider.CONTENT_URI, null,
+                    null, null, null);
+            if (c.moveToPosition(0)) {
+                int tempColIndex = c.getColumnIndex(WeatherDataProvider.Columns.TEMPERATURE);
+                int temp = c.getInt(tempColIndex);
+                String formatStr = context.getResources().getString(R.string.header_format_string);
+                String header = String.format(formatStr, temp,
+                        context.getString(R.string.city_name));
+                rv.setTextViewText(R.id.city_name, header);
+            }
+            c.close();
+        }
+        return rv;
+    }
+
+    @Override
+    public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
+        // Update each of the widgets with the remote adapter
+        for (int i = 0; i < appWidgetIds.length; ++i) {
+            RemoteViews layout = buildLayout(context, appWidgetIds[i], mIsLargeLayout);
+            appWidgetManager.updateAppWidget(appWidgetIds[i], layout);
         }
         super.onUpdate(context, appWidgetManager, appWidgetIds);
     }
+
+    @Override
+    public void onAppWidgetOptionsChanged(Context context, AppWidgetManager appWidgetManager,
+            int appWidgetId, Bundle newOptions) {
+
+        int minWidth = newOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_WIDTH);
+        int maxWidth = newOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MAX_WIDTH);
+        int minHeight = newOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_HEIGHT);
+        int maxHeight = newOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MAX_HEIGHT);
+
+        RemoteViews layout;
+        if (minHeight < 100) {
+            mIsLargeLayout = false;
+        } else {
+            mIsLargeLayout = true;
+        }
+        layout = buildLayout(context, appWidgetId, mIsLargeLayout);
+        appWidgetManager.updateAppWidget(appWidgetId, layout);
+    }
 }
\ No newline at end of file
diff --git a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetService.java b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetService.java
index 1d3c349..4780e80 100644
--- a/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetService.java
+++ b/samples/WeatherListWidget/src/com/example/android/weatherlistwidget/WeatherWidgetService.java
@@ -70,28 +70,26 @@
 
     public RemoteViews getViewAt(int position) {
         // Get the data for this position from the content provider
-        String city = "Unknown City";
+        String day = "Unknown Day";
         int temp = 0;
         if (mCursor.moveToPosition(position)) {
-            final int cityColIndex = mCursor.getColumnIndex(WeatherDataProvider.Columns.CITY);
+            final int dayColIndex = mCursor.getColumnIndex(WeatherDataProvider.Columns.DAY);
             final int tempColIndex = mCursor.getColumnIndex(
                     WeatherDataProvider.Columns.TEMPERATURE);
-            city = mCursor.getString(cityColIndex);
+            day = mCursor.getString(dayColIndex);
             temp = mCursor.getInt(tempColIndex);
         }
 
-        // Return a proper item with the proper city and temperature.  Just for fun, we alternate
-        // the items to make the list easier to read.
+        // Return a proper item with the proper day and temperature
         final String formatStr = mContext.getResources().getString(R.string.item_format_string);
-        final int itemId = (position % 2 == 0 ? R.layout.light_widget_item
-                : R.layout.dark_widget_item);
+        final int itemId = R.layout.widget_item;
         RemoteViews rv = new RemoteViews(mContext.getPackageName(), itemId);
-        rv.setTextViewText(R.id.widget_item, String.format(formatStr, temp, city));
+        rv.setTextViewText(R.id.widget_item, String.format(formatStr, temp, day));
 
         // Set the click intent so that we can handle it and show a toast message
         final Intent fillInIntent = new Intent();
         final Bundle extras = new Bundle();
-        extras.putString(WeatherWidgetProvider.EXTRA_CITY_ID, city);
+        extras.putString(WeatherWidgetProvider.EXTRA_DAY_ID, day);
         fillInIntent.putExtras(extras);
         rv.setOnClickFillInIntent(R.id.widget_item, fillInIntent);
 
diff --git a/sdk/doc_source.prop_template b/sdk/doc_source.prop_template
index 523d6bd..d3cdfd5 100644
--- a/sdk/doc_source.prop_template
+++ b/sdk/doc_source.prop_template
@@ -1,4 +1,4 @@
 Pkg.UserSrc=false
-Pkg.Revision=2
+Pkg.Revision=1
 AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
 AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
diff --git a/sdk/platform_source.prop_template b/sdk/platform_source.prop_template
index 6e5397e..c6756e8 100644
--- a/sdk/platform_source.prop_template
+++ b/sdk/platform_source.prop_template
@@ -2,7 +2,7 @@
 Pkg.UserSrc=false
 Platform.Version=${PLATFORM_VERSION}
 Platform.CodeName=Jelly Bean
-Pkg.Revision=2
+Pkg.Revision=1
 AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
 AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
 Layoutlib.Api=8
diff --git a/sdk_overlay/frameworks/base/core/res/res/values/config.xml b/sdk_overlay/frameworks/base/core/res/res/values/config.xml
index fdeb8bd..2cc8d7a 100644
--- a/sdk_overlay/frameworks/base/core/res/res/values/config.xml
+++ b/sdk_overlay/frameworks/base/core/res/res/values/config.xml
@@ -20,8 +20,10 @@
 <!-- These resources are around just to allow their values to be customized
      for different hardware and product builds. -->
 <resources>
-    <!-- Package name providing geocoder API support. -->
-    <string name="config_geocodeProviderPackageName" translatable="false">com.google.android.location</string>
+    <!-- Package name providing location API support. -->
+    <string-array name="config_overlay_locationProviderPackageNames" translatable="false">
+        <item>com.google.android.location</item>
+    </string-array>
 
     <bool name="config_voice_capable">true</bool>
 </resources>
diff --git a/testrunner/android_build.py b/testrunner/android_build.py
index 584ef52..a10d43b 100644
--- a/testrunner/android_build.py
+++ b/testrunner/android_build.py
@@ -42,7 +42,8 @@
   # TODO: does this need to be reimplemented to be like gettop() in envsetup.sh
   root_path = os.getenv("ANDROID_BUILD_TOP")
   if root_path is None:
-    logger.Log("Error: ANDROID_BUILD_TOP not defined. Please run envsetup.sh")
+    logger.Log("Error: ANDROID_BUILD_TOP not defined. Please run "
+               "envsetup.sh and lunch/choosecombo")
     raise errors.AbortError
   return root_path
 
@@ -109,7 +110,8 @@
   """
   path = os.getenv("ANDROID_PRODUCT_OUT")
   if path is None:
-    logger.Log("Error: ANDROID_PRODUCT_OUT not defined. Please run envsetup.sh")
+    logger.Log("Error: ANDROID_PRODUCT_OUT not defined. Please run "
+               "envsetup.sh and lunch/choosecombo")
     raise errors.AbortError
   return path
 
diff --git a/tools/emulator/opengl/system/OpenglSystemCommon/gralloc_cb.h b/tools/emulator/opengl/system/OpenglSystemCommon/gralloc_cb.h
index e879409..a3fb8dd 100644
--- a/tools/emulator/opengl/system/OpenglSystemCommon/gralloc_cb.h
+++ b/tools/emulator/opengl/system/OpenglSystemCommon/gralloc_cb.h
@@ -25,22 +25,35 @@
 #define CB_HANDLE_NUM_INTS(nfds) (int)((sizeof(cb_handle_t) - (nfds)*sizeof(int)) / sizeof(int))
 
 //
+// Emulator-specific gralloc formats
+//
+enum {
+    // Request that gralloc select the proper format given the usage
+    // flags. Pass this as the format to gralloc_alloc, and then the concrete
+    // format can be found in cb_handle_t.format. It is invalid for
+    // cb_handle_t.format to be AUTO; it must be a concrete format in either the
+    // standard HAL_PIXEL_FORMAT enum or other values in this enum.
+    GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO = 0x100
+};
+
+//
 // Our buffer handle structure
 //
 struct cb_handle_t : public native_handle {
 
     cb_handle_t(int p_fd, int p_ashmemSize, int p_usage,
-                int p_width, int p_height,
+                int p_width, int p_height, int p_format,
                 int p_glFormat, int p_glType) :
         fd(p_fd),
         magic(BUFFER_HANDLE_MAGIC),
         usage(p_usage),
         width(p_width),
         height(p_height),
+        format(p_format),
         glFormat(p_glFormat),
         glType(p_glType),
         ashmemSize(p_ashmemSize),
-        ashmemBase(NULL),
+        ashmemBase(0),
         ashmemBasePid(0),
         mappedPid(0),
         lockedLeft(0),
@@ -88,6 +101,7 @@
     int usage;              // usage bits the buffer was created with
     int width;              // buffer width
     int height;             // buffer height
+    int format;             // real internal pixel format format
     int glFormat;           // OpenGL format enum used for host h/w color buffer
     int glType;             // OpenGL type enum used when uploading to host
     int ashmemSize;         // ashmem region size for the buffer (0 unless is HW_FB buffer or
diff --git a/tools/emulator/opengl/system/egl/egl.cpp b/tools/emulator/opengl/system/egl/egl.cpp
index ee195ac..da89c4d 100644
--- a/tools/emulator/opengl/system/egl/egl.cpp
+++ b/tools/emulator/opengl/system/egl/egl.cpp
@@ -263,10 +263,9 @@
 
 EGLBoolean egl_window_surface_t::init()
 {
-    if (nativeWindow->dequeueBuffer(nativeWindow, &buffer) != NO_ERROR) {
+    if (nativeWindow->dequeueBuffer_DEPRECATED(nativeWindow, &buffer) != NO_ERROR) {
         setErrorReturn(EGL_BAD_ALLOC, EGL_FALSE);
     }
-    nativeWindow->lockBuffer(nativeWindow, buffer);
 
     DEFINE_AND_VALIDATE_HOST_CONNECTION(EGL_FALSE);
     rcSurface = rcEnc->rcCreateWindowSurface(rcEnc, (uint32_t)config,
@@ -300,7 +299,7 @@
         rcEnc->rcDestroyWindowSurface(rcEnc, rcSurface);
     }
     if (buffer) {
-        nativeWindow->cancelBuffer(nativeWindow, buffer);
+        nativeWindow->cancelBuffer_DEPRECATED(nativeWindow, buffer);
     }
     nativeWindow->common.decRef(&nativeWindow->common);
 }
@@ -316,12 +315,11 @@
 
     rcEnc->rcFlushWindowColorBuffer(rcEnc, rcSurface);
 
-    nativeWindow->queueBuffer(nativeWindow, buffer);
-    if (nativeWindow->dequeueBuffer(nativeWindow, &buffer)) {
+    nativeWindow->queueBuffer_DEPRECATED(nativeWindow, buffer);
+    if (nativeWindow->dequeueBuffer_DEPRECATED(nativeWindow, &buffer)) {
         buffer = NULL;
         setErrorReturn(EGL_BAD_ALLOC, EGL_FALSE);
     }
-    nativeWindow->lockBuffer(nativeWindow, buffer);
 
     rcEnc->rcSetWindowColorBuffer(rcEnc, rcSurface,
             ((cb_handle_t *)(buffer->handle))->hostHandle);
@@ -1148,7 +1146,9 @@
     if (native_buffer->common.version != sizeof(android_native_buffer_t))
         setErrorReturn(EGL_BAD_PARAMETER, EGL_NO_IMAGE_KHR);
 
-    switch (native_buffer->format) {
+    cb_handle_t *cb = (cb_handle_t *)(native_buffer->handle);
+
+    switch (cb->format) {
         case HAL_PIXEL_FORMAT_RGBA_8888:
         case HAL_PIXEL_FORMAT_RGBX_8888:
         case HAL_PIXEL_FORMAT_RGB_888:
diff --git a/tools/emulator/opengl/system/gralloc/gralloc.cpp b/tools/emulator/opengl/system/gralloc/gralloc.cpp
index 4334835..dd5e515 100644
--- a/tools/emulator/opengl/system/gralloc/gralloc.cpp
+++ b/tools/emulator/opengl/system/gralloc/gralloc.cpp
@@ -133,8 +133,11 @@
     D("gralloc_alloc w=%d h=%d usage=0x%x\n", w, h, usage);
 
     gralloc_device_t *grdev = (gralloc_device_t *)dev;
-    if (!grdev || !pHandle || !pStride)
+    if (!grdev || !pHandle || !pStride) {
+        ALOGE("gralloc_alloc: Bad inputs (grdev: %p, pHandle: %p, pStride: %p",
+                grdev, pHandle, pStride);
         return -EINVAL;
+    }
 
     //
     // Validate usage: buffer cannot be written both by s/w and h/w access.
@@ -142,9 +145,38 @@
     bool sw_write = (0 != (usage & GRALLOC_USAGE_SW_WRITE_MASK));
     bool hw_write = (usage & GRALLOC_USAGE_HW_RENDER);
     if (hw_write && sw_write) {
+        ALOGE("gralloc_alloc: Mismatched usage flags: %d x %d, usage %x",
+                w, h, usage);
         return -EINVAL;
     }
     bool sw_read = (0 != (usage & GRALLOC_USAGE_SW_READ_MASK));
+    bool hw_cam_write = usage & GRALLOC_USAGE_HW_CAMERA_WRITE;
+    bool hw_cam_read = usage & GRALLOC_USAGE_HW_CAMERA_READ;
+    bool hw_vid_enc_read = usage & GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+    // Pick the right concrete pixel format given the endpoints as encoded in
+    // the usage bits.  Every end-point pair needs explicit listing here.
+    if (format == GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO) {
+        // Camera as producer
+        if (usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
+            if (usage & GRALLOC_USAGE_HW_TEXTURE) {
+                // Camera-to-display is RGBA
+                format = HAL_PIXEL_FORMAT_RGBA_8888;
+            } else if (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
+                // Camera-to-encoder is NV21
+                format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+            }
+        }
+
+        if (format == GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO) {
+            ALOGE("gralloc_alloc: Requested auto format selection, "
+                    "but no known format for this usage: %d x %d, usage %x",
+                    w, h, usage);
+            return -EINVAL;
+        }
+    }
+
+    bool yuv_format = false;
 
     int ashmem_size = 0;
     int stride = w;
@@ -185,14 +217,31 @@
         case HAL_PIXEL_FORMAT_RAW_SENSOR:
             bpp = 2;
             align = 16*bpp;
-            if (! (sw_read && sw_write) ) {
-                // Raw sensor data cannot be used by HW
+            if (! ((sw_read && hw_cam_write) || (sw_write && hw_cam_read) ) ) {
+                // Raw sensor data only goes to/from camera to CPU
                 return -EINVAL;
             }
+            // Not expecting to actually create any GL surfaces for this
             glFormat = GL_LUMINANCE;
             glType = GL_UNSIGNED_SHORT;
             break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            bpp = 1;
+            if (! (sw_read && hw_cam_write) ) {
+                // Blob data cannot be used by HW other than camera emulator
+                return -EINVAL;
+            }
+            // Not expecting to actually create any GL surfaces for this
+            glFormat = GL_LUMINANCE;
+            glType = GL_UNSIGNED_BYTE;
+            break;
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+            bpp = 0; // Chroma-subsampled format has fractional bpp
+            yuv_format = true;
+            // Not expecting to actually create any GL surfaces for this
+            break;
         default:
+            ALOGE("gralloc_alloc: Unknown format %d", format);
             return -EINVAL;
     }
 
@@ -201,16 +250,22 @@
         ashmem_size += sizeof(uint32_t);
     }
 
-    if (sw_read || sw_write) {
+    if (sw_read || sw_write || hw_cam_write || hw_vid_enc_read) {
         // keep space for image on guest memory if SW access is needed
-
-        size_t bpr = (w*bpp + (align-1)) & ~(align-1);
-        ashmem_size += (bpr * h);
-        stride = bpr / bpp;
+        // or if the camera is doing writing
+        if (yuv_format) {
+            // For NV21
+            ashmem_size += w * h * 3 / 2;
+            stride = w;
+        } else {
+            size_t bpr = (w*bpp + (align-1)) & ~(align-1);
+            ashmem_size += (bpr * h);
+            stride = bpr / bpp;
+        }
     }
 
-    D("gralloc_alloc ashmem_size=%d, stride=%d, tid %d\n", ashmem_size, stride,
-            gettid());
+    D("gralloc_alloc format=%d, ashmem_size=%d, stride=%d, tid %d\n", format,
+            ashmem_size, stride, gettid());
 
     //
     // Allocate space in ashmem if needed
@@ -229,7 +284,7 @@
     }
 
     cb_handle_t *cb = new cb_handle_t(fd, ashmem_size, usage,
-                                      w, h, glFormat, glType);
+                                      w, h, format, glFormat, glType);
 
     if (ashmem_size > 0) {
         //
@@ -248,8 +303,11 @@
 
     //
     // Allocate ColorBuffer handle on the host (only if h/w access is allowed)
+    // Only do this for some h/w usages, not all.
     //
-    if (usage & GRALLOC_USAGE_HW_MASK) {
+    if (usage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
+                    GRALLOC_USAGE_HW_2D | GRALLOC_USAGE_HW_COMPOSER |
+                    GRALLOC_USAGE_HW_FB) ) {
         DEFINE_HOST_CONNECTION;
         if (hostCon && rcEnc) {
             cb->hostHandle = rcEnc->rcCreateColorBuffer(rcEnc, w, h, glFormat);
@@ -542,14 +600,17 @@
     bool sw_write = (0 != (usage & GRALLOC_USAGE_SW_WRITE_MASK));
     bool hw_read = (usage & GRALLOC_USAGE_HW_TEXTURE);
     bool hw_write = (usage & GRALLOC_USAGE_HW_RENDER);
+    bool hw_vid_enc_read = (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER);
+    bool hw_cam_write = (usage & GRALLOC_USAGE_HW_CAMERA_WRITE);
     bool sw_read_allowed = (0 != (cb->usage & GRALLOC_USAGE_SW_READ_MASK));
     bool sw_write_allowed = (0 != (cb->usage & GRALLOC_USAGE_SW_WRITE_MASK));
 
     if ( (hw_read || hw_write) ||
-         (!sw_read && !sw_write) ||
+         (!sw_read && !sw_write && !hw_cam_write && !hw_vid_enc_read) ||
          (sw_read && !sw_read_allowed) ||
          (sw_write && !sw_write_allowed) ) {
-        ALOGE("gralloc_lock usage mismatch usage=0x%x cb->usage=0x%x\n", usage, cb->usage);
+        ALOGE("gralloc_lock usage mismatch usage=0x%x cb->usage=0x%x\n", usage,
+                cb->usage);
         return -EINVAL;
     }
 
@@ -559,7 +620,7 @@
     //
     // make sure ashmem area is mapped if needed
     //
-    if (cb->canBePosted() || sw_read || sw_write) {
+    if (cb->canBePosted() || sw_read || sw_write || hw_cam_write || hw_vid_enc_read) {
         if (cb->ashmemBasePid != getpid() || !cb->ashmemBase) {
             return -EACCES;
         }
@@ -596,11 +657,11 @@
     //
     // is virtual address required ?
     //
-    if (sw_read || sw_write) {
+    if (sw_read || sw_write || hw_cam_write || hw_vid_enc_read) {
         *vaddr = cpu_addr;
     }
 
-    if (sw_write) {
+    if (sw_write || hw_cam_write) {
         //
         // Keep locked region if locked for s/w write access.
         //
@@ -610,6 +671,9 @@
         cb->lockedHeight = h;
     }
 
+    DD("gralloc_lock success. vaddr: %p, *vaddr: %p, usage: %x, cpu_addr: %p",
+            vaddr, vaddr ? *vaddr : 0, usage, cpu_addr);
+
     return 0;
 }
 
diff --git a/tools/emulator/system/camera/Android.mk b/tools/emulator/system/camera/Android.mk
index c51f621..3843c1d 100755
--- a/tools/emulator/system/camera/Android.mk
+++ b/tools/emulator/system/camera/Android.mk
@@ -33,11 +33,13 @@
 	libjpeg \
 	libskia \
 	libandroid_runtime \
+	libcamera_metadata
 
 LOCAL_C_INCLUDES += external/jpeg \
 	external/skia/include/core/ \
 	frameworks/native/include/media/hardware \
 	frameworks/base/core/jni/android/graphics \
+	$(LOCAL_PATH)/../../opengl/system/OpenglSystemCommon \
 	$(call include-path-for, camera)
 
 LOCAL_SRC_FILES := \
@@ -57,7 +59,11 @@
 	JpegCompressor.cpp \
     EmulatedCamera2.cpp \
 	EmulatedFakeCamera2.cpp \
-	EmulatedQemuCamera2.cpp
+	EmulatedQemuCamera2.cpp \
+	fake-pipeline2/Scene.cpp \
+	fake-pipeline2/Sensor.cpp \
+	fake-pipeline2/JpegCompressor.cpp
+
 
 ifeq ($(TARGET_PRODUCT),vbox_x86)
 LOCAL_MODULE := camera.vbox_x86
diff --git a/tools/emulator/system/camera/EmulatedCamera2.cpp b/tools/emulator/system/camera/EmulatedCamera2.cpp
index f7672f4..fa7ee4d 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera2.cpp
@@ -22,7 +22,7 @@
  * for all camera API calls that defined by camera2_device_ops_t API.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera2_Camera"
 #include <cutils/log.h>
 
@@ -48,36 +48,10 @@
     ops = &sDeviceOps;
     priv = this;
 
-    mRequestQueueDstOps.notify_queue_not_empty =
-            EmulatedCamera2::request_queue_notify_queue_not_empty;
-    mRequestQueueDstOps.parent                 = this;
+    mNotifyCb = NULL;
 
-    mRequestQueueDstOps.notify_queue_not_empty =
-            EmulatedCamera2::reprocess_queue_notify_queue_not_empty;
-    mReprocessQueueDstOps.parent               = this;
-
-    mFrameQueueSrcOps.buffer_count = EmulatedCamera2::frame_queue_buffer_count;
-    mFrameQueueSrcOps.dequeue      = EmulatedCamera2::frame_queue_dequeue;
-    mFrameQueueSrcOps.free         = EmulatedCamera2::frame_queue_free;
-    mFrameQueueSrcOps.parent       = this;
-
-    mReprocessStreamOps.dequeue_buffer =
-            EmulatedCamera2::reprocess_stream_dequeue_buffer;
-    mReprocessStreamOps.enqueue_buffer =
-            EmulatedCamera2::reprocess_stream_enqueue_buffer;
-    mReprocessStreamOps.cancel_buffer =
-            EmulatedCamera2::reprocess_stream_cancel_buffer;
-    mReprocessStreamOps.set_buffer_count =
-            EmulatedCamera2::reprocess_stream_set_buffer_count;
-    mReprocessStreamOps.set_crop = EmulatedCamera2::reprocess_stream_set_crop;
-    mReprocessStreamOps.set_timestamp =
-            EmulatedCamera2::reprocess_stream_set_timestamp;
-    mReprocessStreamOps.set_usage = EmulatedCamera2::reprocess_stream_set_usage;
-    mReprocessStreamOps.get_min_undequeued_buffer_count =
-            EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count;
-    mReprocessStreamOps.lock_buffer =
-            EmulatedCamera2::reprocess_stream_lock_buffer;
-    mReprocessStreamOps.parent   = this;
+    mRequestQueueSrc = NULL;
+    mFrameQueueDst = NULL;
 
     mVendorTagOps.get_camera_vendor_section_name =
             EmulatedCamera2::get_camera_vendor_section_name;
@@ -109,6 +83,7 @@
  ***************************************************************************/
 
 status_t EmulatedCamera2::connectCamera(hw_device_t** device) {
+    *device = &common;
     return NO_ERROR;
 }
 
@@ -117,126 +92,85 @@
 }
 
 status_t EmulatedCamera2::getCameraInfo(struct camera_info* info) {
-
     return EmulatedBaseCamera::getCameraInfo(info);
 }
 
 /****************************************************************************
- * Camera API implementation.
+ * Camera Device API implementation.
  * These methods are called from the camera API callback routines.
  ***************************************************************************/
 
 /** Request input queue */
 
-int EmulatedCamera2::setRequestQueueSrcOps(
-    camera2_metadata_queue_src_ops *request_queue_src_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::requestQueueNotifyNotEmpty() {
-    return NO_ERROR;
-}
-
-/** Reprocessing input queue */
-
-int EmulatedCamera2::setReprocessQueueSrcOps(
-    camera2_metadata_queue_src_ops *reprocess_queue_src_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessQueueNotifyNotEmpty() {
-    return NO_ERROR;
-}
-
-/** Frame output queue */
-
-int EmulatedCamera2::setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::frameQueueBufferCount() {
-    return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueDequeue(camera_metadata_t **buffer) {
-    return NO_ERROR;
-}
-int EmulatedCamera2::frameQueueFree(camera_metadata_t *old_buffer) {
-    return NO_ERROR;
-}
-
-/** Notifications to application */
-int EmulatedCamera2::setNotifyCallback(camera2_notify_callback notify_cb) {
-    return NO_ERROR;
+int EmulatedCamera2::requestQueueNotify() {
+    return INVALID_OPERATION;
 }
 
 /** Count of requests in flight */
 int EmulatedCamera2::getInProgressCount() {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
 /** Cancel all captures in flight */
 int EmulatedCamera2::flushCapturesInProgress() {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
-/** Reprocessing input stream management */
-int EmulatedCamera2::reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
-        int *stride) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamEnqueueBuffer(buffer_handle_t* buffer) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamCancelBuffer(buffer_handle_t* buffer) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetBufferCount(int count) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetCrop(int left, int top, int right, int bottom) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetTimestamp(int64_t timestamp) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetUsage(int usage) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamSetSwapInterval(int interval) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamGetMinUndequeuedBufferCount(int *count) {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::reprocessStreamLockBuffer(buffer_handle_t *buffer) {
-    return NO_ERROR;
+/** Construct a default request for a given use case */
+int EmulatedCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request) {
+    return INVALID_OPERATION;
 }
 
 /** Output stream creation and management */
 
-int EmulatedCamera2::getStreamSlotCount() {
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::allocateStream(uint32_t stream_slot,
+int EmulatedCamera2::allocateStream(
         uint32_t width,
         uint32_t height,
         int format,
-        camera2_stream_ops_t *stream_ops) {
-    return NO_ERROR;
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    return INVALID_OPERATION;
 }
 
-int EmulatedCamera2::releaseStream(uint32_t stream_slot) {
-    return NO_ERROR;
+int EmulatedCamera2::registerStreamBuffers(
+        uint32_t stream_id,
+        int num_buffers,
+        buffer_handle_t *buffers) {
+    return INVALID_OPERATION;
+}
+
+
+int EmulatedCamera2::releaseStream(uint32_t stream_id) {
+    return INVALID_OPERATION;
+}
+
+/** Reprocessing input stream management */
+
+int EmulatedCamera2::allocateReprocessStream(
+        uint32_t width,
+        uint32_t height,
+        uint32_t format,
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id,
+        uint32_t *consumer_usage,
+        uint32_t *max_buffers) {
+    return INVALID_OPERATION;
+}
+
+int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
+    return INVALID_OPERATION;
+}
+
+/** 3A triggering */
+
+int EmulatedCamera2::triggerAction(uint32_t trigger_id,
+                                   int ext1, int ext2) {
+    return INVALID_OPERATION;
 }
 
 /** Custom tag query methods */
@@ -253,14 +187,10 @@
     return -1;
 }
 
-/** Shutdown and debug methods */
-
-int EmulatedCamera2::release() {
-    return NO_ERROR;
-}
+/** Debug methods */
 
 int EmulatedCamera2::dump(int fd) {
-    return NO_ERROR;
+    return INVALID_OPERATION;
 }
 
 /****************************************************************************
@@ -272,214 +202,118 @@
  * hardware/libhardware/include/hardware/camera2.h for information on each
  * of these callbacks. Implemented in this class, these callbacks simply
  * dispatch the call into an instance of EmulatedCamera2 class defined by the
- * 'camera_device2' parameter.
+ * 'camera_device2' parameter, or set a member value in the same.
  ***************************************************************************/
 
-int EmulatedCamera2::set_request_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops *queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setRequestQueueSrcOps(queue_src_ops);
+EmulatedCamera2* getInstance(const camera2_device_t *d) {
+    const EmulatedCamera2* cec = static_cast<const EmulatedCamera2*>(d);
+    return const_cast<EmulatedCamera2*>(cec);
 }
 
-int EmulatedCamera2::get_request_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops **queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
-        &ec->mRequestQueueDstOps);
+int EmulatedCamera2::set_request_queue_src_ops(const camera2_device_t *d,
+        const camera2_request_queue_src_ops *queue_src_ops) {
+    EmulatedCamera2* ec = getInstance(d);
+    ec->mRequestQueueSrc = queue_src_ops;
     return NO_ERROR;
 }
 
-int EmulatedCamera2::request_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *q) {
-    EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
-    return ec->requestQueueNotifyNotEmpty();
+int EmulatedCamera2::notify_request_queue_not_empty(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->requestQueueNotify();
 }
 
-int EmulatedCamera2::set_reprocess_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops *queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setReprocessQueueSrcOps(queue_src_ops);
-}
-
-int EmulatedCamera2::get_reprocess_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops **queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_dst_ops = static_cast<camera2_metadata_queue_dst_ops*>(
-        &ec->mReprocessQueueDstOps);
+int EmulatedCamera2::set_frame_queue_dst_ops(const camera2_device_t *d,
+        const camera2_frame_queue_dst_ops *queue_dst_ops) {
+    EmulatedCamera2* ec = getInstance(d);
+    ec->mFrameQueueDst = queue_dst_ops;
     return NO_ERROR;
 }
 
-int EmulatedCamera2::reprocess_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *q) {
-    EmulatedCamera2* ec = static_cast<QueueDstOps*>(q)->parent;
-    return ec->reprocessQueueNotifyNotEmpty();
-}
-
-int EmulatedCamera2::set_frame_queue_dst_ops(struct camera2_device *d,
-        camera2_metadata_queue_dst_ops *queue_dst_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setFrameQueueDstOps(queue_dst_ops);
-}
-
-int EmulatedCamera2::get_frame_queue_src_ops(struct camera2_device *d,
-        camera2_metadata_queue_src_ops **queue_src_ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *queue_src_ops = static_cast<camera2_metadata_queue_src_ops*>(
-        &ec->mFrameQueueSrcOps);
-    return NO_ERROR;
-}
-
-int EmulatedCamera2::frame_queue_buffer_count(camera2_metadata_queue_src_ops *q) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueBufferCount();
-}
-
-int EmulatedCamera2::frame_queue_dequeue(camera2_metadata_queue_src_ops *q,
-        camera_metadata_t **buffer) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueDequeue(buffer);
-}
-
-int EmulatedCamera2::frame_queue_free(camera2_metadata_queue_src_ops *q,
-        camera_metadata_t *old_buffer) {
-    EmulatedCamera2 *ec = static_cast<QueueSrcOps*>(q)->parent;
-    return ec->frameQueueFree(old_buffer);
-}
-
-int EmulatedCamera2::set_notify_callback(struct camera2_device *d,
-        camera2_notify_callback notify_cb) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    return ec->setNotifyCallback(notify_cb);
-}
-
-int EmulatedCamera2::get_in_progress_count(struct camera2_device *d) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+int EmulatedCamera2::get_in_progress_count(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
     return ec->getInProgressCount();
 }
 
-int EmulatedCamera2::flush_captures_in_progress(struct camera2_device *d) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+int EmulatedCamera2::flush_captures_in_progress(const camera2_device_t *d) {
+    EmulatedCamera2* ec = getInstance(d);
     return ec->flushCapturesInProgress();
 }
 
-int EmulatedCamera2::get_reprocess_stream_ops(camera2_device_t *d,
-        camera2_stream_ops **stream) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
-    *stream = static_cast<camera2_stream_ops*>(&ec->mReprocessStreamOps);
-    return NO_ERROR;
+int EmulatedCamera2::construct_default_request(const camera2_device_t *d,
+        int request_template,
+        camera_metadata_t **request) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->constructDefaultRequest(request_template, request);
 }
 
-int EmulatedCamera2::reprocess_stream_dequeue_buffer(camera2_stream_ops *s,
-        buffer_handle_t** buffer, int *stride) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamDequeueBuffer(buffer, stride);
+int EmulatedCamera2::allocate_stream(const camera2_device_t *d,
+        uint32_t width,
+        uint32_t height,
+        int format,
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateStream(width, height, format, stream_ops,
+            stream_id, format_actual, usage, max_buffers);
 }
 
-int EmulatedCamera2::reprocess_stream_enqueue_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamEnqueueBuffer(buffer);
+int EmulatedCamera2::register_stream_buffers(const camera2_device_t *d,
+        uint32_t stream_id,
+        int num_buffers,
+        buffer_handle_t *buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->registerStreamBuffers(stream_id,
+            num_buffers,
+            buffers);
+}
+int EmulatedCamera2::release_stream(const camera2_device_t *d,
+        uint32_t stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->releaseStream(stream_id);
 }
 
-int EmulatedCamera2::reprocess_stream_cancel_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamCancelBuffer(buffer);
-}
-
-int EmulatedCamera2::reprocess_stream_set_buffer_count(camera2_stream_ops *s,
-        int count) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_set_crop(camera2_stream_ops *s,
-        int left, int top, int right, int bottom) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetCrop(left, top, right, bottom);
-}
-
-int EmulatedCamera2::reprocess_stream_set_timestamp(camera2_stream_ops *s,
-        int64_t timestamp) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetTimestamp(timestamp);
-}
-
-int EmulatedCamera2::reprocess_stream_set_usage(camera2_stream_ops *s,
-        int usage) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetUsage(usage);
-}
-
-int EmulatedCamera2::reprocess_stream_set_swap_interval(camera2_stream_ops *s,
-        int interval) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamSetSwapInterval(interval);
-}
-
-int EmulatedCamera2::reprocess_stream_get_min_undequeued_buffer_count(
-        const camera2_stream_ops *s,
-        int *count) {
-    EmulatedCamera2* ec = static_cast<const StreamOps*>(s)->parent;
-    return ec->reprocessStreamGetMinUndequeuedBufferCount(count);
-}
-
-int EmulatedCamera2::reprocess_stream_lock_buffer(camera2_stream_ops *s,
-        buffer_handle_t* buffer) {
-    EmulatedCamera2* ec = static_cast<StreamOps*>(s)->parent;
-    return ec->reprocessStreamLockBuffer(buffer);
-}
-
-int EmulatedCamera2::get_stream_slot_count(struct camera2_device *d) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->getStreamSlotCount();
-}
-
-int EmulatedCamera2::allocate_stream(struct camera2_device *d,
-        uint32_t stream_slot,
+int EmulatedCamera2::allocate_reprocess_stream(const camera2_device_t *d,
         uint32_t width,
         uint32_t height,
         uint32_t format,
-        camera2_stream_ops_t *stream_ops) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->allocateStream(stream_slot, width, height, format, stream_ops);
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id,
+        uint32_t *consumer_usage,
+        uint32_t *max_buffers) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateReprocessStream(width, height, format,
+            reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
 }
 
-int EmulatedCamera2::release_stream(struct camera2_device *d,
-        uint32_t stream_slot) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->releaseStream(stream_slot);
+int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
+        uint32_t stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->releaseReprocessStream(stream_id);
 }
 
-void EmulatedCamera2::release(struct camera2_device *d) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    ec->release();
+int EmulatedCamera2::trigger_action(const camera2_device_t *d,
+        uint32_t trigger_id,
+        int ext1,
+        int ext2) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->triggerAction(trigger_id, ext1, ext2);
 }
 
-int EmulatedCamera2::dump(struct camera2_device *d, int fd) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(d);
-    return ec->dump(fd);
+int EmulatedCamera2::set_notify_callback(const camera2_device_t *d,
+        camera2_notify_callback notify_cb, void* user) {
+    EmulatedCamera2* ec = getInstance(d);
+    Mutex::Autolock l(ec->mMutex);
+    ec->mNotifyCb = notify_cb;
+    ec->mNotifyUserPtr = user;
+    return NO_ERROR;
 }
 
-int EmulatedCamera2::close(struct hw_device_t* device) {
-    EmulatedCamera2* ec =
-            static_cast<EmulatedCamera2*>(
-                reinterpret_cast<struct camera2_device*>(device) );
-    if (ec == NULL) {
-        ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
-        return -EINVAL;
-    }
-    return ec->closeCamera();
-}
-
-int EmulatedCamera2::get_metadata_vendor_tag_ops(struct camera2_device *d,
+int EmulatedCamera2::get_metadata_vendor_tag_ops(const camera2_device_t *d,
         vendor_tag_query_ops_t **ops) {
-    EmulatedCamera2* ec = static_cast<EmulatedCamera2*>(d);
+    EmulatedCamera2* ec = getInstance(d);
     *ops = static_cast<vendor_tag_query_ops_t*>(
             &ec->mVendorTagOps);
     return NO_ERROR;
@@ -506,22 +340,49 @@
     return ec->getVendorTagType(tag);
 }
 
+int EmulatedCamera2::dump(const camera2_device_t *d, int fd) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->dump(fd);
+}
+
+int EmulatedCamera2::close(struct hw_device_t* device) {
+    EmulatedCamera2* ec =
+            static_cast<EmulatedCamera2*>(
+                reinterpret_cast<camera2_device_t*>(device) );
+    if (ec == NULL) {
+        ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
+        return -EINVAL;
+    }
+    return ec->closeCamera();
+}
+
+void EmulatedCamera2::sendNotification(int32_t msgType,
+        int32_t ext1, int32_t ext2, int32_t ext3) {
+    camera2_notify_callback notifyCb;
+    {
+        Mutex::Autolock l(mMutex);
+        notifyCb = mNotifyCb;
+    }
+    if (notifyCb != NULL) {
+        notifyCb(msgType, ext1, ext2, ext3, mNotifyUserPtr);
+    }
+}
+
 camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
     EmulatedCamera2::set_request_queue_src_ops,
-    EmulatedCamera2::get_request_queue_dst_ops,
-    EmulatedCamera2::set_reprocess_queue_src_ops,
-    EmulatedCamera2::get_reprocess_queue_dst_ops,
+    EmulatedCamera2::notify_request_queue_not_empty,
     EmulatedCamera2::set_frame_queue_dst_ops,
-    EmulatedCamera2::get_frame_queue_src_ops,
-    EmulatedCamera2::set_notify_callback,
     EmulatedCamera2::get_in_progress_count,
     EmulatedCamera2::flush_captures_in_progress,
-    EmulatedCamera2::get_reprocess_stream_ops,
-    EmulatedCamera2::get_stream_slot_count,
+    EmulatedCamera2::construct_default_request,
     EmulatedCamera2::allocate_stream,
+    EmulatedCamera2::register_stream_buffers,
     EmulatedCamera2::release_stream,
+    EmulatedCamera2::allocate_reprocess_stream,
+    EmulatedCamera2::release_reprocess_stream,
+    EmulatedCamera2::trigger_action,
+    EmulatedCamera2::set_notify_callback,
     EmulatedCamera2::get_metadata_vendor_tag_ops,
-    EmulatedCamera2::release,
     EmulatedCamera2::dump
 };
 
diff --git a/tools/emulator/system/camera/EmulatedCamera2.h b/tools/emulator/system/camera/EmulatedCamera2.h
index feeadf9..a294454 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.h
+++ b/tools/emulator/system/camera/EmulatedCamera2.h
@@ -28,6 +28,8 @@
 #include "hardware/camera2.h"
 #include "system/camera_metadata.h"
 #include "EmulatedBaseCamera.h"
+#include <utils/Thread.h>
+#include <utils/Mutex.h>
 
 namespace android {
 
@@ -67,7 +69,7 @@
     virtual status_t Initialize();
 
     /****************************************************************************
-     * Camera API implementation
+     * Camera module API and generic hardware device API implementation
      ***************************************************************************/
 
 public:
@@ -75,7 +77,7 @@
 
     virtual status_t closeCamera();
 
-    virtual status_t getCameraInfo(struct camera_info* info);
+    virtual status_t getCameraInfo(struct camera_info* info) = 0;
 
     /****************************************************************************
      * Camera API implementation.
@@ -83,177 +85,134 @@
      ***************************************************************************/
 
 protected:
-    /** Request input queue */
-
-    int setRequestQueueSrcOps(
-        camera2_metadata_queue_src_ops *request_queue_src_ops);
-
-    int requestQueueNotifyNotEmpty();
-
-    /** Reprocessing input queue */
-
-    int setReprocessQueueSrcOps(
-        camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
-
-    int reprocessQueueNotifyNotEmpty();
-
-    /** Frame output queue */
-
-    int setFrameQueueDstOps(camera2_metadata_queue_dst_ops *frame_queue_dst_ops);
-
-    int frameQueueBufferCount();
-    int frameQueueDequeue(camera_metadata_t **buffer);
-    int frameQueueFree(camera_metadata_t *old_buffer);
-
-    /** Notifications to application */
-    int setNotifyCallback(camera2_notify_callback notify_cb);
+    /** Request input queue notification */
+    virtual int requestQueueNotify();
 
     /** Count of requests in flight */
-    int getInProgressCount();
+    virtual int getInProgressCount();
 
     /** Cancel all captures in flight */
-    int flushCapturesInProgress();
+    virtual int flushCapturesInProgress();
 
-    /** Reprocessing input stream management */
-    int reprocessStreamDequeueBuffer(buffer_handle_t** buffer,
-            int *stride);
-
-    int reprocessStreamEnqueueBuffer(buffer_handle_t* buffer);
-
-    int reprocessStreamCancelBuffer(buffer_handle_t* buffer);
-
-    int reprocessStreamSetBufferCount(int count);
-
-    int reprocessStreamSetCrop(int left, int top, int right, int bottom);
-
-    int reprocessStreamSetTimestamp(int64_t timestamp);
-
-    int reprocessStreamSetUsage(int usage);
-
-    int reprocessStreamSetSwapInterval(int interval);
-
-    int reprocessStreamGetMinUndequeuedBufferCount(int *count);
-
-    int reprocessStreamLockBuffer(buffer_handle_t *buffer);
+    virtual int constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request);
 
     /** Output stream creation and management */
-
-    int getStreamSlotCount();
-
-    int allocateStream(uint32_t stream_slot,
+    virtual int allocateStream(
             uint32_t width,
             uint32_t height,
             int format,
-            camera2_stream_ops_t *stream_ops);
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
 
-    int releaseStream(uint32_t stream_slot);
+    virtual int registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
+
+    virtual int releaseStream(uint32_t stream_id);
+
+    /** Input stream creation and management */
+    virtual int allocateReprocessStream(
+            uint32_t width,
+            uint32_t height,
+            uint32_t format,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id,
+            uint32_t *consumer_usage,
+            uint32_t *max_buffers);
+
+    virtual int releaseReprocessStream(uint32_t stream_id);
+
+    /** 3A action triggering */
+    virtual int triggerAction(uint32_t trigger_id,
+            int32_t ext1, int32_t ext2);
 
     /** Custom tag definitions */
-    const char* getVendorSectionName(uint32_t tag);
-    const char* getVendorTagName(uint32_t tag);
-    int         getVendorTagType(uint32_t tag);
+    virtual const char* getVendorSectionName(uint32_t tag);
+    virtual const char* getVendorTagName(uint32_t tag);
+    virtual int         getVendorTagType(uint32_t tag);
 
-    /** Shutdown and debug methods */
+    /** Debug methods */
 
-    int release();
-
-    int dump(int fd);
-
-    int close();
+    virtual int dump(int fd);
 
     /****************************************************************************
      * Camera API callbacks as defined by camera2_device_ops structure.  See
      * hardware/libhardware/include/hardware/camera2.h for information on each
      * of these callbacks. Implemented in this class, these callbacks simply
-     * dispatch the call into an instance of EmulatedCamera2 class defined in the
-     * 'camera_device2' parameter.
+     * dispatch the call into an instance of EmulatedCamera2 class defined in
+     * the 'camera_device2' parameter.
      ***************************************************************************/
 
 private:
     /** Input request queue */
-    static int set_request_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops *queue_src_ops);
-    static int get_request_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops **queue_dst_ops);
-    // for get_request_queue_dst_ops
-    static int request_queue_notify_queue_not_empty(
-        camera2_metadata_queue_dst_ops *);
-
-    /** Input reprocess queue */
-    static int set_reprocess_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops *reprocess_queue_src_ops);
-    static int get_reprocess_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops **queue_dst_ops);
-    // for reprocess_queue_dst_ops
-    static int reprocess_queue_notify_queue_not_empty(
-            camera2_metadata_queue_dst_ops *);
+    static int set_request_queue_src_ops(const camera2_device_t *,
+            const camera2_request_queue_src_ops *queue_src_ops);
+    static int notify_request_queue_not_empty(const camera2_device_t *);
 
     /** Output frame queue */
-    static int set_frame_queue_dst_ops(camera2_device_t *,
-            camera2_metadata_queue_dst_ops *queue_dst_ops);
-    static int get_frame_queue_src_ops(camera2_device_t *,
-            camera2_metadata_queue_src_ops **queue_src_ops);
-    // for get_frame_queue_src_ops
-    static int frame_queue_buffer_count(camera2_metadata_queue_src_ops *);
-    static int frame_queue_dequeue(camera2_metadata_queue_src_ops *,
-            camera_metadata_t **buffer);
-    static int frame_queue_free(camera2_metadata_queue_src_ops *,
-            camera_metadata_t *old_buffer);
-
-    /** Notifications to application */
-    static int set_notify_callback(camera2_device_t *,
-            camera2_notify_callback notify_cb);
+    static int set_frame_queue_dst_ops(const camera2_device_t *,
+            const camera2_frame_queue_dst_ops *queue_dst_ops);
 
     /** In-progress request management */
-    static int get_in_progress_count(camera2_device_t *);
+    static int get_in_progress_count(const camera2_device_t *);
 
-    static int flush_captures_in_progress(camera2_device_t *);
+    static int flush_captures_in_progress(const camera2_device_t *);
 
-    /** Input reprocessing stream */
-    static int get_reprocess_stream_ops(camera2_device_t *,
-            camera2_stream_ops_t **stream);
-    // for get_reprocess_stream_ops
-    static int reprocess_stream_dequeue_buffer(camera2_stream_ops *,
-            buffer_handle_t** buffer, int *stride);
-    static int reprocess_stream_enqueue_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
-    static int reprocess_stream_cancel_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
-    static int reprocess_stream_set_buffer_count(camera2_stream_ops *,
-            int count);
-    static int reprocess_stream_set_crop(camera2_stream_ops *,
-            int left, int top, int right, int bottom);
-    static int reprocess_stream_set_timestamp(camera2_stream_ops *,
-            int64_t timestamp);
-    static int reprocess_stream_set_usage(camera2_stream_ops *,
-            int usage);
-    static int reprocess_stream_set_swap_interval(camera2_stream_ops *,
-            int interval);
-    static int reprocess_stream_get_min_undequeued_buffer_count(
-            const camera2_stream_ops *,
-            int *count);
-    static int reprocess_stream_lock_buffer(camera2_stream_ops *,
-            buffer_handle_t* buffer);
+    /** Request template creation */
+    static int construct_default_request(const camera2_device_t *,
+            int request_template,
+            camera_metadata_t **request);
 
-    /** Output stream allocation and management */
+    /** Stream management */
+    static int allocate_stream(const camera2_device_t *,
+            uint32_t width,
+            uint32_t height,
+            int format,
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
 
-    static int get_stream_slot_count(camera2_device_t *);
+    static int register_stream_buffers(const camera2_device_t *,
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
 
-    static int allocate_stream(camera2_device_t *,
-            uint32_t stream_slot,
+    static int release_stream(const camera2_device_t *,
+            uint32_t stream_id);
+
+    static int allocate_reprocess_stream(const camera2_device_t *,
             uint32_t width,
             uint32_t height,
             uint32_t format,
-            camera2_stream_ops_t *stream_ops);
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id,
+            uint32_t *consumer_usage,
+            uint32_t *max_buffers);
 
-    static int release_stream(camera2_device_t *,
-            uint32_t stream_slot);
+    static int release_reprocess_stream(const camera2_device_t *,
+            uint32_t stream_id);
 
-    static void release(camera2_device_t *);
+    /** 3A triggers*/
+    static int trigger_action(const camera2_device_t *,
+            uint32_t trigger_id,
+            int ext1,
+            int ext2);
+
+    /** Notifications to application */
+    static int set_notify_callback(const camera2_device_t *,
+            camera2_notify_callback notify_cb,
+            void *user);
 
     /** Vendor metadata registration */
-
-    static int get_metadata_vendor_tag_ops(camera2_device_t *,
+    static int get_metadata_vendor_tag_ops(const camera2_device_t *,
             vendor_tag_query_ops_t **ops);
     // for get_metadata_vendor_tag_ops
     static const char* get_camera_vendor_section_name(
@@ -266,38 +225,36 @@
             const vendor_tag_query_ops_t *,
             uint32_t tag);
 
-    static int dump(camera2_device_t *, int fd);
+    static int dump(const camera2_device_t *, int fd);
 
+    /** For hw_device_t ops */
     static int close(struct hw_device_t* device);
 
     /****************************************************************************
-     * Data members
+     * Data members shared with implementations
      ***************************************************************************/
+  protected:
+    /** Mutex for calls through camera2 device interface */
+    Mutex mMutex;
 
-  private:
-    static camera2_device_ops_t sDeviceOps;
-
-    struct QueueDstOps : public camera2_metadata_queue_dst_ops {
-        EmulatedCamera2 *parent;
-    };
-
-    struct QueueSrcOps : public camera2_metadata_queue_src_ops {
-        EmulatedCamera2 *parent;
-    };
-
-    struct StreamOps : public camera2_stream_ops {
-        EmulatedCamera2 *parent;
-    };
+    const camera2_request_queue_src_ops *mRequestQueueSrc;
+    const camera2_frame_queue_dst_ops *mFrameQueueDst;
 
     struct TagOps : public vendor_tag_query_ops {
         EmulatedCamera2 *parent;
     };
-
-    QueueDstOps mRequestQueueDstOps;
-    QueueDstOps mReprocessQueueDstOps;
-    QueueSrcOps mFrameQueueSrcOps;
-    StreamOps   mReprocessStreamOps;
     TagOps      mVendorTagOps;
+
+    void sendNotification(int32_t msgType,
+            int32_t ext1, int32_t ext2, int32_t ext3);
+
+    /****************************************************************************
+     * Data members
+     ***************************************************************************/
+  private:
+    static camera2_device_ops_t sDeviceOps;
+    camera2_notify_callback mNotifyCb;
+    void* mNotifyUserPtr;
 };
 
 }; /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.cpp b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
index 2960751..84248ca 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
@@ -19,7 +19,7 @@
  * available for emulation.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera_Factory"
 #include <cutils/log.h>
 #include <cutils/properties.h>
@@ -44,6 +44,7 @@
           mFakeCameraNum(0),
           mConstructedOK(false)
 {
+    status_t res;
     /* Connect to the factory service in the emulator, and create Qemu cameras. */
     if (mQemuClient.connectClient(NULL) == NO_ERROR) {
         /* Connection has succeeded. Create emulated cameras for each camera
@@ -75,11 +76,13 @@
         switch (getBackCameraHalVersion()) {
             case 1:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera(camera_id, true,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             case 2:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera2(camera_id, true,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             default:
                 ALOGE("%s: Unknown back camera hal version requested: %d", __FUNCTION__,
@@ -88,12 +91,15 @@
         if (mEmulatedCameras[camera_id] != NULL) {
             ALOGV("%s: Back camera device version is %d", __FUNCTION__,
                     getBackCameraHalVersion());
-            if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
+            res = mEmulatedCameras[camera_id]->Initialize();
+            if (res != NO_ERROR) {
+                ALOGE("%s: Unable to intialize back camera %d: %s (%d)",
+                        __FUNCTION__, camera_id, strerror(-res), res);
                 delete mEmulatedCameras[camera_id];
-                mEmulatedCameras--;
+                mEmulatedCameraNum--;
             }
         } else {
-            mEmulatedCameras--;
+            mEmulatedCameraNum--;
             ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
         }
     }
@@ -121,25 +127,31 @@
         switch (getFrontCameraHalVersion()) {
             case 1:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera(camera_id, false,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             case 2:
                 mEmulatedCameras[camera_id] =
-                        new EmulatedFakeCamera2(camera_id, false, &HAL_MODULE_INFO_SYM.common);
+                        new EmulatedFakeCamera2(camera_id, false,
+                                &HAL_MODULE_INFO_SYM.common);
                 break;
             default:
-                ALOGE("%s: Unknown front camera hal version requested: %d", __FUNCTION__,
+                ALOGE("%s: Unknown front camera hal version requested: %d",
+                        __FUNCTION__,
                         getFrontCameraHalVersion());
         }
         if (mEmulatedCameras[camera_id] != NULL) {
             ALOGV("%s: Front camera device version is %d", __FUNCTION__,
                     getFrontCameraHalVersion());
-            if (mEmulatedCameras[camera_id]->Initialize() != NO_ERROR) {
+            res = mEmulatedCameras[camera_id]->Initialize();
+            if (res != NO_ERROR) {
+                ALOGE("%s: Unable to intialize front camera %d: %s (%d)",
+                        __FUNCTION__, camera_id, strerror(-res), res);
                 delete mEmulatedCameras[camera_id];
-                mEmulatedCameras--;
+                mEmulatedCameraNum--;
             }
         } else {
-            mEmulatedCameras--;
+            mEmulatedCameraNum--;
             ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
         }
     }
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index aa62244..d630db4 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -19,15 +19,65 @@
  * functionality of an advanced fake camera.
  */
 
-#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera_FakeCamera2"
-#include <cutils/log.h>
-#include <cutils/properties.h>
+#include <utils/Log.h>
+
 #include "EmulatedFakeCamera2.h"
 #include "EmulatedCameraFactory.h"
+#include <ui/Rect.h>
+#include <ui/GraphicBufferMapper.h>
+#include "gralloc_cb.h"
 
 namespace android {
 
+const uint32_t EmulatedFakeCamera2::kAvailableFormats[5] = {
+        HAL_PIXEL_FORMAT_RAW_SENSOR,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_YV12,
+        HAL_PIXEL_FORMAT_YCrCb_420_SP
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
+    640, 480
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
+    640, 480, 320, 240
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
+    320, 240, 160, 120
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
+    640, 480
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
+    320, 240
+    //    Sensor::kResolution[0], Sensor::kResolution[1]
+};
+
+
+const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
+    Sensor::kFrameDurationRange[0]
+};
+
+
 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
         bool facingBack,
         struct hw_module_t* module)
@@ -38,17 +88,2128 @@
             facingBack ? "back" : "front");
 }
 
-EmulatedFakeCamera2::~EmulatedFakeCamera2()
-{
+EmulatedFakeCamera2::~EmulatedFakeCamera2() {
+    if (mCameraInfo != NULL) {
+        free_camera_metadata(mCameraInfo);
+    }
 }
 
 /****************************************************************************
  * Public API overrides
  ***************************************************************************/
 
-status_t EmulatedFakeCamera2::Initialize()
-{
+status_t EmulatedFakeCamera2::Initialize() {
+    status_t res;
+
+    set_camera_metadata_vendor_tag_ops(
+            static_cast<vendor_tag_query_ops_t*>(&mVendorTagOps));
+
+    res = constructStaticInfo(&mCameraInfo, true);
+    if (res != OK) {
+        ALOGE("%s: Unable to allocate static info: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    res = constructStaticInfo(&mCameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Unable to fill in static info: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    if (res != OK) return res;
+
+    mNextStreamId = 0;
+    mRawStreamCount = 0;
+    mProcessedStreamCount = 0;
+    mJpegStreamCount = 0;
+
     return NO_ERROR;
 }
 
+/****************************************************************************
+ * Camera module API overrides
+ ***************************************************************************/
+
+status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
+    status_t res;
+    ALOGV("%s", __FUNCTION__);
+
+    mConfigureThread = new ConfigureThread(this);
+    mReadoutThread = new ReadoutThread(this);
+    mControlThread = new ControlThread(this);
+    mSensor = new Sensor(this);
+    mJpegCompressor = new JpegCompressor(this);
+
+    mNextStreamId = 0;
+
+    res = mSensor->startUp();
+    if (res != NO_ERROR) return res;
+
+    res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
+    if (res != NO_ERROR) return res;
+
+    res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
+    if (res != NO_ERROR) return res;
+
+    res = mControlThread->run("EmulatedFakeCamera2::controlThread");
+    if (res != NO_ERROR) return res;
+
+    return EmulatedCamera2::connectCamera(device);
+}
+
+status_t EmulatedFakeCamera2::closeCamera() {
+    Mutex::Autolock l(mMutex);
+
+    status_t res;
+    ALOGV("%s", __FUNCTION__);
+
+    res = mSensor->shutDown();
+    if (res != NO_ERROR) {
+        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
+        return res;
+    }
+
+    mConfigureThread->requestExit();
+    mReadoutThread->requestExit();
+    mControlThread->requestExit();
+    mJpegCompressor->cancel();
+
+    mConfigureThread->join();
+    mReadoutThread->join();
+    mControlThread->join();
+
+    ALOGV("%s exit", __FUNCTION__);
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
+    info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+    info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
+    return EmulatedCamera2::getCameraInfo(info);
+}
+
+/****************************************************************************
+ * Camera device API overrides
+ ***************************************************************************/
+
+/** Request input queue */
+
+int EmulatedFakeCamera2::requestQueueNotify() {
+    ALOGV("Request queue notification received");
+
+    ALOG_ASSERT(mRequestQueueSrc != NULL,
+            "%s: Request queue src not set, but received queue notification!",
+            __FUNCTION__);
+    ALOG_ASSERT(mFrameQueueDst != NULL,
+            "%s: Frame queue dst not set, but received queue notification!",
+            __FUNCTION__);
+    ALOG_ASSERT(mStreams.size() != 0,
+            "%s: No streams allocated, but received queue notification!",
+            __FUNCTION__);
+    return mConfigureThread->newRequestAvailable();
+}
+
+int EmulatedFakeCamera2::getInProgressCount() {
+    Mutex::Autolock l(mMutex);
+
+    int requestCount = 0;
+    requestCount += mConfigureThread->getInProgressCount();
+    requestCount += mReadoutThread->getInProgressCount();
+    requestCount += mJpegCompressor->isBusy() ? 1 : 0;
+
+    return requestCount;
+}
+
+int EmulatedFakeCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request) {
+
+    if (request == NULL) return BAD_VALUE;
+    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
+        return BAD_VALUE;
+    }
+    status_t res;
+    // Pass 1, calculate size and allocate
+    res = constructDefaultRequest(request_template,
+            request,
+            true);
+    if (res != OK) {
+        return res;
+    }
+    // Pass 2, build request
+    res = constructDefaultRequest(request_template,
+            request,
+            false);
+    if (res != OK) {
+        ALOGE("Unable to populate new request for template %d",
+                request_template);
+    }
+
+    return res;
+}
+
+int EmulatedFakeCamera2::allocateStream(
+        uint32_t width,
+        uint32_t height,
+        int format,
+        const camera2_stream_ops_t *stream_ops,
+        uint32_t *stream_id,
+        uint32_t *format_actual,
+        uint32_t *usage,
+        uint32_t *max_buffers) {
+    Mutex::Autolock l(mMutex);
+
+    if (format != CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) {
+        unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
+        unsigned int formatIdx = 0;
+        unsigned int sizeOffsetIdx = 0;
+        for (; formatIdx < numFormats; formatIdx++) {
+            if (format == (int)kAvailableFormats[formatIdx]) break;
+        }
+        if (formatIdx == numFormats) {
+            ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
+            return BAD_VALUE;
+        }
+    } else {
+        // Translate to emulator's magic format.
+        // Note: It is assumed that this is a processed format (not raw or JPEG).
+        format = GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO;
+    }
+
+    const uint32_t *availableSizes;
+    size_t availableSizeCount;
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            availableSizes = kAvailableRawSizes;
+            availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            availableSizes = mFacingBack ?
+                    kAvailableJpegSizesBack : kAvailableJpegSizesFront;
+            availableSizeCount = mFacingBack ?
+                    sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t) :
+                    sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t);
+            break;
+        case GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO:
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+            availableSizes = mFacingBack ?
+                    kAvailableProcessedSizesBack : kAvailableProcessedSizesFront;
+            availableSizeCount = mFacingBack ?
+                    sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t) :
+                    sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t);
+            break;
+        default:
+            ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
+            return BAD_VALUE;
+    }
+
+    unsigned int resIdx = 0;
+    for (; resIdx < availableSizeCount; resIdx++) {
+        if (availableSizes[resIdx * 2] == width &&
+                availableSizes[resIdx * 2 + 1] == height) break;
+    }
+    if (resIdx == availableSizeCount) {
+        ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
+                format, width, height);
+        return BAD_VALUE;
+    }
+
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            if (mRawStreamCount >= kMaxRawStreamCount) {
+                ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
+                        __FUNCTION__, mRawStreamCount);
+                return INVALID_OPERATION;
+            }
+            mRawStreamCount++;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (mJpegStreamCount >= kMaxJpegStreamCount) {
+                ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
+                        __FUNCTION__, mJpegStreamCount);
+                return INVALID_OPERATION;
+            }
+            mJpegStreamCount++;
+            break;
+        default:
+            if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
+                ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
+                        __FUNCTION__, mProcessedStreamCount);
+                return INVALID_OPERATION;
+            }
+            mProcessedStreamCount++;
+    }
+
+    Stream newStream;
+    newStream.ops = stream_ops;
+    newStream.width = width;
+    newStream.height = height;
+    newStream.format = format;
+    // TODO: Query stride from gralloc
+    newStream.stride = width;
+
+    mStreams.add(mNextStreamId, newStream);
+
+    *stream_id = mNextStreamId;
+    if (format_actual) *format_actual = format;
+    *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
+    *max_buffers = 4;
+
+    ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
+            *stream_id, width, height, format, *usage, *max_buffers);
+
+    mNextStreamId++;
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers) {
+    Mutex::Autolock l(mMutex);
+
+    ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
+            stream_id, num_buffers);
+    // Need to find out what the final concrete pixel format for our stream is
+    // Assumes that all buffers have the same format.
+    if (num_buffers < 1) {
+        ALOGE("%s: Stream %d only has %d buffers!",
+                __FUNCTION__, stream_id, num_buffers);
+        return BAD_VALUE;
+    }
+    const cb_handle_t *streamBuffer =
+            reinterpret_cast<const cb_handle_t*>(buffers[0]);
+
+    int finalFormat = streamBuffer->format;
+
+    if (finalFormat == GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO) {
+        ALOGE("%s: Stream %d: Bad final pixel format "
+                "GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO; "
+                "concrete pixel format required!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    Stream &stream = mStreams.editValueAt(streamIndex);
+
+    ALOGV("%s: Stream %d format set to %x, previously %x",
+            __FUNCTION__, stream_id, finalFormat, stream.format);
+
+    stream.format = finalFormat;
+
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    if (isStreamInUse(stream_id)) {
+        ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
+                stream_id);
+        return BAD_VALUE;
+    }
+
+    switch(mStreams.valueAt(streamIndex).format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            mRawStreamCount--;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            mJpegStreamCount--;
+            break;
+        default:
+            mProcessedStreamCount--;
+            break;
+    }
+
+    mStreams.removeItemsAt(streamIndex);
+
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
+        int32_t ext1,
+        int32_t ext2) {
+    Mutex::Autolock l(mMutex);
+    return mControlThread->triggerAction(trigger_id,
+            ext1, ext2);
+}
+
+/** Custom tag definitions */
+
+// Emulator camera metadata sections
+enum {
+    EMULATOR_SCENE = VENDOR_SECTION,
+    END_EMULATOR_SECTIONS
+};
+
+enum {
+    EMULATOR_SCENE_START = EMULATOR_SCENE << 16,
+};
+
+// Emulator camera metadata tags
+enum {
+    // Hour of day to use for lighting calculations (0-23). Default: 12
+    EMULATOR_SCENE_HOUROFDAY = EMULATOR_SCENE_START,
+    EMULATOR_SCENE_END
+};
+
+unsigned int emulator_metadata_section_bounds[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION][2] = {
+    { EMULATOR_SCENE_START, EMULATOR_SCENE_END }
+};
+
+const char *emulator_metadata_section_names[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION] = {
+    "com.android.emulator.scene"
+};
+
+typedef struct emulator_tag_info {
+    const char *tag_name;
+    uint8_t     tag_type;
+} emulator_tag_info_t;
+
+emulator_tag_info_t emulator_scene[EMULATOR_SCENE_END - EMULATOR_SCENE_START] = {
+    { "hourOfDay", TYPE_INT32 }
+};
+
+emulator_tag_info_t *tag_info[END_EMULATOR_SECTIONS -
+        VENDOR_SECTION] = {
+    emulator_scene
+};
+
+const char* EmulatedFakeCamera2::getVendorSectionName(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
+    return emulator_metadata_section_names[section - VENDOR_SECTION];
+}
+
+const char* EmulatedFakeCamera2::getVendorTagName(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
+    uint32_t section_index = section - VENDOR_SECTION;
+    if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+        return NULL;
+    }
+    uint32_t tag_index = tag & 0xFFFF;
+    return tag_info[section_index][tag_index].tag_name;
+}
+
+int EmulatedFakeCamera2::getVendorTagType(uint32_t tag) {
+    ALOGV("%s", __FUNCTION__);
+    uint32_t section = tag >> 16;
+    if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return -1;
+    uint32_t section_index = section - VENDOR_SECTION;
+    if (tag >= emulator_metadata_section_bounds[section_index][1]) {
+        return -1;
+    }
+    uint32_t tag_index = tag & 0xFFFF;
+    return tag_info[section_index][tag_index].tag_type;
+}
+
+/** Shutdown and debug methods */
+
+int EmulatedFakeCamera2::dump(int fd) {
+    String8 result;
+
+    result.appendFormat("    Camera HAL device: EmulatedFakeCamera2\n");
+    result.appendFormat("      Streams:\n");
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        int id = mStreams.keyAt(i);
+        const Stream& s = mStreams.valueAt(i);
+        result.appendFormat(
+            "         Stream %d: %d x %d, format 0x%x, stride %d\n",
+            id, s.width, s.height, s.format, s.stride);
+    }
+
+    write(fd, result.string(), result.size());
+
+    return NO_ERROR;
+}
+
+void EmulatedFakeCamera2::signalError() {
+    // TODO: Let parent know so we can shut down cleanly
+    ALOGE("Worker thread is signaling a serious error");
+}
+
+/** Pipeline control worker thread methods */
+
+EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent),
+        mRequestCount(0),
+        mNextBuffers(NULL) {
+    mRunning = false;
+}
+
+EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
+    Mutex::Autolock lock(mInputMutex);
+
+    ALOGV("Starting up ConfigureThread");
+    mRequest = NULL;
+    mActive  = false;
+    mRunning = true;
+
+    mInputSignal.signal();
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
+    Mutex::Autolock lock(mInputMutex);
+    if (!mRunning) {
+        ALOGV("Waiting for configure thread to start");
+        mInputSignal.wait(mInputMutex);
+    }
+    return OK;
+}
+
+status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
+    waitUntilRunning();
+
+    Mutex::Autolock lock(mInputMutex);
+
+    mActive = true;
+    mInputSignal.signal();
+
+    return OK;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInternalsMutex);
+
+    if (mNextBuffers == NULL) return false;
+    for (size_t i=0; i < mNextBuffers->size(); i++) {
+        if ((*mNextBuffers)[i].streamId == (int)id) return true;
+    }
+    return false;
+}
+
+int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
+    Mutex::Autolock lock(mInputMutex);
+    return mRequestCount;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
+    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+    status_t res;
+
+    // Check if we're currently processing or just waiting
+    {
+        Mutex::Autolock lock(mInputMutex);
+        if (!mActive) {
+            // Inactive, keep waiting until we've been signaled
+            status_t res;
+            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+            if (res != NO_ERROR && res != TIMED_OUT) {
+                ALOGE("%s: Error waiting for input requests: %d",
+                        __FUNCTION__, res);
+                return false;
+            }
+            if (!mActive) return true;
+            ALOGV("New request available");
+        }
+        // Active
+    }
+    if (mRequest == NULL) {
+        Mutex::Autolock il(mInternalsMutex);
+
+        ALOGV("Getting next request");
+        res = mParent->mRequestQueueSrc->dequeue_request(
+            mParent->mRequestQueueSrc,
+            &mRequest);
+        if (res != NO_ERROR) {
+            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
+            mParent->signalError();
+            return false;
+        }
+        if (mRequest == NULL) {
+            ALOGV("Request queue empty, going inactive");
+            // No requests available, go into inactive mode
+            Mutex::Autolock lock(mInputMutex);
+            mActive = false;
+            return true;
+        } else {
+            Mutex::Autolock lock(mInputMutex);
+            mRequestCount++;
+        }
+        // Get necessary parameters for sensor config
+
+        mParent->mControlThread->processRequest(mRequest);
+
+        camera_metadata_entry_t streams;
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_REQUEST_OUTPUT_STREAMS,
+                &streams);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading output stream tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+
+        mNextBuffers = new Buffers;
+        mNextNeedsJpeg = false;
+        ALOGV("Setting up buffers for capture");
+        for (size_t i = 0; i < streams.count; i++) {
+            int streamId = streams.data.u8[i];
+            const Stream &s = mParent->getStreamInfo(streamId);
+            if (s.format == GRALLOC_EMULATOR_PIXEL_FORMAT_AUTO) {
+                ALOGE("%s: Stream %d does not have a concrete pixel format, but "
+                        "is included in a request!", __FUNCTION__, streamId);
+                mParent->signalError();
+                return false;
+            }
+            StreamBuffer b;
+            b.streamId = streams.data.u8[i];
+            b.width  = s.width;
+            b.height = s.height;
+            b.format = s.format;
+            b.stride = s.stride;
+            mNextBuffers->push_back(b);
+            ALOGV("  Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
+                    i, b.streamId, b.width, b.height, b.format, b.stride);
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                mNextNeedsJpeg = true;
+            }
+        }
+
+        camera_metadata_entry_t e;
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_REQUEST_FRAME_COUNT,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame count tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameNumber = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading exposure time tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextExposureTime = *e.data.i64;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_FRAME_DURATION,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameDuration = *e.data.i64;
+
+        if (mNextFrameDuration <
+                mNextExposureTime + Sensor::kMinVerticalBlank) {
+            mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
+        }
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_SENSITIVITY,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextSensitivity = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                EMULATOR_SCENE_HOUROFDAY,
+                &e);
+        if (res == NO_ERROR) {
+            ALOGV("Setting hour: %d", *e.data.i32);
+            mParent->mSensor->getScene().setHour(*e.data.i32);
+        }
+
+        // Start waiting on sensor or JPEG block
+        if (mNextNeedsJpeg) {
+            ALOGV("Waiting for JPEG compressor");
+        } else {
+            ALOGV("Waiting for sensor");
+        }
+    }
+
+    if (mNextNeedsJpeg) {
+        bool jpegDone;
+        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
+        if (!jpegDone) return true;
+
+        ALOGV("Waiting for sensor");
+        mNextNeedsJpeg = false;
+    }
+    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
+
+    if (!vsync) return true;
+
+    Mutex::Autolock il(mInternalsMutex);
+    ALOGV("Configuring sensor for frame %d", mNextFrameNumber);
+    mParent->mSensor->setExposureTime(mNextExposureTime);
+    mParent->mSensor->setFrameDuration(mNextFrameDuration);
+    mParent->mSensor->setSensitivity(mNextSensitivity);
+
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
+
+        Stream s = mParent->getStreamInfo(b.streamId);
+
+        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+        if (res != NO_ERROR || b.buffer == NULL) {
+            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                    __FUNCTION__, b.streamId, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+
+        /* Lock the buffer from the perspective of the graphics mapper */
+        uint8_t *img;
+        const Rect rect(s.width, s.height);
+
+        res = GraphicBufferMapper::get().lock(*(b.buffer),
+                GRALLOC_USAGE_HW_CAMERA_WRITE,
+                rect, (void**)&(b.img) );
+
+        if (res != NO_ERROR) {
+            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            s.ops->cancel_buffer(s.ops,
+                    b.buffer);
+            mParent->signalError();
+            return false;
+        }
+    }
+
+    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
+    mParent->mSensor->setDestinationBuffers(mNextBuffers);
+
+    mRequest = NULL;
+    mNextBuffers = NULL;
+
+    Mutex::Autolock lock(mInputMutex);
+    mRequestCount--;
+
+    return true;
+}
+
+EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent),
+        mRunning(false),
+        mActive(false),
+        mRequestCount(0),
+        mRequest(NULL),
+        mBuffers(NULL) {
+    mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
+    mInFlightHead = 0;
+    mInFlightTail = 0;
+}
+
+EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
+    delete mInFlightQueue;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
+    Mutex::Autolock lock(mInputMutex);
+    ALOGV("Starting up ReadoutThread");
+    mRunning = true;
+    mInputSignal.signal();
+    return NO_ERROR;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
+    Mutex::Autolock lock(mInputMutex);
+    if (!mRunning) {
+        ALOGV("Waiting for readout thread to start");
+        mInputSignal.wait(mInputMutex);
+    }
+    return OK;
+}
+
+void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
+        camera_metadata_t *request,
+        Buffers *buffers) {
+    Mutex::Autolock lock(mInputMutex);
+    if ( (mInFlightTail + 1) % kInFlightQueueSize == mInFlightHead) {
+        ALOGE("In flight queue full, dropping captures");
+        mParent->signalError();
+        return;
+    }
+    mInFlightQueue[mInFlightTail].request = request;
+    mInFlightQueue[mInFlightTail].buffers = buffers;
+    mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
+    mRequestCount++;
+
+    if (!mActive) {
+        mActive = true;
+        mInputSignal.signal();
+    }
+}
+
+bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInputMutex);
+
+    size_t i = mInFlightHead;
+    while (i != mInFlightTail) {
+        for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
+            if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
+                return true;
+        }
+        i = (i + 1) % kInFlightQueueSize;
+    }
+
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    if (mBuffers != NULL) {
+        for (i = 0; i < mBuffers->size(); i++) {
+            if ( (*mBuffers)[i].streamId == (int)id) return true;
+        }
+    }
+
+    return false;
+}
+
+int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
+    Mutex::Autolock lock(mInputMutex);
+
+    return mRequestCount;
+}
+
+bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
+    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
+    status_t res;
+
+    // Check if we're currently processing or just waiting
+    {
+        Mutex::Autolock lock(mInputMutex);
+        if (!mActive) {
+            // Inactive, keep waiting until we've been signaled
+            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
+            if (res != NO_ERROR && res != TIMED_OUT) {
+                ALOGE("%s: Error waiting for capture requests: %d",
+                        __FUNCTION__, res);
+                mParent->signalError();
+                return false;
+            }
+            if (!mActive) return true;
+        }
+        // Active, see if we need a new request
+        if (mRequest == NULL) {
+            if (mInFlightHead == mInFlightTail) {
+                // Go inactive
+                ALOGV("Waiting for sensor data");
+                mActive = false;
+                return true;
+            } else {
+                Mutex::Autolock iLock(mInternalsMutex);
+                mRequest = mInFlightQueue[mInFlightHead].request;
+                mBuffers  = mInFlightQueue[mInFlightHead].buffers;
+                mInFlightQueue[mInFlightHead].request = NULL;
+                mInFlightQueue[mInFlightHead].buffers = NULL;
+                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
+                ALOGV("Ready to read out request %p, %d buffers",
+                        mRequest, mBuffers->size());
+            }
+        }
+    }
+
+    // Active with request, wait on sensor to complete
+
+    nsecs_t captureTime;
+
+    bool gotFrame;
+    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+            &captureTime);
+
+    if (!gotFrame) return true;
+
+    // Got sensor data, construct frame and send it out
+    ALOGV("Readout: Constructing metadata and frames");
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    camera_metadata_entry_t metadataMode;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_METADATA_MODE,
+            &metadataMode);
+
+    if (*metadataMode.data.u8 == ANDROID_REQUEST_METADATA_FULL) {
+        ALOGV("Metadata requested, constructing");
+
+        camera_metadata_t *frame = NULL;
+
+        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
+        size_t frame_data    = get_camera_metadata_data_count(mRequest);
+
+        // TODO: Dynamically calculate based on enabled statistics, etc
+        frame_entries += 10;
+        frame_data += 100;
+
+        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
+                frame_entries, frame_data, &frame);
+
+        if (res != NO_ERROR || frame == NULL) {
+            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+
+        res = append_camera_metadata(frame, mRequest);
+        if (res != NO_ERROR) {
+            ALOGE("Unable to append request metadata");
+        }
+
+        add_camera_metadata_entry(frame,
+                ANDROID_SENSOR_TIMESTAMP,
+                &captureTime,
+                1);
+
+        int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
+        camera_metadata_entry_t requestedHour;
+        res = find_camera_metadata_entry(frame,
+                EMULATOR_SCENE_HOUROFDAY,
+                &requestedHour);
+        if (res == NAME_NOT_FOUND) {
+            ALOGV("Adding vendor tag");
+            res = add_camera_metadata_entry(frame,
+                    EMULATOR_SCENE_HOUROFDAY,
+                    &hourOfDay, 1);
+            if (res != NO_ERROR) {
+                ALOGE("Unable to add vendor tag");
+            }
+        } else if (res == OK) {
+            ALOGV("Replacing value in vendor tag");
+            *requestedHour.data.i32 = hourOfDay;
+        } else {
+            ALOGE("Error looking up vendor tag");
+        }
+
+        collectStatisticsMetadata(frame);
+        // TODO: Collect all final values used from sensor in addition to timestamp
+
+        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
+                frame);
+    }
+
+    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
+    if (res != NO_ERROR) {
+        ALOGE("%s: Unable to return request buffer to queue: %d",
+                __FUNCTION__, res);
+        mParent->signalError();
+        return false;
+    }
+    mRequest = NULL;
+
+    int compressedBufferIndex = -1;
+    ALOGV("Processing %d buffers", mBuffers->size());
+    for (size_t i = 0; i < mBuffers->size(); i++) {
+        const StreamBuffer &b = (*mBuffers)[i];
+        ALOGV("  Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+        if (b.streamId >= 0) {
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                // Assumes only one BLOB buffer type per capture
+                compressedBufferIndex = i;
+            } else {
+                ALOGV("Sending image buffer %d to output stream %d",
+                        i, b.streamId);
+                GraphicBufferMapper::get().unlock(*(b.buffer));
+                const Stream &s = mParent->getStreamInfo(b.streamId);
+                res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
+                if (res != OK) {
+                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
+                            strerror(-res), res);
+                    mParent->signalError();
+                }
+            }
+        }
+    }
+
+    if (compressedBufferIndex == -1) {
+        delete mBuffers;
+        mBuffers = NULL;
+    } else {
+        ALOGV("Starting JPEG compression for buffer %d, stream %d",
+                compressedBufferIndex,
+                (*mBuffers)[compressedBufferIndex].streamId);
+        mParent->mJpegCompressor->start(mBuffers, captureTime);
+        mBuffers = NULL;
+    }
+
+    Mutex::Autolock l(mInputMutex);
+    mRequestCount--;
+
+    return true;
+}
+
+status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
+        camera_metadata_t *frame) {
+    // Append fake face-detection statistics to an output frame, honoring the
+    // requested ANDROID_STATS_FACE_DETECT_MODE:
+    //   OFF    -> nothing added
+    //   SIMPLE -> face rectangles + confidence scores
+    //   FULL   -> additionally eye/mouth landmarks and stable face IDs
+    // Completely fake face rectangles, don't correspond to real faces in scene
+    ALOGV("Collecting statistics metadata");
+
+    status_t res;
+    camera_metadata_entry_t entry;
+    res = find_camera_metadata_entry(frame,
+                ANDROID_STATS_FACE_DETECT_MODE,
+                &entry);
+    if (res != OK) {
+        ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (entry.data.u8[0] == ANDROID_STATS_FACE_DETECTION_OFF) return OK;
+
+    // The coordinate system for the face regions is the raw sensor pixel
+    // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
+    // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
+    // approximately place two faces on top of the windows of the house. No
+    // actual faces exist there, but might one day. Note that this doesn't
+    // account for the offsets used to account for aspect ratio differences, so
+    // the rectangles don't line up quite right.
+    const size_t numFaces = 2;
+    // Each rectangle is (left, top, right, bottom) in raw sensor pixels.
+    int32_t rects[numFaces * 4] = {
+            Sensor::kResolution[0] * 10 / 20,
+            Sensor::kResolution[1] * 15 / 20,
+            Sensor::kResolution[0] * 12 / 20,
+            Sensor::kResolution[1] * 17 / 20,
+
+            Sensor::kResolution[0] * 16 / 20,
+            Sensor::kResolution[1] * 15 / 20,
+            Sensor::kResolution[0] * 18 / 20,
+            Sensor::kResolution[1] * 17 / 20
+    };
+    // To simulate some kind of real detection going on, we jitter the rectangles on
+    // each frame by a few pixels in each dimension.
+    for (size_t i = 0; i < numFaces * 4; i++) {
+        rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
+    }
+    // The confidence scores (0-100) are similarly jittered.
+    uint8_t scores[numFaces] = { 85, 95 };
+    for (size_t i = 0; i < numFaces; i++) {
+        scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
+    }
+
+    res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_RECTANGLES,
+            rects, numFaces * 4);
+    if (res != OK) {
+        ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_SCORES,
+            scores, numFaces);
+    if (res != OK) {
+        ALOGE("%s: Unable to add face scores!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (entry.data.u8[0] == ANDROID_STATS_FACE_DETECTION_SIMPLE) return OK;
+
+    // Advanced face detection options - add eye/mouth coordinates.  The
+    // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
+    // mouthX, mouthY). The mapping is the same as the face rectangles.
+    int32_t features[numFaces * 6] = {
+        Sensor::kResolution[0] * 10.5 / 20,
+        Sensor::kResolution[1] * 16 / 20,
+        Sensor::kResolution[0] * 11.5 / 20,
+        Sensor::kResolution[1] * 16 / 20,
+        Sensor::kResolution[0] * 11 / 20,
+        Sensor::kResolution[1] * 16.5 / 20,
+
+        Sensor::kResolution[0] * 16.5 / 20,
+        Sensor::kResolution[1] * 16 / 20,
+        Sensor::kResolution[0] * 17.5 / 20,
+        Sensor::kResolution[1] * 16 / 20,
+        Sensor::kResolution[0] * 17 / 20,
+        Sensor::kResolution[1] * 16.5 / 20,
+    };
+    // Jitter these a bit less than the rects
+    for (size_t i = 0; i < numFaces * 6; i++) {
+        features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
+    }
+    // These are unique IDs that are used to identify each face while it's
+    // visible to the detector (if a face went away and came back, it'd get a
+    // new ID).
+    int32_t ids[numFaces] = {
+        100, 200
+    };
+
+    res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_LANDMARKS,
+            features, numFaces * 6);
+    if (res != OK) {
+        ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    res = add_camera_metadata_entry(frame, ANDROID_STATS_FACE_IDS,
+            ids, numFaces);
+    if (res != OK) {
+        // Fixed: this path adds face IDs, but previously logged
+        // "Unable to add face scores!" (copy-paste from the scores branch).
+        ALOGE("%s: Unable to add face IDs!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    return OK;
+}
+
+// Construct the simulated 3A control thread for the given camera instance.
+// NOTE(review): Thread(false) presumably selects the no-Java-callback thread
+// variant — confirm against the Android Thread base class.
+EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent) {
+    // Not running yet; readyToRun() sets this and signals waiters.
+    mRunning = false;
+}
+
+// No dynamically-allocated state to release; members are plain values.
+EmulatedFakeCamera2::ControlThread::~ControlThread() {
+}
+
+// Thread hook run once before the first threadLoop() iteration: resets all
+// control/3A state to startup defaults and wakes waitUntilRunning() callers.
+status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
+    Mutex::Autolock lock(mInputMutex);
+
+    ALOGV("Starting up ControlThread");
+    mRunning = true;
+    // Clear any pending framework trigger flags.
+    mStartAf = false;
+    mCancelAf = false;
+    mStartPrecapture = false;
+
+    mControlMode = ANDROID_CONTROL_AUTO;
+
+    mEffectMode = ANDROID_CONTROL_EFFECT_OFF;
+    mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+
+    mAfMode = ANDROID_CONTROL_AF_AUTO;
+    mAfModeChange = false;
+
+    mAeMode = ANDROID_CONTROL_AE_ON;
+    mAwbMode = ANDROID_CONTROL_AWB_AUTO;
+
+    // No trigger IDs received from the framework yet.
+    mAfTriggerId = 0;
+    mPrecaptureTriggerId = 0;
+
+    // All 3A state machines start out inactive.
+    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+    mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
+    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+
+    // Wake any thread blocked in waitUntilRunning().
+    mInputSignal.signal();
+    return NO_ERROR;
+}
+
+// Block the caller until readyToRun() has initialized the control state and
+// signaled mInputSignal. Always returns OK.
+status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
+    Mutex::Autolock lock(mInputMutex);
+    // Wait in a loop rather than with a single 'if': condition-variable waits
+    // may wake spuriously, so the predicate (mRunning) must be rechecked
+    // after every wakeup.
+    while (!mRunning) {
+        ALOGV("Waiting for control thread to start");
+        mInputSignal.wait(mInputMutex);
+    }
+    return OK;
+}
+
+// Latch the android.control.* settings from an incoming capture request into
+// the control thread's state. Returns OK; missing tags leave the previously
+// latched value in place.
+status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
+    Mutex::Autolock lock(mInputMutex);
+    // TODO: Add handling for all android.control.* fields here
+    camera_metadata_entry_t mode;
+    status_t res;
+
+    // Each lookup result is now checked before dereferencing: previously the
+    // returned status was ignored, so a request missing one of these tags
+    // would read mode.data.u8 from an uninitialized entry.
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_MODE,
+            &mode);
+    if (res == NO_ERROR) {
+        mControlMode = mode.data.u8[0];
+    }
+
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_EFFECT_MODE,
+            &mode);
+    if (res == NO_ERROR) {
+        mEffectMode = mode.data.u8[0];
+    }
+
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_SCENE_MODE,
+            &mode);
+    if (res == NO_ERROR) {
+        mSceneMode = mode.data.u8[0];
+    }
+
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_AF_MODE,
+            &mode);
+    // An AF mode change resets the AF state machine and cancels any pending
+    // start/cancel triggers (see threadLoop()).
+    if (res == NO_ERROR && mAfMode != mode.data.u8[0]) {
+        ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
+        mAfMode = mode.data.u8[0];
+        mAfModeChange = true;
+        mStartAf = false;
+        mCancelAf = false;
+    }
+
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_AE_MODE,
+            &mode);
+    if (res == NO_ERROR) {
+        mAeMode = mode.data.u8[0];
+    }
+
+    res = find_camera_metadata_entry(request,
+            ANDROID_CONTROL_AWB_MODE,
+            &mode);
+    if (res == NO_ERROR) {
+        mAwbMode = mode.data.u8[0];
+    }
+
+    // TODO: Override control fields
+
+    return OK;
+}
+
+// Record a framework-issued 3A trigger; the control loop acts on the flags on
+// its next cycle. ext1 carries the framework's trigger ID; ext2 is unused for
+// the recognized triggers. Returns BAD_VALUE for unknown message types.
+status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
+        int32_t ext1, int32_t ext2) {
+    Mutex::Autolock lock(mInputMutex);
+    if (msgType == CAMERA2_TRIGGER_AUTOFOCUS) {
+        // Request a new AF sweep; clear any pending cancellation.
+        mAfTriggerId = ext1;
+        mStartAf = true;
+        mCancelAf = false;
+    } else if (msgType == CAMERA2_TRIGGER_CANCEL_AUTOFOCUS) {
+        // Cancel AF; clear any pending start request.
+        mAfTriggerId = ext1;
+        mStartAf = false;
+        mCancelAf = true;
+    } else if (msgType == CAMERA2_TRIGGER_PRECAPTURE_METERING) {
+        mPrecaptureTriggerId = ext1;
+        mStartPrecapture = true;
+    } else {
+        ALOGE("%s: Unknown action triggered: %d (arguments %d %d)",
+                __FUNCTION__, msgType, ext1, ext2);
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+// Timing constants, in nanoseconds: the control loop wakes every 100 ms, and
+// a simulated AF scan lasts between 0.5 s and 0.9 s.
+const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900000000;
+// Probability that a finished AUTO/MACRO AF scan reports focus success.
+const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
+// Per-cycle probability of starting a continuous-mode passive AF scan.
+const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
+    kControlCycleDelay / 5000000000.0; // Once every 5 seconds
+
+// One control-loop cycle: snapshot the AF inputs under mInputMutex, run the
+// simulated AF state machine without holding the lock, then sleep until the
+// next cycle (or earlier, if the active AF scan finishes sooner).
+bool EmulatedFakeCamera2::ControlThread::threadLoop() {
+    bool afModeChange = false;
+    bool afTriggered = false;
+    bool afCancelled = false;
+    uint8_t afState;
+    uint8_t afMode;
+    int32_t afTriggerId;
+    nsecs_t nextSleep = kControlCycleDelay;
+
+    {
+        // Consume pending trigger/mode-change flags atomically.
+        Mutex::Autolock lock(mInputMutex);
+        if (mStartAf) {
+            afTriggered = true;
+            mStartAf = false;
+        } else if (mCancelAf) {
+            afCancelled = true;
+            mCancelAf = false;
+        }
+        afState = mAfState;
+        afMode = mAfMode;
+        afModeChange = mAfModeChange;
+        mAfModeChange = false;
+
+        afTriggerId = mAfTriggerId;
+    }
+
+    if (afCancelled || afModeChange) {
+        ALOGV("Resetting AF state due to cancel/mode change");
+        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+        updateAfState(afState, afTriggerId);
+        // Abort any in-flight scan and forget a pending lock request.
+        mAfScanDuration = 0;
+        mLockAfterPassiveScan = false;
+    }
+
+    uint8_t oldAfState = afState;
+
+    if (afTriggered) {
+        afState = processAfTrigger(afMode, afState);
+    }
+
+    // Continuous modes may spontaneously begin a passive scan...
+    afState = maybeStartAfScan(afMode, afState);
+
+    // ...and an active/passive scan is advanced; updateAfScan may shorten
+    // nextSleep so we wake exactly when the scan is due to complete.
+    afState = updateAfScan(afMode, afState, &nextSleep);
+
+    updateAfState(afState, afTriggerId);
+
+    int ret;
+    timespec t;
+    t.tv_sec = 0;
+    t.tv_nsec = nextSleep;
+    // nanosleep rewrites t with the remaining time when interrupted, so
+    // looping until it returns 0 sleeps out the full interval.
+    do {
+        ret = nanosleep(&t, &t);
+    } while (ret != 0);
+
+    return true;
+}
+
+// Apply an AF trigger to the simulated AF state machine and return the new
+// AF state. Behavior depends on the active AF mode:
+//   OFF/EDOF     - triggers are ignored;
+//   AUTO/MACRO   - a trigger starts a new scan of random duration;
+//   CONTINUOUS_* - a trigger locks focus (PICTURE mode defers the lock until
+//                  the in-flight passive scan completes).
+int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
+        uint8_t afState) {
+    switch (afMode) {
+        case ANDROID_CONTROL_AF_OFF:
+        case ANDROID_CONTROL_AF_EDOF:
+            // Do nothing
+            break;
+        case ANDROID_CONTROL_AF_MACRO:
+        case ANDROID_CONTROL_AF_AUTO:
+            switch (afState) {
+                case ANDROID_CONTROL_AF_STATE_INACTIVE:
+                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+                    // Start new focusing cycle with a random duration in
+                    // [kMinAfDuration, kMaxAfDuration].
+                    mAfScanDuration =  ((double)rand() / RAND_MAX) *
+                        (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
+                    afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
+                    ALOGV("%s: AF scan start, duration %lld ms",
+                          __FUNCTION__, mAfScanDuration / 1000000);
+                    break;
+                case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
+                    // Ignore new request, already scanning
+                    break;
+                default:
+                    ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d",
+                          afState);
+            }
+            break;
+        case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
+            switch (afState) {
+                // Picture mode waits for passive scan to complete
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+                    mLockAfterPassiveScan = true;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_INACTIVE:
+                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+                    // Must cancel to get out of these states
+                    break;
+                default:
+                    ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
+                          afState);
+            }
+            break;
+        case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
+            switch (afState) {
+                // Video mode does not wait for passive scan to complete
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+                case ANDROID_CONTROL_AF_STATE_INACTIVE:
+                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+                    // Must cancel to get out of these states
+                    break;
+                default:
+                    ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d",
+                          afState);
+            }
+            break;
+        default:
+            break;
+    }
+    return afState;
+}
+
+// In the continuous AF modes, randomly (at kContinuousAfStartRate per cycle)
+// begin a passive scan when the state machine is idle. Returns the possibly
+// updated AF state; all other mode/state combinations pass through unchanged.
+int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
+        uint8_t afState) {
+    bool continuousMode =
+            (afMode == ANDROID_CONTROL_AF_CONTINUOUS_VIDEO) ||
+            (afMode == ANDROID_CONTROL_AF_CONTINUOUS_PICTURE);
+    bool idleState =
+            (afState == ANDROID_CONTROL_AF_STATE_INACTIVE) ||
+            (afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
+
+    if (!continuousMode || !idleState) {
+        return afState;
+    }
+
+    if (((double)rand() / RAND_MAX) < kContinuousAfStartRate) {
+        // Start new passive focusing cycle with a random duration in
+        // [kMinAfDuration, kMaxAfDuration].
+        mAfScanDuration =  ((double)rand() / RAND_MAX) *
+                (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
+        afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
+        ALOGV("%s: AF passive scan start, duration %lld ms",
+            __FUNCTION__, mAfScanDuration / 1000000);
+    }
+    return afState;
+}
+
+// Advance an in-progress AF scan by up to *maxSleep nanoseconds. When the
+// remaining scan time fits within *maxSleep, the scan completes this cycle
+// and *maxSleep is shortened so the control loop wakes exactly at completion;
+// the resulting AF state depends on the active mode. States other than
+// ACTIVE_SCAN/PASSIVE_SCAN pass through unchanged.
+int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
+        uint8_t afState, nsecs_t *maxSleep) {
+    if (! (afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
+            afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN ) ) {
+        return afState;
+    }
+
+    if (mAfScanDuration == 0) {
+        // Scan finished on the previous cycle; resolve the outcome now.
+        ALOGV("%s: AF scan done", __FUNCTION__);
+        switch (afMode) {
+            case ANDROID_CONTROL_AF_MACRO:
+            case ANDROID_CONTROL_AF_AUTO: {
+                // Active scans succeed with probability kAfSuccessRate.
+                bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
+                if (success) {
+                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                } else {
+                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                }
+                break;
+            }
+            case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
+                // A trigger received mid-scan (mLockAfterPassiveScan) converts
+                // the passive result into a lock.
+                if (mLockAfterPassiveScan) {
+                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                    mLockAfterPassiveScan = false;
+                } else {
+                    afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
+                }
+                break;
+            case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
+                afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
+                break;
+            default:
+                ALOGE("Unexpected AF mode in scan state");
+        }
+    } else {
+        // Scan still running: either it ends within this sleep interval
+        // (shorten the sleep and mark it done), or consume one interval's
+        // worth of scan time.
+        if (mAfScanDuration <= *maxSleep) {
+            *maxSleep = mAfScanDuration;
+            mAfScanDuration = 0;
+        } else {
+            mAfScanDuration -= *maxSleep;
+        }
+    }
+    return afState;
+}
+
+// Publish a new AF state and notify the framework, but only when the state
+// actually changed; triggerId is forwarded with the notification.
+void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
+        int32_t triggerId) {
+    Mutex::Autolock lock(mInputMutex);
+    if (mAfState == newState) {
+        return;
+    }
+    ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__,
+            newState, triggerId);
+    mAfState = newState;
+    mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS,
+            newState, triggerId, 0);
+}
+
+/** Private methods */
+
+// Build (or size) the static camera characteristics metadata. When
+// sizeRequest is true the entries are only counted and a buffer of the
+// required size is allocated into *info at the end; otherwise each value is
+// added to the existing *info buffer via addOrSize().
+status_t EmulatedFakeCamera2::constructStaticInfo(
+        camera_metadata_t **info,
+        bool sizeRequest) const {
+
+    size_t entryCount = 0;
+    size_t dataCount = 0;
+    status_t ret;
+
+// Either adds the entry to *info or accumulates its size, per sizeRequest.
+#define ADD_OR_SIZE( tag, data, count ) \
+    if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
+            tag, data, count) ) != OK ) return ret
+
+    // android.lens
+
+    // 5 cm min focus distance for back camera, infinity (fixed focus) for front
+    const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
+    ADD_OR_SIZE(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE,
+            &minFocusDistance, 1);
+    // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
+    const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
+    // Fixed: previously passed &minFocusDistance here (copy-paste error),
+    // leaving hyperFocalDistance unused and reporting the wrong value.
+    ADD_OR_SIZE(ANDROID_LENS_HYPERFOCAL_DISTANCE,
+            &hyperFocalDistance, 1);
+
+    static const float focalLength = 3.30f; // mm
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS,
+            &focalLength, 1);
+    static const float aperture = 2.8f;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_APERTURES,
+            &aperture, 1);
+    static const float filterDensity = 0;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_FILTER_DENSITY,
+            &filterDensity, 1);
+    static const uint8_t availableOpticalStabilization =
+            ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_LENS_AVAILABLE_OPTICAL_STABILIZATION,
+            &availableOpticalStabilization, 1);
+
+    static const int32_t lensShadingMapSize[] = {1, 1};
+    ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP_SIZE, lensShadingMapSize,
+            sizeof(lensShadingMapSize)/sizeof(int32_t));
+
+    // Unit gain for all three channels of the trivial 1x1 shading map.
+    static const float lensShadingMap[3 * 1 * 1 ] =
+            { 1.f, 1.f, 1.f };
+    ADD_OR_SIZE(ANDROID_LENS_SHADING_MAP, lensShadingMap,
+            sizeof(lensShadingMap)/sizeof(float));
+
+    // Identity transform
+    static const int32_t geometricCorrectionMapSize[] = {2, 2};
+    ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP_SIZE,
+            geometricCorrectionMapSize,
+            sizeof(geometricCorrectionMapSize)/sizeof(int32_t));
+
+    static const float geometricCorrectionMap[2 * 3 * 2 * 2] = {
+            0.f, 0.f,  0.f, 0.f,  0.f, 0.f,
+            1.f, 0.f,  1.f, 0.f,  1.f, 0.f,
+            0.f, 1.f,  0.f, 1.f,  0.f, 1.f,
+            1.f, 1.f,  1.f, 1.f,  1.f, 1.f};
+    ADD_OR_SIZE(ANDROID_LENS_GEOMETRIC_CORRECTION_MAP,
+            geometricCorrectionMap,
+            sizeof(geometricCorrectionMap)/sizeof(float));
+
+    int32_t lensFacing = mFacingBack ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
+
+    float lensPosition[3];
+    if (mFacingBack) {
+        // Back-facing camera is center-top on device
+        lensPosition[0] = 0;
+        lensPosition[1] = 20;
+        lensPosition[2] = -5;
+    } else {
+        // Front-facing camera is center-right on device
+        lensPosition[0] = 20;
+        lensPosition[1] = 20;
+        lensPosition[2] = 0;
+    }
+    ADD_OR_SIZE(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
+            sizeof(float));
+
+    // android.sensor
+
+    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME_RANGE,
+            Sensor::kExposureTimeRange, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_MAX_FRAME_DURATION,
+            &Sensor::kFrameDurationRange[1], 1);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_AVAILABLE_SENSITIVITIES,
+            Sensor::kAvailableSensitivities,
+            sizeof(Sensor::kAvailableSensitivities)
+            /sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SENSOR_COLOR_FILTER_ARRANGEMENT,
+            &Sensor::kColorFilterArrangement, 1);
+
+    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
+    ADD_OR_SIZE(ANDROID_SENSOR_PHYSICAL_SIZE,
+            sensorPhysicalSize, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_PIXEL_ARRAY_SIZE,
+            Sensor::kResolution, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE,
+            Sensor::kResolution, 2);
+
+    ADD_OR_SIZE(ANDROID_SENSOR_WHITE_LEVEL,
+            &Sensor::kMaxRawValue, 1);
+
+    static const int32_t blackLevelPattern[4] = {
+            Sensor::kBlackLevel, Sensor::kBlackLevel,
+            Sensor::kBlackLevel, Sensor::kBlackLevel
+    };
+    ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+            blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
+
+    //TODO: sensor color calibration fields
+
+    // android.flash
+    static const uint8_t flashAvailable = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_AVAILABLE, &flashAvailable, 1);
+
+    static const int64_t flashChargeDuration = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_CHARGE_DURATION, &flashChargeDuration, 1);
+
+    // android.tonemap
+
+    static const int32_t tonemapCurvePoints = 128;
+    ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
+
+    // android.scaler
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
+            kAvailableFormats,
+            sizeof(kAvailableFormats)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
+            kAvailableRawSizes,
+            sizeof(kAvailableRawSizes)/sizeof(uint32_t));
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
+            kAvailableRawMinDurations,
+            sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
+
+    // Processed and JPEG size lists differ between the back and front sensors.
+    if (mFacingBack) {
+        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+                kAvailableProcessedSizesBack,
+                sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
+    } else {
+        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+                kAvailableProcessedSizesFront,
+                sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
+    }
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
+            kAvailableProcessedMinDurations,
+            sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
+
+    if (mFacingBack) {
+        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
+                kAvailableJpegSizesBack,
+                sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
+    } else {
+        ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
+                kAvailableJpegSizesFront,
+                sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
+    }
+
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
+            kAvailableJpegMinDurations,
+            sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
+
+    static const float maxZoom = 10;
+    ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_ZOOM,
+            &maxZoom, 1);
+
+    // android.jpeg
+
+    static const int32_t jpegThumbnailSizes[] = {
+            160, 120,
+            320, 240
+     };
+    ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+            jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
+
+    static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
+    ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
+    // android.stats
+
+    static const uint8_t availableFaceDetectModes[] = {
+        ANDROID_STATS_FACE_DETECTION_OFF,
+        ANDROID_STATS_FACE_DETECTION_SIMPLE,
+        ANDROID_STATS_FACE_DETECTION_FULL
+    };
+
+    ADD_OR_SIZE(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES,
+            availableFaceDetectModes,
+            sizeof(availableFaceDetectModes));
+
+    static const int32_t maxFaceCount = 8;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_FACE_COUNT,
+            &maxFaceCount, 1);
+
+    static const int32_t histogramSize = 64;
+    ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_BUCKET_COUNT,
+            &histogramSize, 1);
+
+    static const int32_t maxHistogramCount = 1000;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_HISTOGRAM_COUNT,
+            &maxHistogramCount, 1);
+
+    static const int32_t sharpnessMapSize[2] = {64, 64};
+    ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_SIZE,
+            sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
+
+    static const int32_t maxSharpnessMapValue = 1000;
+    ADD_OR_SIZE(ANDROID_STATS_MAX_SHARPNESS_MAP_VALUE,
+            &maxSharpnessMapValue, 1);
+
+    // android.control
+
+    static const uint8_t availableSceneModes[] = {
+            ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+            availableSceneModes, sizeof(availableSceneModes));
+
+    static const uint8_t availableEffects[] = {
+            ANDROID_CONTROL_EFFECT_OFF
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+            availableEffects, sizeof(availableEffects));
+
+    int32_t max3aRegions = 0;
+    ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
+            &max3aRegions, 1);
+
+    static const uint8_t availableAeModes[] = {
+            ANDROID_CONTROL_AE_OFF,
+            ANDROID_CONTROL_AE_ON
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+            availableAeModes, sizeof(availableAeModes));
+
+    // Exposure compensation in 1/3 EV steps over [-9, 9] (i.e. +/- 3 EV).
+    static const camera_metadata_rational exposureCompensationStep = {
+            1, 3
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP,
+            &exposureCompensationStep, 1);
+
+    int32_t exposureCompensationRange[] = {-9, 9};
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE,
+            exposureCompensationRange,
+            sizeof(exposureCompensationRange)/sizeof(int32_t));
+
+    static const int32_t availableTargetFpsRanges[] = {
+            5, 30, 15, 30
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            availableTargetFpsRanges,
+            sizeof(availableTargetFpsRanges)/sizeof(int32_t));
+
+    static const uint8_t availableAntibandingModes[] = {
+            ANDROID_CONTROL_AE_ANTIBANDING_OFF,
+            ANDROID_CONTROL_AE_ANTIBANDING_AUTO
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+            availableAntibandingModes, sizeof(availableAntibandingModes));
+
+    static const uint8_t availableAwbModes[] = {
+            ANDROID_CONTROL_AWB_OFF,
+            ANDROID_CONTROL_AWB_AUTO,
+            ANDROID_CONTROL_AWB_INCANDESCENT,
+            ANDROID_CONTROL_AWB_FLUORESCENT,
+            ANDROID_CONTROL_AWB_DAYLIGHT,
+            ANDROID_CONTROL_AWB_SHADE
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+            availableAwbModes, sizeof(availableAwbModes));
+
+    // Only the back camera advertises focus control; the front is fixed-focus.
+    static const uint8_t availableAfModesBack[] = {
+            ANDROID_CONTROL_AF_OFF,
+            ANDROID_CONTROL_AF_AUTO,
+            ANDROID_CONTROL_AF_MACRO,
+            ANDROID_CONTROL_AF_CONTINUOUS_VIDEO,
+            ANDROID_CONTROL_AF_CONTINUOUS_PICTURE
+    };
+
+    static const uint8_t availableAfModesFront[] = {
+            ANDROID_CONTROL_AF_OFF
+    };
+
+    if (mFacingBack) {
+        ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                    availableAfModesBack, sizeof(availableAfModesBack));
+    } else {
+        ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                    availableAfModesFront, sizeof(availableAfModesFront));
+    }
+
+    static const uint8_t availableVstabModes[] = {
+            ANDROID_CONTROL_VIDEO_STABILIZATION_OFF
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+            availableVstabModes, sizeof(availableVstabModes));
+
+#undef ADD_OR_SIZE
+    /** Allocate metadata if sizing */
+    if (sizeRequest) {
+        // Fixed: entryCount/dataCount are size_t, so use %zu (the previous %d
+        // is undefined behavior on LP64 targets).
+        ALOGV("Allocating %zu entries, %zu extra bytes for "
+                "static camera info",
+                entryCount, dataCount);
+        *info = allocate_camera_metadata(entryCount, dataCount);
+        if (*info == NULL) {
+            // Fixed: added the missing space before the parenthesis in the
+            // concatenated message, and %d -> %zu as above.
+            ALOGE("Unable to allocate camera static info "
+                    "(%zu entries, %zu bytes extra data)",
+                    entryCount, dataCount);
+            return NO_MEMORY;
+        }
+    }
+    return OK;
+}
+
+status_t EmulatedFakeCamera2::constructDefaultRequest(
+        int request_template,
+        camera_metadata_t **request,
+        bool sizeRequest) const {
+
+    size_t entryCount = 0;
+    size_t dataCount = 0;
+    status_t ret;
+
+#define ADD_OR_SIZE( tag, data, count ) \
+    if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
+            tag, data, count) ) != OK ) return ret
+
+    static const int64_t USEC = 1000LL;
+    static const int64_t MSEC = USEC * 1000LL;
+    static const int64_t SEC = MSEC * 1000LL;
+
+    /** android.request */
+
+    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+    ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
+
+    static const int32_t id = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
+
+    static const int32_t frameCount = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
+
+    // OUTPUT_STREAMS set by user
+    entryCount += 1;
+    dataCount += 5; // TODO: Should be maximum stream number
+
+    /** android.lens */
+
+    static const float focusDistance = 0;
+    ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
+
+    static const float aperture = 2.8f;
+    ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);
+
+    static const float focalLength = 5.0f;
+    ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
+
+    static const float filterDensity = 0;
+    ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
+
+    static const uint8_t opticalStabilizationMode =
+            ANDROID_LENS_OPTICAL_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+            &opticalStabilizationMode, 1);
+
+    // FOCUS_RANGE set only in frame
+
+    /** android.sensor */
+
+    static const int64_t exposureTime = 10 * MSEC;
+    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
+
+    static const int64_t frameDuration = 33333333L; // 1/30 s
+    ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+
+    static const int32_t sensitivity = 100;
+    ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
+
+    // TIMESTAMP set only in frame
+
+    /** android.flash */
+
+    static const uint8_t flashMode = ANDROID_FLASH_OFF;
+    ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);
+
+    static const uint8_t flashPower = 10;
+    ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
+
+    static const int64_t firingTime = 0;
+    ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
+
+    /** Processing block modes */
+    uint8_t hotPixelMode = 0;
+    uint8_t demosaicMode = 0;
+    uint8_t noiseMode = 0;
+    uint8_t shadingMode = 0;
+    uint8_t geometricMode = 0;
+    uint8_t colorMode = 0;
+    uint8_t tonemapMode = 0;
+    uint8_t edgeMode = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        hotPixelMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        demosaicMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        noiseMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        shadingMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        geometricMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        colorMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        tonemapMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        edgeMode = ANDROID_PROCESSING_HIGH_QUALITY;
+        break;
+      default:
+        hotPixelMode = ANDROID_PROCESSING_FAST;
+        demosaicMode = ANDROID_PROCESSING_FAST;
+        noiseMode = ANDROID_PROCESSING_FAST;
+        shadingMode = ANDROID_PROCESSING_FAST;
+        geometricMode = ANDROID_PROCESSING_FAST;
+        colorMode = ANDROID_PROCESSING_FAST;
+        tonemapMode = ANDROID_PROCESSING_FAST;
+        edgeMode = ANDROID_PROCESSING_FAST;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
+    ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
+    ADD_OR_SIZE(ANDROID_NOISE_MODE, &noiseMode, 1);
+    ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
+    ADD_OR_SIZE(ANDROID_GEOMETRIC_MODE, &geometricMode, 1);
+    ADD_OR_SIZE(ANDROID_COLOR_MODE, &colorMode, 1);
+    ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
+    ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);
+
+    /** android.noise */
+    static const uint8_t noiseStrength = 5;
+    ADD_OR_SIZE(ANDROID_NOISE_STRENGTH, &noiseStrength, 1);
+
+    /** android.color */
+    static const float colorTransform[9] = {
+        1.0f, 0.f, 0.f,
+        0.f, 1.f, 0.f,
+        0.f, 0.f, 1.f
+    };
+    ADD_OR_SIZE(ANDROID_COLOR_TRANSFORM, colorTransform, 9);
+
+    /** android.tonemap */
+    static const float tonemapCurve[4] = {
+        0.f, 0.f,
+        1.f, 1.f
+    };
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
+    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
+
+    /** android.edge */
+    static const uint8_t edgeStrength = 5;
+    ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
+
+    /** android.scaler */
+    static const int32_t cropRegion[3] = {
+        0, 0, Sensor::kResolution[0]
+    };
+    ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
+
+    /** android.jpeg */
+    static const int32_t jpegQuality = 80;
+    ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
+
+    static const int32_t thumbnailSize[2] = {
+        640, 480
+    };
+    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
+
+    static const int32_t thumbnailQuality = 80;
+    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
+
+    static const double gpsCoordinates[2] = {
+        0, 0
+    };
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
+
+    static const uint8_t gpsProcessingMethod[32] = "None";
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
+
+    static const int64_t gpsTimestamp = 0;
+    ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
+
+    static const int32_t jpegOrientation = 0;
+    ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+
+    /** android.stats */
+
+    static const uint8_t faceDetectMode = ANDROID_STATS_FACE_DETECTION_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_FACE_DETECT_MODE, &faceDetectMode, 1);
+
+    static const uint8_t histogramMode = ANDROID_STATS_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_HISTOGRAM_MODE, &histogramMode, 1);
+
+    static const uint8_t sharpnessMapMode = ANDROID_STATS_OFF;
+    ADD_OR_SIZE(ANDROID_STATS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
+
+    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
+    // sharpnessMap only in frames
+
+    /** android.control */
+
+    uint8_t controlIntent = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        controlIntent = ANDROID_CONTROL_INTENT_PREVIEW;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        controlIntent = ANDROID_CONTROL_INTENT_STILL_CAPTURE;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        controlIntent = ANDROID_CONTROL_INTENT_VIDEO_RECORD;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        controlIntent = ANDROID_CONTROL_INTENT_VIDEO_SNAPSHOT;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        controlIntent = ANDROID_CONTROL_INTENT_ZERO_SHUTTER_LAG;
+        break;
+      default:
+        controlIntent = ANDROID_CONTROL_INTENT_CUSTOM;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
+
+    static const uint8_t controlMode = ANDROID_CONTROL_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);
+
+    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_OFF;
+    ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
+
+    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+    ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
+
+    static const uint8_t aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+
+    static const int32_t controlRegions[5] = {
+        0, 0, Sensor::kResolution[0], Sensor::kResolution[1], 1000
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
+
+    static const int32_t aeExpCompensation = 0;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXP_COMPENSATION, &aeExpCompensation, 1);
+
+    static const int32_t aeTargetFpsRange[2] = {
+        10, 30
+    };
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
+
+    static const uint8_t aeAntibandingMode =
+            ANDROID_CONTROL_AE_ANTIBANDING_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
+
+    static const uint8_t awbMode =
+            ANDROID_CONTROL_AWB_AUTO;
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+    ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
+
+    uint8_t afMode = 0;
+    switch (request_template) {
+      case CAMERA2_TEMPLATE_PREVIEW:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+      case CAMERA2_TEMPLATE_STILL_CAPTURE:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_RECORD:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
+        afMode = ANDROID_CONTROL_AF_CONTINUOUS_PICTURE;
+        break;
+      default:
+        afMode = ANDROID_CONTROL_AF_AUTO;
+        break;
+    }
+    ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);
+
+    ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
+
+    static const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+    ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
+
+    // aeState, awbState, afState only in frame
+
+    /** Allocate metadata if sizing */
+    if (sizeRequest) {
+        ALOGV("Allocating %d entries, %d extra bytes for "
+                "request template type %d",
+                entryCount, dataCount, request_template);
+        *request = allocate_camera_metadata(entryCount, dataCount);
+        if (*request == NULL) {
+            ALOGE("Unable to allocate new request template type %d "
+                    "(%d entries, %d bytes extra data)", request_template,
+                    entryCount, dataCount);
+            return NO_MEMORY;
+        }
+    }
+    return OK;
+#undef ADD_OR_SIZE
+}
+
+status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
+        bool sizeRequest,
+        size_t *entryCount,
+        size_t *dataCount,
+        uint32_t tag,
+        const void *entryData,
+        size_t entryDataCount) {
+    status_t res;
+    if (!sizeRequest) {
+        return add_camera_metadata_entry(request, tag, entryData,
+                entryDataCount);
+    } else {
+        int type = get_camera_metadata_tag_type(tag);
+        if (type < 0 ) return BAD_VALUE;
+        (*entryCount)++;
+        (*dataCount) += calculate_camera_metadata_entry_data_size(type,
+                entryDataCount);
+        return OK;
+    }
+}
+
+bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
+    // Assumes mMutex is locked; otherwise new requests could enter
+    // configureThread while readoutThread is being checked
+
+    // Order of isStreamInUse calls matters
+    if (mConfigureThread->isStreamInUse(id) ||
+            mReadoutThread->isStreamInUse(id) ||
+            mJpegCompressor->isStreamInUse(id) ) {
+        ALOGE("%s: Stream %d is in use in active requests!",
+                __FUNCTION__, id);
+        return true;
+    }
+    return false;
+}
+
+const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
+    Mutex::Autolock lock(mMutex);
+
+    return mStreams.valueFor(streamId);
+}
+
 };  /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index 89b12d3..e9e0dfc 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -24,6 +24,13 @@
  */
 
 #include "EmulatedCamera2.h"
+#include "fake-pipeline2/Base.h"
+#include "fake-pipeline2/Sensor.h"
+#include "fake-pipeline2/JpegCompressor.h"
+#include <utils/Condition.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
 
 namespace android {
 
@@ -44,22 +51,317 @@
 
 public:
     /* Initializes EmulatedFakeCamera2 instance. */
-     status_t Initialize();
+    status_t Initialize();
 
     /****************************************************************************
-     * EmulatedCamera abstract API implementation.
+     * Camera Module API and generic hardware device API implementation
+     ***************************************************************************/
+public:
+
+    virtual status_t connectCamera(hw_device_t** device);
+
+    virtual status_t closeCamera();
+
+    virtual status_t getCameraInfo(struct camera_info *info);
+
+    /****************************************************************************
+     * EmulatedCamera2 abstract API implementation.
+     ***************************************************************************/
+protected:
+    /** Request input queue */
+
+    virtual int requestQueueNotify();
+
+    /** Count of requests in flight */
+    virtual int getInProgressCount();
+
+    /** Cancel all captures in flight */
+    //virtual int flushCapturesInProgress();
+
+    /** Construct default request */
+    virtual int constructDefaultRequest(
+            int request_template,
+            camera_metadata_t **request);
+
+    virtual int allocateStream(
+            uint32_t width,
+            uint32_t height,
+            int format,
+            const camera2_stream_ops_t *stream_ops,
+            uint32_t *stream_id,
+            uint32_t *format_actual,
+            uint32_t *usage,
+            uint32_t *max_buffers);
+
+    virtual int registerStreamBuffers(
+            uint32_t stream_id,
+            int num_buffers,
+            buffer_handle_t *buffers);
+
+    virtual int releaseStream(uint32_t stream_id);
+
+    // virtual int allocateReprocessStream(
+    //         uint32_t width,
+    //         uint32_t height,
+    //         uint32_t format,
+    //         const camera2_stream_ops_t *stream_ops,
+    //         uint32_t *stream_id,
+    //         uint32_t *format_actual,
+    //         uint32_t *usage,
+    //         uint32_t *max_buffers);
+
+    // virtual int releaseReprocessStream(uint32_t stream_id);
+
+    virtual int triggerAction(uint32_t trigger_id,
+            int32_t ext1,
+            int32_t ext2);
+
+    /** Custom tag definitions */
+    virtual const char* getVendorSectionName(uint32_t tag);
+    virtual const char* getVendorTagName(uint32_t tag);
+    virtual int         getVendorTagType(uint32_t tag);
+
+    /** Debug methods */
+
+    virtual int dump(int fd);
+
+public:
+    /****************************************************************************
+     * Utility methods called by configure/readout threads and pipeline
      ***************************************************************************/
 
-protected:
+    // Get information about a given stream. Will lock mMutex
+    const Stream &getStreamInfo(uint32_t streamId);
+
+    // Notifies rest of camera subsystem of serious error
+    void signalError();
+
+private:
+    /****************************************************************************
+     * Utility methods
+     ***************************************************************************/
+    /** Construct static camera metadata, two-pass */
+    status_t constructStaticInfo(
+            camera_metadata_t **info,
+            bool sizeRequest) const;
+
+    /** Two-pass implementation of constructDefaultRequest */
+    status_t constructDefaultRequest(
+            int request_template,
+            camera_metadata_t **request,
+            bool sizeRequest) const;
+    /** Helper function for constructDefaultRequest */
+    static status_t addOrSize( camera_metadata_t *request,
+            bool sizeRequest,
+            size_t *entryCount,
+            size_t *dataCount,
+            uint32_t tag,
+            const void *entry_data,
+            size_t entry_count);
+
+    /** Determine if the stream id is listed in any currently-in-flight
+     * requests. Assumes mMutex is locked */
+    bool isStreamInUse(uint32_t streamId);
 
     /****************************************************************************
-     * Data memebers.
+     * Pipeline controller threads
+     ***************************************************************************/
+
+    class ConfigureThread: public Thread {
+      public:
+        ConfigureThread(EmulatedFakeCamera2 *parent);
+        ~ConfigureThread();
+
+        status_t waitUntilRunning();
+        status_t newRequestAvailable();
+        status_t readyToRun();
+
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
+      private:
+        EmulatedFakeCamera2 *mParent;
+
+        bool mRunning;
+        bool threadLoop();
+
+        Mutex mInputMutex; // Protects mActive, mRequestCount
+        Condition mInputSignal;
+        bool mActive; // Whether we're waiting for input requests or actively
+                      // working on them
+        size_t mRequestCount;
+
+        camera_metadata_t *mRequest;
+
+        Mutex mInternalsMutex; // Lock before accessing below members.
+        bool    mNextNeedsJpeg;
+        int32_t mNextFrameNumber;
+        int64_t mNextExposureTime;
+        int64_t mNextFrameDuration;
+        int32_t mNextSensitivity;
+        Buffers *mNextBuffers;
+    };
+
+    class ReadoutThread: public Thread {
+      public:
+        ReadoutThread(EmulatedFakeCamera2 *parent);
+        ~ReadoutThread();
+
+        status_t readyToRun();
+
+        // Input
+        status_t waitUntilRunning();
+        void setNextCapture(camera_metadata_t *request,
+                Buffers *buffers);
+
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
+      private:
+        EmulatedFakeCamera2 *mParent;
+
+        bool mRunning;
+        bool threadLoop();
+
+        status_t collectStatisticsMetadata(camera_metadata_t *frame);
+
+        // Inputs
+        Mutex mInputMutex; // Protects mActive, mInFlightQueue, mRequestCount
+        Condition mInputSignal;
+        bool mActive;
+
+        static const int kInFlightQueueSize = 4;
+        struct InFlightQueue {
+            camera_metadata_t *request;
+            Buffers *buffers;
+        } *mInFlightQueue;
+
+        size_t mInFlightHead;
+        size_t mInFlightTail;
+
+        size_t mRequestCount;
+
+        // Internals
+        Mutex mInternalsMutex;
+        camera_metadata_t *mRequest;
+        Buffers *mBuffers;
+
+    };
+
+    // 3A management thread (auto-exposure, focus, white balance)
+    class ControlThread: public Thread {
+      public:
+        ControlThread(EmulatedFakeCamera2 *parent);
+        ~ControlThread();
+
+        status_t readyToRun();
+
+        status_t waitUntilRunning();
+
+        // Interpret request's control parameters and override
+        // capture settings as needed
+        status_t processRequest(camera_metadata_t *request);
+
+        status_t triggerAction(uint32_t msgType,
+                int32_t ext1, int32_t ext2);
+      private:
+        ControlThread(const ControlThread &t);
+        ControlThread& operator=(const ControlThread &t);
+
+        // Constants controlling fake 3A behavior
+        static const nsecs_t kControlCycleDelay;
+        static const nsecs_t kMinAfDuration;
+        static const nsecs_t kMaxAfDuration;
+        static const float kAfSuccessRate;
+        static const float kContinuousAfStartRate;
+
+        EmulatedFakeCamera2 *mParent;
+
+        bool mRunning;
+        bool threadLoop();
+
+        Mutex mInputMutex; // Protects input methods
+        Condition mInputSignal;
+
+        // Trigger notifications
+        bool mStartAf;
+        bool mCancelAf;
+        bool mStartPrecapture;
+
+        // Latest state for 3A request fields
+        uint8_t mControlMode;
+
+        uint8_t mEffectMode;
+        uint8_t mSceneMode;
+
+        uint8_t mAfMode;
+        bool mAfModeChange;
+
+        uint8_t mAwbMode;
+        uint8_t mAeMode;
+
+        // Latest trigger IDs
+        int32_t mAfTriggerId;
+        int32_t mPrecaptureTriggerId;
+
+        // Current state for 3A algorithms
+        uint8_t mAfState;
+        uint8_t mAeState;
+        uint8_t mAwbState;
+
+        // Private to threadLoop and its utility methods
+
+        nsecs_t mAfScanDuration;
+        bool mLockAfterPassiveScan;
+
+        // Utility methods
+        int processAfTrigger(uint8_t afMode, uint8_t afState);
+        int maybeStartAfScan(uint8_t afMode, uint8_t afState);
+        int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
+        void updateAfState(uint8_t newState, int32_t triggerId);
+
+    };
+
+    /****************************************************************************
+     * Static configuration information
+     ***************************************************************************/
+private:
+    static const uint32_t kMaxRawStreamCount = 1;
+    static const uint32_t kMaxProcessedStreamCount = 3;
+    static const uint32_t kMaxJpegStreamCount = 1;
+    static const uint32_t kAvailableFormats[];
+    static const uint32_t kAvailableRawSizes[];
+    static const uint64_t kAvailableRawMinDurations[];
+    static const uint32_t kAvailableProcessedSizesBack[];
+    static const uint32_t kAvailableProcessedSizesFront[];
+    static const uint64_t kAvailableProcessedMinDurations[];
+    static const uint32_t kAvailableJpegSizesBack[];
+    static const uint32_t kAvailableJpegSizesFront[];
+    static const uint64_t kAvailableJpegMinDurations[];
+
+    /****************************************************************************
+     * Data members.
      ***************************************************************************/
 
 protected:
     /* Facing back (true) or front (false) switch. */
-    bool                        mFacingBack;
+    bool mFacingBack;
 
+private:
+    /** Stream manipulation */
+    uint32_t mNextStreamId;
+    uint32_t mRawStreamCount;
+    uint32_t mProcessedStreamCount;
+    uint32_t mJpegStreamCount;
+
+    KeyedVector<uint32_t, Stream> mStreams;
+
+    /** Simulated hardware interfaces */
+    sp<Sensor> mSensor;
+    sp<JpegCompressor> mJpegCompressor;
+
+    /** Pipeline control threads */
+    sp<ConfigureThread> mConfigureThread;
+    sp<ReadoutThread>   mReadoutThread;
+    sp<ControlThread>   mControlThread;
 };
 
 }; /* namespace android */
diff --git a/tools/emulator/system/camera/fake-pipeline2/Base.h b/tools/emulator/system/camera/fake-pipeline2/Base.h
new file mode 100644
index 0000000..f7ef9b1
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Base.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This file includes various basic structures that are needed by multiple parts
+ * of the fake camera 2 implementation.
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_BASE_H
+#define HW_EMULATOR_CAMERA2_BASE_H
+
+#include <system/window.h>
+#include <hardware/camera2.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+
+/* Internal structure for passing buffers across threads */
+struct StreamBuffer {
+    int streamId;
+    uint32_t width, height;
+    uint32_t format;
+    uint32_t stride;
+    buffer_handle_t *buffer;
+    uint8_t *img;
+};
+typedef Vector<StreamBuffer> Buffers;
+
+struct Stream {
+    const camera2_stream_ops_t *ops;
+    uint32_t width, height;
+    int32_t format;
+    uint32_t stride;
+};
+
+} // namespace android;
+
+#endif
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
new file mode 100644
index 0000000..76fbb94
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
@@ -0,0 +1,256 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera2_JpegCompressor"
+
+#include <utils/Log.h>
+#include <ui/GraphicBufferMapper.h>
+
+#include "JpegCompressor.h"
+#include "../EmulatedFakeCamera2.h"
+
+namespace android {
+
+JpegCompressor::JpegCompressor(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mIsBusy(false),
+        mParent(parent),
+        mBuffers(NULL),
+        mCaptureTime(0) {
+}
+
+JpegCompressor::~JpegCompressor() {
+    Mutex::Autolock lock(mMutex);
+}
+
+status_t JpegCompressor::start(Buffers *buffers,
+        nsecs_t captureTime) {
+    Mutex::Autolock lock(mMutex);
+    {
+        Mutex::Autolock busyLock(mBusyMutex);
+
+        if (mIsBusy) {
+            ALOGE("%s: Already processing a buffer!", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+
+        mIsBusy = true;
+
+        mBuffers = buffers;
+        mCaptureTime = captureTime;
+    }
+
+    status_t res;
+    res = run("EmulatedFakeCamera2::JpegCompressor");
+    if (res != OK) {
+        ALOGE("%s: Unable to start up compression thread: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        delete mBuffers; mBuffers = NULL; { Mutex::Autolock busyLock(mBusyMutex); mIsBusy = false; }
+    }
+    return res;
+}
+
+status_t JpegCompressor::cancel() {
+    requestExitAndWait();
+    return OK;
+}
+
+status_t JpegCompressor::readyToRun() {
+    return OK;
+}
+
+bool JpegCompressor::threadLoop() {
+    Mutex::Autolock lock(mMutex);
+    ALOGV("%s: Starting compression thread", __FUNCTION__);
+
+    // Find source and target buffers
+
+    mFoundJpeg = false; mFoundAux = false;
+    for (size_t i = 0; i < mBuffers->size(); i++) {
+        const StreamBuffer &b = (*mBuffers)[i];
+        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+            mJpegBuffer = b;
+            mFoundJpeg = true;
+        } else if (b.streamId == -1) {
+            mAuxBuffer = b;
+            mFoundAux = true;
+        }
+        if (mFoundJpeg && mFoundAux) break;
+    }
+    if (!mFoundJpeg || !mFoundAux) {
+        ALOGE("%s: Unable to find buffers for JPEG source/destination",
+                __FUNCTION__);
+        cleanUp();
+        return false;
+    }
+
+    // Set up error management
+
+    mJpegErrorInfo = NULL;
+    JpegError error;
+    error.parent = this;
+
+    mCInfo.err = jpeg_std_error(&error);
+    mCInfo.err->error_exit = jpegErrorHandler;
+
+    jpeg_create_compress(&mCInfo);
+    if (checkError("Error initializing compression")) return false;
+
+    // Route compressed data straight to output stream buffer
+
+    JpegDestination jpegDestMgr;
+    jpegDestMgr.parent = this;
+    jpegDestMgr.init_destination = jpegInitDestination;
+    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
+    jpegDestMgr.term_destination = jpegTermDestination;
+
+    mCInfo.dest = &jpegDestMgr;
+
+    // Set up compression parameters
+
+    mCInfo.image_width = mAuxBuffer.width;
+    mCInfo.image_height = mAuxBuffer.height;
+    mCInfo.input_components = 3;
+    mCInfo.in_color_space = JCS_RGB;
+
+    jpeg_set_defaults(&mCInfo);
+    if (checkError("Error configuring defaults")) return false;
+
+    // Do compression
+
+    jpeg_start_compress(&mCInfo, TRUE);
+    if (checkError("Error starting compression")) return false;
+
+    size_t rowStride = mAuxBuffer.stride * 3;
+    const size_t kChunkSize = 32;
+    while (mCInfo.next_scanline < mCInfo.image_height) {
+        JSAMPROW chunk[kChunkSize];
+        for (size_t i = 0 ; i < kChunkSize; i++) {
+            chunk[i] = (JSAMPROW)
+                    (mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
+        }
+        jpeg_write_scanlines(&mCInfo, chunk, (mCInfo.image_height - mCInfo.next_scanline < kChunkSize) ? (mCInfo.image_height - mCInfo.next_scanline) : kChunkSize);
+        if (checkError("Error while compressing")) return false;
+        if (exitPending()) {
+            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
+            cleanUp();
+            return false;
+        }
+    }
+
+    jpeg_finish_compress(&mCInfo);
+    if (checkError("Error while finishing compression")) return false;
+
+    // Write to JPEG output stream
+
+    ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
+          mJpegBuffer.streamId);
+
+    GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
+    status_t res;
+    const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
+    res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
+    if (res != OK) {
+        ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
+                __FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
+        mParent->signalError();
+    }
+
+    // All done
+
+    cleanUp();
+
+    return false;
+}
+
+bool JpegCompressor::isBusy() {
+    Mutex::Autolock busyLock(mBusyMutex);
+    return mIsBusy;
+}
+
+bool JpegCompressor::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mBusyMutex);
+
+    if (mBuffers && mIsBusy) {
+        for (size_t i = 0; i < mBuffers->size(); i++) {
+            if ( (*mBuffers)[i].streamId == (int)id ) return true;
+        }
+    }
+    return false;
+}
+
+bool JpegCompressor::waitForDone(nsecs_t timeout) {
+    Mutex::Autolock lock(mBusyMutex);
+    status_t res = OK;
+    if (mIsBusy) {
+        res = mDone.waitRelative(mBusyMutex, timeout);
+    }
+    return (res == OK);
+}
+
+bool JpegCompressor::checkError(const char *msg) {
+    if (mJpegErrorInfo) {
+        char errBuffer[JMSG_LENGTH_MAX];
+        mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
+        ALOGE("%s: %s: %s",
+                __FUNCTION__, msg, errBuffer);
+        cleanUp();
+        mJpegErrorInfo = NULL;
+        return true;
+    }
+    return false;
+}
+
+void JpegCompressor::cleanUp() {
+    jpeg_destroy_compress(&mCInfo);
+    Mutex::Autolock lock(mBusyMutex);
+
+    if (mFoundAux) {
+        delete[] mAuxBuffer.img;
+    }
+    delete mBuffers;
+    mBuffers = NULL;
+
+    mIsBusy = false;
+    mDone.signal();
+}
+
+void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
+    JpegError *error = static_cast<JpegError*>(cinfo->err);
+    error->parent->mJpegErrorInfo = cinfo;
+}
+
+void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
+    JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
+    ALOGV("%s: Setting destination to %p, size %d",
+            __FUNCTION__, dest->parent->mJpegBuffer.img, kMaxJpegSize);
+    dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer.img);
+    dest->free_in_buffer = kMaxJpegSize;
+}
+
+boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
+    ALOGE("%s: JPEG destination buffer overflow!",
+            __FUNCTION__);
+    return true;
+}
+
+void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
+    ALOGV("%s: Done writing JPEG data. %d bytes left in buffer",
+            __FUNCTION__, cinfo->dest->free_in_buffer);
+}
+
+} // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h
new file mode 100644
index 0000000..ea2a84f
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * This class simulates a hardware JPEG compressor.  It receives image buffers
+ * in RGBA_8888 format, processes them in a worker thread, and then pushes them
+ * out to their destination stream.
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_JPEG_H
+#define HW_EMULATOR_CAMERA2_JPEG_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+
+#include "Base.h"
+
+#include <stdio.h>
+
+extern "C" {
+#include <jpeglib.h>
+}
+
+namespace android {
+
+class EmulatedFakeCamera2;
+
+class JpegCompressor: private Thread, public virtual RefBase {
+  public:
+
+    JpegCompressor(EmulatedFakeCamera2 *parent);
+    ~JpegCompressor();
+
+    // Start compressing COMPRESSED format buffers; JpegCompressor takes
+    // ownership of the Buffers vector.
+    status_t start(Buffers *buffers,
+            nsecs_t captureTime);
+
+    status_t cancel();
+
+    bool isBusy();
+    bool isStreamInUse(uint32_t id);
+
+    bool waitForDone(nsecs_t timeout);
+
+    // TODO: Measure this
+    static const size_t kMaxJpegSize = 300000;
+
+  private:
+    Mutex mBusyMutex;
+    bool mIsBusy;
+    Condition mDone;
+
+    Mutex mMutex;
+
+    EmulatedFakeCamera2 *mParent;
+
+    Buffers *mBuffers;
+    nsecs_t mCaptureTime;
+
+    StreamBuffer mJpegBuffer, mAuxBuffer;
+    bool mFoundJpeg, mFoundAux;
+
+    jpeg_compress_struct mCInfo;
+
+    struct JpegError : public jpeg_error_mgr {
+        JpegCompressor *parent;
+    };
+    j_common_ptr mJpegErrorInfo;
+
+    struct JpegDestination : public jpeg_destination_mgr {
+        JpegCompressor *parent;
+    };
+
+    static void jpegErrorHandler(j_common_ptr cinfo);
+
+    static void jpegInitDestination(j_compress_ptr cinfo);
+    static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
+    static void jpegTermDestination(j_compress_ptr cinfo);
+
+    bool checkError(const char *msg);
+    void cleanUp();
+
+    /**
+     * Inherited Thread virtual overrides
+     */
+  private:
+    virtual status_t readyToRun();
+    virtual bool threadLoop();
+};
+
+} // namespace android
+
+#endif
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.cpp b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
new file mode 100644
index 0000000..ca50350
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
@@ -0,0 +1,459 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera_Scene"
+#include <utils/Log.h>
+#include <stdlib.h>
+
+#include "Scene.h"
+
+// TODO: This should probably be done host-side in OpenGL for speed and better
+// quality
+
+namespace android {
+
+// Define single-letter shortcuts for scene definition, for directly indexing
+// mCurrentColors
+#define G (Scene::GRASS * Scene::NUM_CHANNELS)
+#define S (Scene::GRASS_SHADOW * Scene::NUM_CHANNELS)
+#define H (Scene::HILL * Scene::NUM_CHANNELS)
+#define W (Scene::WALL * Scene::NUM_CHANNELS)
+#define R (Scene::ROOF * Scene::NUM_CHANNELS)
+#define D (Scene::DOOR * Scene::NUM_CHANNELS)
+#define C (Scene::CHIMNEY * Scene::NUM_CHANNELS)
+#define I (Scene::WINDOW * Scene::NUM_CHANNELS)
+#define U (Scene::SUN * Scene::NUM_CHANNELS)
+#define K (Scene::SKY * Scene::NUM_CHANNELS)
+#define M (Scene::MOON * Scene::NUM_CHANNELS)
+
+const int Scene::kSceneWidth = 20;
+const int Scene::kSceneHeight = 20;
+
+const uint8_t Scene::kScene[Scene::kSceneWidth * Scene::kSceneHeight] = {
+    //      5         10        15        20
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K, // 5
+    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
+    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,H,H,H,
+    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,C,C,H,H,H,
+    K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,C,C,H,H,H,
+    H,K,K,K,K,K,H,R,R,R,R,R,R,R,R,R,R,R,R,H, // 10
+    H,K,K,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
+    H,H,H,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
+    H,H,H,K,K,H,H,H,W,W,W,W,W,W,W,W,W,W,H,H,
+    S,S,S,G,G,S,S,S,W,W,W,W,W,W,W,W,W,W,S,S,
+    S,G,G,G,G,S,S,S,W,I,I,W,D,D,W,I,I,W,S,S, // 15
+    G,G,G,G,G,G,S,S,W,I,I,W,D,D,W,I,I,W,S,S,
+    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
+    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
+    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G,
+    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G, // 20
+    //      5         10        15        20
+};
+
+#undef G
+#undef S
+#undef H
+#undef W
+#undef R
+#undef D
+#undef C
+#undef I
+#undef U
+#undef K
+#undef M
+
+Scene::Scene(
+    int sensorWidthPx,
+    int sensorHeightPx,
+    float sensorSensitivity):
+        mSensorWidth(sensorWidthPx),
+        mSensorHeight(sensorHeightPx),
+        mHour(12),
+        mExposureDuration(0.033f),
+        mSensorSensitivity(sensorSensitivity)
+{
+    // Map scene to sensor pixels
+    if (mSensorWidth > mSensorHeight) {
+        mMapDiv = (mSensorWidth / (kSceneWidth + 1) ) + 1;
+    } else {
+        mMapDiv = (mSensorHeight / (kSceneHeight + 1) ) + 1;
+    }
+    mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
+    mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;
+
+    // Assume that sensor filters are sRGB primaries to start
+    mFilterR[0]  =  3.2406f; mFilterR[1]  = -1.5372f; mFilterR[2]  = -0.4986f;
+    mFilterGr[0] = -0.9689f; mFilterGr[1] =  1.8758f; mFilterGr[2] =  0.0415f;
+    mFilterGb[0] = -0.9689f; mFilterGb[1] =  1.8758f; mFilterGb[2] =  0.0415f;
+    mFilterB[0]  =  0.0557f; mFilterB[1]  = -0.2040f; mFilterB[2]  =  1.0570f;
+
+
+}
+
+Scene::~Scene() {
+}
+
+void Scene::setColorFilterXYZ(
+        float rX, float rY, float rZ,
+        float grX, float grY, float grZ,
+        float gbX, float gbY, float gbZ,
+        float bX, float bY, float bZ) {
+    mFilterR[0]  = rX;  mFilterR[1]  = rY;  mFilterR[2]  = rZ;
+    mFilterGr[0] = grX; mFilterGr[1] = grY; mFilterGr[2] = grZ;
+    mFilterGb[0] = gbX; mFilterGb[1] = gbY; mFilterGb[2] = gbZ;
+    mFilterB[0]  = bX;  mFilterB[1]  = bY;  mFilterB[2]  = bZ;
+}
+
+void Scene::setHour(int hour) {
+    ALOGV("Hour set to: %d", hour);
+    mHour = hour % 24;
+}
+
+int Scene::getHour() {
+    return mHour;
+}
+
+void Scene::setExposureDuration(float seconds) {
+    mExposureDuration = seconds;
+}
+
+void Scene::calculateScene(nsecs_t time) {
+    // Calculate time fractions for interpolation
+    int timeIdx = mHour / kTimeStep;
+    int nextTimeIdx = (timeIdx + 1) % (24 / kTimeStep);
+    const nsecs_t kOneHourInNsec = 1e9 * 60 * 60;
+    nsecs_t timeSinceIdx = (mHour - timeIdx * kTimeStep) * kOneHourInNsec + time;
+    float timeFrac = timeSinceIdx / (float)(kOneHourInNsec * kTimeStep);
+
+    // Determine overall sunlight levels
+    float sunLux =
+            kSunlight[timeIdx] * (1 - timeFrac) +
+            kSunlight[nextTimeIdx] * timeFrac;
+    ALOGV("Sun lux: %f", sunLux);
+
+    float sunShadeLux = sunLux * (kDaylightShadeIllum / kDirectSunIllum);
+
+    // Determine sun/shade illumination chromaticity
+    float currentSunXY[2];
+    float currentShadeXY[2];
+
+    const float *prevSunXY, *nextSunXY;
+    const float *prevShadeXY, *nextShadeXY;
+    if (kSunlight[timeIdx] == kSunsetIllum ||
+            kSunlight[timeIdx] == kTwilightIllum) {
+        prevSunXY = kSunsetXY;
+        prevShadeXY = kSunsetXY;
+    } else {
+        prevSunXY = kDirectSunlightXY;
+        prevShadeXY = kDaylightXY;
+    }
+    if (kSunlight[nextTimeIdx] == kSunsetIllum ||
+            kSunlight[nextTimeIdx] == kTwilightIllum) {
+        nextSunXY = kSunsetXY;
+        nextShadeXY = kSunsetXY;
+    } else {
+        nextSunXY = kDirectSunlightXY;
+        nextShadeXY = kDaylightXY;
+    }
+    currentSunXY[0] = prevSunXY[0] * (1 - timeFrac) +
+            nextSunXY[0] * timeFrac;
+    currentSunXY[1] = prevSunXY[1] * (1 - timeFrac) +
+            nextSunXY[1] * timeFrac;
+
+    currentShadeXY[0] = prevShadeXY[0] * (1 - timeFrac) +
+            nextShadeXY[0] * timeFrac;
+    currentShadeXY[1] = prevShadeXY[1] * (1 - timeFrac) +
+            nextShadeXY[1] * timeFrac;
+
+    ALOGV("Sun XY: %f, %f, Shade XY: %f, %f",
+            currentSunXY[0], currentSunXY[1],
+            currentShadeXY[0], currentShadeXY[1]);
+
+    // Converting for xyY to XYZ:
+    // X = Y / y * x
+    // Y = Y
+    // Z = Y / y * (1 - x - y);
+    float sunXYZ[3] = {
+        sunLux / currentSunXY[1] * currentSunXY[0],
+        sunLux,
+        sunLux / currentSunXY[1] *
+        (1 - currentSunXY[0] - currentSunXY[1])
+    };
+    float sunShadeXYZ[3] = {
+        sunShadeLux / currentShadeXY[1] * currentShadeXY[0],
+        sunShadeLux,
+        sunShadeLux / currentShadeXY[1] *
+        (1 - currentShadeXY[0] - currentShadeXY[1])
+    };
+    ALOGV("Sun XYZ: %f, %f, %f",
+            sunXYZ[0], sunXYZ[1], sunXYZ[2]);
+    ALOGV("Sun shade XYZ: %f, %f, %f",
+            sunShadeXYZ[0], sunShadeXYZ[1], sunShadeXYZ[2]);
+
+    // Determine moonlight levels
+    float moonLux =
+            kMoonlight[timeIdx] * (1 - timeFrac) +
+            kMoonlight[nextTimeIdx] * timeFrac;
+    float moonShadeLux = moonLux * (kDaylightShadeIllum / kDirectSunIllum);
+
+    float moonXYZ[3] = {
+        moonLux / kMoonlightXY[1] * kMoonlightXY[0],
+        moonLux,
+        moonLux / kMoonlightXY[1] *
+        (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+    float moonShadeXYZ[3] = {
+        moonShadeLux / kMoonlightXY[1] * kMoonlightXY[0],
+        moonShadeLux,
+        moonShadeLux / kMoonlightXY[1] *
+        (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+
+    // Determine starlight level
+    const float kClearNightXYZ[3] = {
+        kClearNightIllum / kMoonlightXY[1] * kMoonlightXY[0],
+        kClearNightIllum,
+        kClearNightIllum / kMoonlightXY[1] *
+            (1 - kMoonlightXY[0] - kMoonlightXY[1])
+    };
+
+    // Calculate direct and shaded light
+    float directIllumXYZ[3] = {
+        sunXYZ[0] + moonXYZ[0] + kClearNightXYZ[0],
+        sunXYZ[1] + moonXYZ[1] + kClearNightXYZ[1],
+        sunXYZ[2] + moonXYZ[2] + kClearNightXYZ[2],
+    };
+
+    float shadeIllumXYZ[3] = {
+        kClearNightXYZ[0],
+        kClearNightXYZ[1],
+        kClearNightXYZ[2]
+    };
+
+    shadeIllumXYZ[0] += (mHour < kSunOverhead) ? sunXYZ[0] : sunShadeXYZ[0];
+    shadeIllumXYZ[1] += (mHour < kSunOverhead) ? sunXYZ[1] : sunShadeXYZ[1];
+    shadeIllumXYZ[2] += (mHour < kSunOverhead) ? sunXYZ[2] : sunShadeXYZ[2];
+
+    // Moon up period covers 23->0 transition, shift for simplicity
+    int adjHour = (mHour + 12) % 24;
+    int adjMoonOverhead = (kMoonOverhead + 12 ) % 24;
+    shadeIllumXYZ[0] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[0] : moonShadeXYZ[0];
+    shadeIllumXYZ[1] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[1] : moonShadeXYZ[1];
+    shadeIllumXYZ[2] += (adjHour < adjMoonOverhead) ?
+            moonXYZ[2] : moonShadeXYZ[2];
+
+    ALOGV("Direct XYZ: %f, %f, %f",
+            directIllumXYZ[0],directIllumXYZ[1],directIllumXYZ[2]);
+    ALOGV("Shade XYZ: %f, %f, %f",
+            shadeIllumXYZ[0], shadeIllumXYZ[1], shadeIllumXYZ[2]);
+
+    for (int i = 0; i < NUM_MATERIALS; i++) {
+        // Converting for xyY to XYZ:
+        // X = Y / y * x
+        // Y = Y
+        // Z = Y / y * (1 - x - y);
+        float matXYZ[3] = {
+            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
+              kMaterials_xyY[i][0],
+            kMaterials_xyY[i][2],
+            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
+              (1 - kMaterials_xyY[i][0] - kMaterials_xyY[i][1])
+        };
+
+        if (kMaterialsFlags[i] == 0 || kMaterialsFlags[i] & kSky) {
+            matXYZ[0] *= directIllumXYZ[0];
+            matXYZ[1] *= directIllumXYZ[1];
+            matXYZ[2] *= directIllumXYZ[2];
+        } else if (kMaterialsFlags[i] & kShadowed) {
+            matXYZ[0] *= shadeIllumXYZ[0];
+            matXYZ[1] *= shadeIllumXYZ[1];
+            matXYZ[2] *= shadeIllumXYZ[2];
+        } // else if (kMaterialsFlags[i] & kSelfLit), do nothing
+
+        ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
+        float luxToElectrons = mSensorSensitivity * mExposureDuration /
+                (kAperture * kAperture);
+        mCurrentColors[i*NUM_CHANNELS + 0] =
+                (mFilterR[0] * matXYZ[0] +
+                 mFilterR[1] * matXYZ[1] +
+                 mFilterR[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 1] =
+                (mFilterGr[0] * matXYZ[0] +
+                 mFilterGr[1] * matXYZ[1] +
+                 mFilterGr[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 2] =
+                (mFilterGb[0] * matXYZ[0] +
+                 mFilterGb[1] * matXYZ[1] +
+                 mFilterGb[2] * matXYZ[2])
+                * luxToElectrons;
+        mCurrentColors[i*NUM_CHANNELS + 3] =
+                (mFilterB[0] * matXYZ[0] +
+                 mFilterB[1] * matXYZ[1] +
+                 mFilterB[2] * matXYZ[2])
+                * luxToElectrons;
+
+        ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
+                mCurrentColors[i*NUM_CHANNELS + 0],
+                mCurrentColors[i*NUM_CHANNELS + 1],
+                mCurrentColors[i*NUM_CHANNELS + 2],
+                mCurrentColors[i*NUM_CHANNELS + 3]);
+    }
+    // Shake viewpoint
+    mHandshakeX = rand() % mMapDiv/4 - mMapDiv/8;
+    mHandshakeY = rand() % mMapDiv/4 - mMapDiv/8;
+    // Set starting pixel
+    setReadoutPixel(0,0);
+}
+
+void Scene::setReadoutPixel(int x, int y) {
+    mCurrentX = x;
+    mCurrentY = y;
+    mSubX = (x + mOffsetX + mHandshakeX) % mMapDiv;
+    mSubY = (y + mOffsetY + mHandshakeY) % mMapDiv;
+    mSceneX = (x + mOffsetX + mHandshakeX) / mMapDiv;
+    mSceneY = (y + mOffsetY + mHandshakeY) / mMapDiv;
+    mSceneIdx = mSceneY * kSceneWidth + mSceneX;
+    mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
+}
+
+const uint32_t* Scene::getPixelElectrons() {
+    const uint32_t *pixel = mCurrentSceneMaterial;
+    mCurrentX++;
+    mSubX++;
+    if (mCurrentX >= mSensorWidth) {
+        mCurrentX = 0;
+        mCurrentY++;
+        if (mCurrentY >= mSensorHeight) mCurrentY = 0;
+        setReadoutPixel(mCurrentX, mCurrentY);
+    } else if (mSubX > mMapDiv) {
+        mSceneIdx++;
+        mSceneX++;
+        mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
+        mSubX = 0;
+    }
+    return pixel;
+}
+
+// RGB->YUV, Jpeg standard
+const float Scene::kRgb2Yuv[12] = {
+       0.299f,    0.587f,    0.114f,    0.f,
+    -0.16874f, -0.33126f,      0.5f, -128.f,
+         0.5f, -0.41869f, -0.08131f, -128.f,
+};
+
+// Aperture of imaging lens
+const float Scene::kAperture = 2.8;
+
+// Sun illumination levels through the day
+const float Scene::kSunlight[24/kTimeStep] =
+{
+    0, // 00:00
+    0,
+    0,
+    kTwilightIllum, // 06:00
+    kDirectSunIllum,
+    kDirectSunIllum,
+    kDirectSunIllum, // 12:00
+    kDirectSunIllum,
+    kDirectSunIllum,
+    kSunsetIllum, // 18:00
+    kTwilightIllum,
+    0
+};
+
+// Moon illumination levels through the day
+const float Scene::kMoonlight[24/kTimeStep] =
+{
+    kFullMoonIllum, // 00:00
+    kFullMoonIllum,
+    0,
+    0, // 06:00
+    0,
+    0,
+    0, // 12:00
+    0,
+    0,
+    0, // 18:00
+    0,
+    kFullMoonIllum
+};
+
+const int Scene::kSunOverhead = 12;
+const int Scene::kMoonOverhead = 0;
+
+// Used for sun illumination levels
+const float Scene::kDirectSunIllum     = 100000;
+const float Scene::kSunsetIllum        = 400;
+const float Scene::kTwilightIllum      = 4;
+// Used for moon illumination levels
+const float Scene::kFullMoonIllum      = 1;
+// Other illumination levels
+const float Scene::kDaylightShadeIllum = 20000;
+const float Scene::kClearNightIllum    = 2e-3;
+const float Scene::kStarIllum          = 2e-6;
+const float Scene::kLivingRoomIllum    = 50;
+
+const float Scene::kIncandescentXY[2]   = { 0.44757f, 0.40745f};
+const float Scene::kDirectSunlightXY[2] = { 0.34842f, 0.35161f};
+const float Scene::kDaylightXY[2]       = { 0.31271f, 0.32902f};
+const float Scene::kNoonSkyXY[2]        = { 0.346f,   0.359f};
+const float Scene::kMoonlightXY[2]      = { 0.34842f, 0.35161f};
+const float Scene::kSunsetXY[2]         = { 0.527f,   0.413f};
+
+const uint8_t Scene::kSelfLit  = 0x01;
+const uint8_t Scene::kShadowed = 0x02;
+const uint8_t Scene::kSky      = 0x04;
+
+// For non-self-lit materials, the Y component is normalized with 1=full
+// reflectance; for self-lit materials, it's the constant illuminance in lux.
+const float Scene::kMaterials_xyY[Scene::NUM_MATERIALS][3] = {
+    { 0.3688f, 0.4501f, .1329f }, // GRASS
+    { 0.3688f, 0.4501f, .1329f }, // GRASS_SHADOW
+    { 0.3986f, 0.5002f, .4440f }, // HILL
+    { 0.3262f, 0.5040f, .2297f }, // WALL
+    { 0.4336f, 0.3787f, .1029f }, // ROOF
+    { 0.3316f, 0.2544f, .0639f }, // DOOR
+    { 0.3425f, 0.3577f, .0887f }, // CHIMNEY
+    { kIncandescentXY[0], kIncandescentXY[1], kLivingRoomIllum }, // WINDOW
+    { kDirectSunlightXY[0], kDirectSunlightXY[1], kDirectSunIllum }, // SUN
+    { kNoonSkyXY[0], kNoonSkyXY[1], kDaylightShadeIllum / kDirectSunIllum }, // SKY
+    { kMoonlightXY[0], kMoonlightXY[1], kFullMoonIllum } // MOON
+};
+
+const uint8_t Scene::kMaterialsFlags[Scene::NUM_MATERIALS] = {
+    0,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kShadowed,
+    kSelfLit,
+    kSelfLit,
+    kSky,
+    kSelfLit,
+};
+
+} // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.h b/tools/emulator/system/camera/fake-pipeline2/Scene.h
new file mode 100644
index 0000000..687e427
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.h
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The Scene class implements a simple physical simulation of a scene, using the
+ * CIE 1931 colorspace to represent light in physical units (lux).
+ *
+ * It's fairly approximate, but does provide a scene with realistic widely
+ * variable illumination levels and colors over time.
+ *
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SCENE_H
+#define HW_EMULATOR_CAMERA2_SCENE_H
+
+#include "utils/Timers.h"
+
+namespace android {
+
+class Scene {
+  public:
+    Scene(int sensorWidthPx,
+            int sensorHeightPx,
+            float sensorSensitivity);
+    ~Scene();
+
+    // Set the filter coefficients for the red, green, and blue filters on the
+    // sensor. Used as an optimization to pre-calculate various illuminance
+    // values. Two different green filters can be provided, to account for
+    // possible cross-talk on a Bayer sensor. Must be called before
+    // calculateScene.
+    void setColorFilterXYZ(
+        float rX, float rY, float rZ,
+        float grX, float grY, float grZ,
+        float gbX, float gbY, float gbZ,
+        float bX, float bY, float bZ);
+
+    // Set time of day (24-hour clock). This controls the general light levels
+    // in the scene. Must be called before calculateScene
+    void setHour(int hour);
+    // Get current hour
+    int getHour();
+
+    // Set the duration of exposure for determining luminous exposure.
+    // Must be called before calculateScene
+    void setExposureDuration(float seconds);
+
+    // Calculate scene information for current hour and the time offset since
+    // the hour. Must be called at least once before calling getPixelElectrons.
+    // Resets pixel readout location to 0,0
+    void calculateScene(nsecs_t time);
+
+    // Set sensor pixel readout location.
+    void setReadoutPixel(int x, int y);
+
+    // Get sensor response in physical units (electrons) for light hitting the
+    // current readout pixel, after passing through color filters. The readout
+    // pixel will be auto-incremented. The returned array can be indexed with
+    // ColorChannels.
+    const uint32_t* getPixelElectrons();
+
+    enum ColorChannels {
+        R = 0,
+        Gr,
+        Gb,
+        B,
+        Y,
+        Cb,
+        Cr,
+        NUM_CHANNELS
+    };
+
+  private:
+    // Sensor color filtering coefficients in XYZ
+    float mFilterR[3];
+    float mFilterGr[3];
+    float mFilterGb[3];
+    float mFilterB[3];
+
+    int mOffsetX, mOffsetY;
+    int mMapDiv;
+
+    int mHandshakeX, mHandshakeY;
+
+    int mSensorWidth;
+    int mSensorHeight;
+    int mCurrentX;
+    int mCurrentY;
+    int mSubX;
+    int mSubY;
+    int mSceneX;
+    int mSceneY;
+    int mSceneIdx;
+    uint32_t *mCurrentSceneMaterial;
+
+    int mHour;
+    float mExposureDuration;
+    float mSensorSensitivity;
+
+    enum Materials {
+        GRASS = 0,
+        GRASS_SHADOW,
+        HILL,
+        WALL,
+        ROOF,
+        DOOR,
+        CHIMNEY,
+        WINDOW,
+        SUN,
+        SKY,
+        MOON,
+        NUM_MATERIALS
+    };
+
+    uint32_t mCurrentColors[NUM_MATERIALS*NUM_CHANNELS];
+
+    /**
+     * Constants for scene definition. These are approximate to varying degrees.
+     */
+
+    // RGB->YUV conversion
+    static const float kRgb2Yuv[12];
+
+    // Aperture of imaging lens
+    static const float kAperture;
+
+    // Sun, moon illuminance levels in 2-hour increments. These don't match any
+    // real day anywhere.
+    static const uint32_t kTimeStep = 2;
+    static const float kSunlight[];
+    static const float kMoonlight[];
+    static const int kSunOverhead;
+    static const int kMoonOverhead;
+
+    // Illumination levels for various conditions, in lux
+    static const float kDirectSunIllum;
+    static const float kDaylightShadeIllum;
+    static const float kSunsetIllum;
+    static const float kTwilightIllum;
+    static const float kFullMoonIllum;
+    static const float kClearNightIllum;
+    static const float kStarIllum;
+    static const float kLivingRoomIllum;
+
+    // Chromaticity of various illumination sources
+    static const float kIncandescentXY[2];
+    static const float kDirectSunlightXY[2];
+    static const float kDaylightXY[2];
+    static const float kNoonSkyXY[2];
+    static const float kMoonlightXY[2];
+    static const float kSunsetXY[2];
+
+    static const uint8_t kSelfLit;
+    static const uint8_t kShadowed;
+    static const uint8_t kSky;
+
+    static const float kMaterials_xyY[NUM_MATERIALS][3];
+    static const uint8_t kMaterialsFlags[NUM_MATERIALS];
+
+    static const int kSceneWidth;
+    static const int kSceneHeight;
+    static const uint8_t kScene[];
+};
+
+}
+
+#endif // HW_EMULATOR_CAMERA2_SCENE_H
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
new file mode 100644
index 0000000..5eff98d
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -0,0 +1,488 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+#define LOG_TAG "EmulatedCamera2_Sensor"
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+
+#include "../EmulatedFakeCamera2.h"
+#include "Sensor.h"
+#include <cmath>
+#include <cstdlib>
+#include "system/camera_metadata.h"
+
+namespace android {
+
+const unsigned int Sensor::kResolution[2]  = {640, 480};
+
+const nsecs_t Sensor::kExposureTimeRange[2] =
+    {1000L, 30000000000L} ; // 1 us - 30 sec
+const nsecs_t Sensor::kFrameDurationRange[2] =
+    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
+const nsecs_t Sensor::kMinVerticalBlank = 10000L;
+
+const uint8_t Sensor::kColorFilterArrangement = ANDROID_SENSOR_RGGB;
+
+// Output image data characteristics
+const uint32_t Sensor::kMaxRawValue = 4000;
+const uint32_t Sensor::kBlackLevel  = 1000;
+
+// Sensor sensitivity
+const float Sensor::kSaturationVoltage      = 0.520f;
+const uint32_t Sensor::kSaturationElectrons = 2000;
+const float Sensor::kVoltsPerLuxSecond      = 0.100f;
+
+const float Sensor::kElectronsPerLuxSecond =
+        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
+        * Sensor::kVoltsPerLuxSecond;
+
+const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
+            Sensor::kSaturationElectrons;
+
+const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
+const float Sensor::kReadNoiseStddevAfterGain =  2.100; // in digital counts
+const float Sensor::kReadNoiseVarBeforeGain =
+            Sensor::kReadNoiseStddevBeforeGain *
+            Sensor::kReadNoiseStddevBeforeGain;
+const float Sensor::kReadNoiseVarAfterGain =
+            Sensor::kReadNoiseStddevAfterGain *
+            Sensor::kReadNoiseStddevAfterGain;
+
+// While each row has to read out, reset, and then expose, the (reset +
+// expose) sequence can be overlapped by other row readouts, so the final
+// minimum frame duration is purely a function of row readout time, at least
+// if there's a reasonable number of rows.
+const nsecs_t Sensor::kRowReadoutTime =
+            Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
+
+const uint32_t Sensor::kAvailableSensitivities[5] =
+    {100, 200, 400, 800, 1600};
+const uint32_t Sensor::kDefaultSensitivity = 100;
+
+/** A few utility functions for math, normal distributions */
+
+// Take advantage of IEEE floating-point format to calculate an approximate
+// square root. Accurate to within +-3.6%
+float sqrtf_approx(float r) {
+    // Modifier is based on IEEE floating-point representation; the
+    // manipulations boil down to finding approximate log2, dividing by two, and
+    // then inverting the log2. A bias is added to make the relative error
+    // symmetric about the real answer.
+    const int32_t modifier = 0x1FBB4000;
+
+    int32_t r_i = *(int32_t*)(&r);
+    r_i = (r_i >> 1) + modifier;
+
+    return *(float*)(&r_i);
+}
+
+
+
+Sensor::Sensor(EmulatedFakeCamera2 *parent):
+        Thread(false),
+        mParent(parent),
+        mGotVSync(false),
+        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
+        mFrameDuration(kFrameDurationRange[0]),
+        mGainFactor(kDefaultSensitivity),
+        mNextBuffers(NULL),
+        mCapturedBuffers(NULL),
+        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
+{
+
+}
+
+Sensor::~Sensor() {
+    shutDown();
+}
+
+status_t Sensor::startUp() {
+    ALOGV("%s: E", __FUNCTION__);
+
+    int res;
+    mCapturedBuffers = NULL;
+    res = run("EmulatedFakeCamera2::Sensor",
+            ANDROID_PRIORITY_URGENT_DISPLAY);
+
+    if (res != OK) {
+        ALOGE("Unable to start up sensor capture thread: %d", res);
+    }
+    return res;
+}
+
+status_t Sensor::shutDown() {
+    ALOGV("%s: E", __FUNCTION__);
+
+    int res;
+    res = requestExitAndWait();
+    if (res != OK) {
+        ALOGE("Unable to shut down sensor capture thread: %d", res);
+    }
+    return res;
+}
+
+Scene &Sensor::getScene() {
+    return mScene;
+}
+
+void Sensor::setExposureTime(uint64_t ns) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Exposure set to %f", ns/1000000.f);
+    mExposureTime = ns;
+}
+
+void Sensor::setFrameDuration(uint64_t ns) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Frame duration set to %f", ns/1000000.f);
+    mFrameDuration = ns;
+}
+
+void Sensor::setSensitivity(uint32_t gain) {
+    Mutex::Autolock lock(mControlMutex);
+    ALOGVV("Gain set to %d", gain);
+    mGainFactor = gain;
+}
+
+void Sensor::setDestinationBuffers(Buffers *buffers) {
+    Mutex::Autolock lock(mControlMutex);
+    mNextBuffers = buffers;
+}
+
+bool Sensor::waitForVSync(nsecs_t reltime) {
+    int res;
+    Mutex::Autolock lock(mControlMutex);
+
+    mGotVSync = false;
+    res = mVSync.waitRelative(mControlMutex, reltime);
+    if (res != OK && res != TIMED_OUT) {
+        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
+        return false;
+    }
+    return mGotVSync;
+}
+
+bool Sensor::waitForNewFrame(nsecs_t reltime,
+        nsecs_t *captureTime) {
+    Mutex::Autolock lock(mReadoutMutex);
+    uint8_t *ret;
+    if (mCapturedBuffers == NULL) {
+        int res;
+        res = mReadoutComplete.waitRelative(mReadoutMutex, reltime);
+        if (res == TIMED_OUT) {
+            return false;
+        } else if (res != OK || mCapturedBuffers == NULL) {
+            ALOGE("Error waiting for sensor readout signal: %d", res);
+            return false;
+        }
+    }
+    *captureTime = mCaptureTime;
+    mCapturedBuffers = NULL;
+    return true;
+}
+
+status_t Sensor::readyToRun() {
+    ALOGV("Starting up sensor thread");
+    mStartupTime = systemTime();
+    mNextCaptureTime = 0;
+    mNextCapturedBuffers = NULL;
+    return OK;
+}
+
+bool Sensor::threadLoop() {
+    /**
+     * Sensor capture operation main loop.
+     *
+     * Stages are out-of-order relative to a single frame's processing, but
+     * in-order in time.
+     */
+
+    /**
+     * Stage 1: Read in latest control parameters
+     */
+    uint64_t exposureDuration;
+    uint64_t frameDuration;
+    uint32_t gain;
+    Buffers *nextBuffers;
+    {
+        Mutex::Autolock lock(mControlMutex);
+        exposureDuration = mExposureTime;
+        frameDuration    = mFrameDuration;
+        gain             = mGainFactor;
+        nextBuffers      = mNextBuffers;
+        // Don't reuse a buffer set
+        mNextBuffers = NULL;
+
+        // Signal VSync for start of readout
+        ALOGVV("Sensor VSync");
+        mGotVSync = true;
+        mVSync.signal();
+    }
+
+    /**
+     * Stage 3: Read out latest captured image
+     */
+
+    Buffers *capturedBuffers = NULL;
+    nsecs_t captureTime = 0;
+
+    nsecs_t startRealTime  = systemTime();
+    // Stagefright cares about system time for timestamps, so base simulated
+    // time on that.
+    nsecs_t simulatedTime    = startRealTime;
+    nsecs_t frameEndRealTime = startRealTime + frameDuration;
+    nsecs_t frameReadoutEndRealTime = startRealTime +
+            kRowReadoutTime * kResolution[1];
+
+    if (mNextCapturedBuffers != NULL) {
+        ALOGVV("Sensor starting readout");
+        // Pretend we're doing readout now; will signal once enough time has elapsed
+        capturedBuffers = mNextCapturedBuffers;
+        captureTime    = mNextCaptureTime;
+    }
+    simulatedTime += kRowReadoutTime + kMinVerticalBlank;
+
+    // TODO: Move this signal to another thread to simulate readout
+    // time properly
+    if (capturedBuffers != NULL) {
+        ALOGVV("Sensor readout complete");
+        Mutex::Autolock lock(mReadoutMutex);
+        mCapturedBuffers = capturedBuffers;
+        mCaptureTime = captureTime;
+        mReadoutComplete.signal();
+        capturedBuffers = NULL;
+    }
+
+    /**
+     * Stage 2: Capture new image
+     */
+
+    mNextCaptureTime = simulatedTime;
+    mNextCapturedBuffers = nextBuffers;
+
+    if (mNextCapturedBuffers != NULL) {
+        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
+                (float)exposureDuration/1e6, gain);
+        mScene.setExposureDuration((float)exposureDuration/1e9);
+        mScene.calculateScene(mNextCaptureTime);
+        for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
+            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
+            ALOGVV("Sensor capturing buffer %d: stream %d,"
+                    " %d x %d, format %x, stride %d, buf %p, img %p",
+                    i, b.streamId, b.width, b.height, b.format, b.stride,
+                    b.buffer, b.img);
+            switch(b.format) {
+                case HAL_PIXEL_FORMAT_RAW_SENSOR:
+                    captureRaw(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_RGBA_8888:
+                    captureRGBA(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    // Add auxillary buffer of the right size
+                    // Assumes only one BLOB (JPEG) buffer in
+                    // mNextCapturedBuffers
+                    StreamBuffer bAux;
+                    bAux.streamId = -1;
+                    bAux.width = b.width;
+                    bAux.height = b.height;
+                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
+                    bAux.stride = b.width;
+                    bAux.buffer = NULL;
+                    // TODO: Reuse these
+                    bAux.img = new uint8_t[b.width * b.height * 3];
+                    captureRGB(bAux.img, gain, b.stride);
+                    mNextCapturedBuffers->push_back(bAux);
+                    break;
+                case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+                    captureNV21(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_YV12:
+                    // TODO:
+                    ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
+                    break;
+                default:
+                    ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
+                            b.format);
+                    break;
+            }
+        }
+    }
+
+    ALOGVV("Sensor vertical blanking interval");
+    nsecs_t workDoneRealTime = systemTime();
+    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
+    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
+        timespec t;
+        t.tv_sec = (frameEndRealTime - workDoneRealTime)  / 1000000000L;
+        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
+
+        int ret;
+        do {
+            ret = nanosleep(&t, &t);
+        } while (ret != 0);
+    }
+    nsecs_t endRealTime = systemTime();
+    ALOGVV("Frame cycle took %d ms, target %d ms",
+            (int)((endRealTime - startRealTime)/1000000),
+            (int)(frameDuration / 1000000));
+    return true;
+};
+
+void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    float noiseVarGain =  totalGain * totalGain;
+    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
+            + kReadNoiseVarAfterGain;
+
+    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
+    mScene.setReadoutPixel(0,0);
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        int *bayerRow = bayerSelect + (y & 0x1) * 2;
+        uint16_t *px = (uint16_t*)img + y * stride;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t electronCount;
+            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
+
+            // TODO: Better pixel saturation curve?
+            electronCount = (electronCount < kSaturationElectrons) ?
+                    electronCount : kSaturationElectrons;
+
+            // TODO: Better A/D saturation curve?
+            uint16_t rawCount = electronCount * totalGain;
+            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
+
+            // Calculate noise value
+            // TODO: Use more-correct Gaussian instead of uniform noise
+            float photonNoiseVar = electronCount * noiseVarGain;
+            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
+            // Scaled to roughly match gaussian/uniform noise stddev
+            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
+
+            rawCount += kBlackLevel;
+            rawCount += noiseStddev * noiseSample;
+
+            *px++ = rawCount;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("Raw sensor image captured");
+}
+
+void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    uint32_t inc = kResolution[0] / stride;
+
+    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
+        uint8_t *px = img + outY * stride * 4;
+        mScene.setReadoutPixel(0, y);
+        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+            *px++ = 255;
+            for (unsigned int j = 1; j < inc; j++)
+                mScene.getPixelElectrons();
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("RGBA sensor image captured");
+}
+
+void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    uint32_t inc = kResolution[0] / stride;
+
+    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
+        mScene.setReadoutPixel(0, y);
+        uint8_t *px = img + outY * stride * 3;
+        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+            for (unsigned int j = 1; j < inc; j++)
+                mScene.getPixelElectrons();
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("RGB sensor image captured");
+}
+
+void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+
+    // TODO: Make full-color
+    uint32_t inc = kResolution[0] / stride;
+    uint32_t outH = kResolution[1] / inc;
+    for (unsigned int y = 0, outY = 0, outUV = outH;
+         y < kResolution[1]; y+=inc, outY++, outUV ) {
+        uint8_t *pxY = img + outY * stride;
+        mScene.setReadoutPixel(0,y);
+        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+            uint32_t avg = (rCount + gCount + bCount) / 3;
+            *pxY++ = avg < 255*64 ? avg / 64 : 255;
+            for (unsigned int j = 1; j < inc; j++)
+                mScene.getPixelElectrons();
+        }
+    }
+    for (unsigned int y = 0, outY = outH; y < kResolution[1]/2; y+=inc, outY++) {
+        uint8_t *px = img + outY * stride;
+        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
+            // UV to neutral
+            *px++ = 128;
+            *px++ = 128;
+        }
+    }
+    ALOGVV("NV21 sensor image captured");
+}
+
+} // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.h b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
new file mode 100644
index 0000000..2919be4
--- /dev/null
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This class is a simple simulation of a typical CMOS cellphone imager chip,
+ * which outputs 12-bit Bayer-mosaic raw images.
+ *
+ * The sensor is abstracted as operating as a pipeline 3 stages deep;
+ * conceptually, each frame to be captured goes through these three stages. The
+ * processing step for the sensor is marked off by vertical sync signals, which
+ * indicate the start of readout of the oldest frame. The interval between
+ * processing steps depends on the frame duration of the frame currently being
+ * captured. The stages are 1) configure, 2) capture, and 3) readout. During
+ * configuration, the sensor's registers for settings such as exposure time,
+ * frame duration, and gain are set for the next frame to be captured. In stage
+ * 2, the image data for the frame is actually captured by the sensor. Finally,
+ * in stage 3, the just-captured data is read out and sent to the rest of the
+ * system.
+ *
+ * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
+ * sensor are exposed earlier in time than larger-numbered rows, with the time
+ * offset between each row being equal to the row readout time.
+ *
+ * The characteristics of this sensor don't correspond to any actual sensor,
+ * but are not far off typical sensors.
+ *
+ * Example timing diagram, with three frames:
+ *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
+ *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
+ * Legend:
+ *   C = update sensor registers for frame
+ *   v = row in reset (vertical blanking interval)
+ *   E = row capturing image data
+ *   R = row being read out
+ *   | = vertical sync signal
+ *time(ms)|   0          55        105       155            230     270
+ * Frame 0|   :configure : capture : readout :              :       :
+ *  Row # | ..|CCCC______|_________|_________|              :       :
+ *      0 |   :\          \vvvvvEEEER         \             :       :
+ *    500 |   : \          \vvvvvEEEER         \            :       :
+ *   1000 |   :  \          \vvvvvEEEER         \           :       :
+ *   1500 |   :   \          \vvvvvEEEER         \          :       :
+ *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
+ * Frame 1|   :           configure  capture      readout   :       :
+ *  Row # |   :          |CCCC_____|_________|______________|       :
+ *      0 |   :          :\         \vvvvvEEEER              \      :
+ *    500 |   :          : \         \vvvvvEEEER              \     :
+ *   1000 |   :          :  \         \vvvvvEEEER              \    :
+ *   1500 |   :          :   \         \vvvvvEEEER              \   :
+ *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
+ * Frame 2|   :          :          configure     capture    readout:
+ *  Row # |   :          :         |CCCC_____|______________|_______|...
+ *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
+ *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
+ *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
+ *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
+ *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
+ */
+
+#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
+#define HW_EMULATOR_CAMERA2_SENSOR_H
+
+#include "utils/Thread.h"
+#include "utils/Mutex.h"
+#include "utils/Timers.h"
+
+#include "Scene.h"
+#include "Base.h"
+
+namespace android {
+
+class EmulatedFakeCamera2;
+
class Sensor: private Thread, public virtual RefBase {
  public:

    // 'parent' is the owning emulated camera device; retained in mParent.
    // NOTE(review): mParent is not used by the code visible in Sensor.cpp's
    // capture path — confirm its use elsewhere.
    Sensor(EmulatedFakeCamera2 *parent);
    ~Sensor();

    /*
     * Power control
     */

    status_t startUp();
    status_t shutDown();

    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    // Gain is in units of 1/100 (the capture code divides by 100).
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started.  May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if new frame is
    // returned, false if timed out.
    bool waitForNewFrame(nsecs_t reltime,
            nsecs_t *captureTime);

    /**
     * Static sensor characteristics
     */
    // Pixel array size: [0] = width (columns), [1] = height (rows).
    static const unsigned int kResolution[2];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    // Minimum vertical blanking added after each row readout.
    static const nsecs_t kMinVerticalBlank;

    // Color filter arrangement; value defined in Sensor.cpp.
    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    // Variance forms of the above, used directly by the noise synthesis.
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    static const nsecs_t kRowReadoutTime;

    // Discrete sensitivity (gain) settings the sensor supports.
    static const uint32_t kAvailableSensitivities[5];
    static const uint32_t kDefaultSensitivity;

  private:
    EmulatedFakeCamera2 *mParent;

    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;            // Signaled at start of each frame's readout
    bool      mGotVSync;         // Set alongside mVSync; guards spurious wakeups
    uint64_t  mExposureTime;     // ns
    uint64_t  mFrameDuration;    // ns
    uint32_t  mGainFactor;       // In 1/100 gain units
    Buffers  *mNextBuffers;      // Destination set for the next capture; consumed once

    // End of control parameters

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutComplete;  // Signaled when a frame is published below
    Buffers  *mCapturedBuffers;  // Latest completed frame; NULL once consumed
    nsecs_t   mCaptureTime;      // Capture start time of mCapturedBuffers
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    virtual bool threadLoop();

    // Frame currently in the simulated capture stage; published to
    // mCapturedBuffers/mCaptureTime at the next readout stage.
    nsecs_t mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    // Simulated world supplying per-pixel electron counts.
    Scene mScene;

    // Per-format renderers; gain is in 1/100 units, stride is the
    // destination row stride.
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride);
};
+
+}
+
+#endif // HW_EMULATOR_CAMERA2_SENSOR_H
diff --git a/tools/idegen/README b/tools/idegen/README
index 1f773d8..02bb593 100644
--- a/tools/idegen/README
+++ b/tools/idegen/README
@@ -7,10 +7,10 @@
     If this is your first time using IDEGen...
 
         IDEA needs a lot of memory. Add "-Xms748m -Xmx748m" to your VM options
-        in "IDEA_HOME/bin/idea.vmoptions" on Linux or 
+        in "IDEA_HOME/bin/idea.vmoptions" on Linux or
         "IntelliJ IDEA.app/Contents/Info.plist" on OS X.
 
-        Create a JDK configuration named "1.5 (No Libraries)" by adding a new
+        Create a JDK configuration named "1.6 (No Libraries)" by adding a new
         JDK like you normally would and then removing all of the jar entries
         under the "Classpath" tab. This will ensure that you only get access to
         Android's core libraries and not those from your desktop VM.
@@ -18,13 +18,13 @@
     From the project's root directory...
 
         Repeat these steps after each sync...
-        
+
         1) make (to produce generated .java source)
         2) development/tools/idegen/idegen.sh
         3) Open android.ipr in IntelliJ. If you already have the project open,
            hit the sync button in IntelliJ, and it will automatically detect the
            updated configuration.
-        
+
         If you get unexpected compilation errors from IntelliJ, try running
         "Build -> Rebuild Project". Sometimes IntelliJ gets confused after the
         project changes significantly.
@@ -53,7 +53,7 @@
 
 Excluding source roots and jars
 
-    IDEGen keeps an exclusion list in the "excluded-paths" file. This file 
+    IDEGen keeps an exclusion list in the "excluded-paths" file. This file
     has one regular expression per line that matches paths (relative to the
     project root) that should be excluded from the IDE configuration. We
     use Java's regular expression parser (see java.util.regex.Parser).
@@ -62,7 +62,7 @@
     "excluded-paths" file in the project's root directory. For example, you
     might exclude all apps except the Browser in your IDE configuration with
     this regular expression: "^packages/apps/(?!Browser)".
-    
+
 Controlling source root ordering (Eclipse)
 
     You may want some source roots to come before others in Eclipse. Simply
@@ -77,4 +77,4 @@
     For example, if you want your applications's source root to come first,
     you might add an expression like "^packages/apps/MyApp/src$" to the top
     of the "path-precedence" file.  To make source roots under ./out come last,
-    add "^(?!out/)" (which matches all paths that don't start with "out/").
\ No newline at end of file
+    add "^(?!out/)" (which matches all paths that don't start with "out/").
diff --git a/tools/idegen/excluded-paths b/tools/idegen/excluded-paths
index 35280ad..9122c30 100644
--- a/tools/idegen/excluded-paths
+++ b/tools/idegen/excluded-paths
@@ -62,3 +62,6 @@
 # This directory contains only an R.java file which is the same as the one in
 # Camera_intermediates.
 ^out/target/common/obj/APPS/CameraTests_intermediates$
+
+# Exclude all prebuilts jars.
+^prebuilts/.*\.jar$
diff --git a/tools/idegen/idegen.ipr b/tools/idegen/idegen.ipr
index 00cf4fd..75771b5 100644
--- a/tools/idegen/idegen.ipr
+++ b/tools/idegen/idegen.ipr
@@ -135,7 +135,7 @@
     <option name="GENERATE_NO_WARNINGS" value="false" />
     <option name="DEPRECATION" value="true" />
     <option name="ADDITIONAL_OPTIONS_STRING" value="" />
-    <option name="MAXIMUM_HEAP_SIZE" value="128" />
+    <option name="MAXIMUM_HEAP_SIZE" value="800" />
   </component>
   <component name="JavadocGenerationManager">
     <option name="OUTPUT_DIRECTORY" />
@@ -298,7 +298,7 @@
       <module fileurl="file://$PROJECT_DIR$/idegen.iml" filepath="$PROJECT_DIR$/idegen.iml" />
     </modules>
   </component>
-  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.5" project-jdk-type="JavaSDK">
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6 (No Libraries)" project-jdk-type="JavaSDK">
     <output url="file://$PROJECT_DIR$/classes" />
   </component>
   <component name="RmicSettings">
diff --git a/tools/idegen/templates/android.ipr b/tools/idegen/templates/android.ipr
index d6aba4b..f857d4a 100644
--- a/tools/idegen/templates/android.ipr
+++ b/tools/idegen/templates/android.ipr
@@ -26,7 +26,6 @@
         <option name="PLACE_ASSIGNMENT_SIGN_ON_NEXT_LINE" value="true" />
       </value>
     </option>
-    <option name="USE_PER_PROJECT_SETTINGS" value="true" />
   </component>
   <component name="CompilerConfiguration">
     <option name="DEFAULT_COMPILER" value="Javac" />
@@ -122,7 +121,7 @@
     <option name="GENERATE_NO_WARNINGS" value="false" />
     <option name="DEPRECATION" value="false" />
     <option name="ADDITIONAL_OPTIONS_STRING" value="-Xlint:all,-deprecation,-serial" />
-    <option name="MAXIMUM_HEAP_SIZE" value="512" />
+    <option name="MAXIMUM_HEAP_SIZE" value="800" />
   </component>
   <component name="JavadocGenerationManager">
     <option name="OUTPUT_DIRECTORY" />
@@ -282,7 +281,7 @@
       <module fileurl="file://$PROJECT_DIR$/android.iml" filepath="$PROJECT_DIR$/android.iml" />
     </modules>
   </component>
-  <component name="ProjectRootManager" version="2" assert-keyword="true" jdk-15="true" project-jdk-name="1.5 (No Libraries)" project-jdk-type="JavaSDK">
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6 (No Libraries)" project-jdk-type="JavaSDK">
     <output url="file:///tmp/intellij$PROJECT_DIR$/classes" />
   </component>
   <component name="RmicSettings">