Reconcile with ics-mr1-release

Change-Id: I038f5d340276fc8b7498e3cb375ac1ece2d6917c
diff --git a/build/Android.mk b/build/Android.mk
index e4eeb0a..1cb0653 100644
--- a/build/Android.mk
+++ b/build/Android.mk
@@ -1,3 +1,27 @@
+# ===== SDK source.property files =====
+
+# Add all files to be generated from the source.prop templates to the SDK pre-requisites
+ALL_SDK_FILES += $(patsubst \
+                   $(TOPDIR)development/sdk/%_source.prop_template, \
+                   $(HOST_OUT)/development/sdk/%_source.properties, \
+                   $(wildcard $(TOPDIR)development/sdk/*_source.prop_template))
+
+# Rule to convert a source.prop template into the desired source.properties
+# Rewritten variables:
+# - ${PLATFORM_VERSION}          e.g. "1.0"
+# - ${PLATFORM_SDK_VERSION}      e.g. "3", aka the API level
+# - ${PLATFORM_VERSION_CODENAME} e.g. "REL" (transformed into "") or "Cupcake"
+$(HOST_OUT)/development/sdk/%_source.properties : $(TOPDIR)development/sdk/%_source.prop_template
+	@echo Generate $@
+	$(hide) mkdir -p $(dir $@)
+	$(hide) sed -e 's/$${PLATFORM_VERSION}/$(PLATFORM_VERSION)/' \
+		 -e 's/$${PLATFORM_SDK_VERSION}/$(PLATFORM_SDK_VERSION)/' \
+		 -e 's/$${PLATFORM_VERSION_CODENAME}/$(subst REL,,$(PLATFORM_VERSION_CODENAME))/' \
+		 $< > $@
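+#
+# For illustration, a hypothetical template line such as
+#     AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+# would be emitted as, e.g., "AndroidVersion.ApiLevel=14", while any
+# ${PLATFORM_VERSION_CODENAME} occurrence becomes empty on "REL" (release) builds.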
+
+
+# ===== Android Support/Compatibility Library =====
+
 LOCAL_PATH := $(call my-dir)
 
 # The source files for this library are _all_ generated, something we don't do
diff --git a/build/sdk-android-armeabi-v7a.atree b/build/sdk-android-armeabi-v7a.atree
index 8867bf7..e586945 100644
--- a/build/sdk-android-armeabi-v7a.atree
+++ b/build/sdk-android-armeabi-v7a.atree
@@ -18,4 +18,4 @@
 prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu-armv7 system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
 
 # version files for the SDK updater, from development.git
-development/sdk/images_armeabi-v7a_source.properties   system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
+${HOST_OUT}/development/sdk/images_armeabi-v7a_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk-android-armeabi.atree b/build/sdk-android-armeabi.atree
index a31eca6..15f1f58 100644
--- a/build/sdk-android-armeabi.atree
+++ b/build/sdk-android-armeabi.atree
@@ -17,4 +17,4 @@
 prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
 
 # version files for the SDK updater, from development.git
-development/sdk/images_armeabi_source.properties   system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
+${HOST_OUT}/development/sdk/images_armeabi_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk-android-x86.atree b/build/sdk-android-x86.atree
index 748478a..a309149 100644
--- a/build/sdk-android-x86.atree
+++ b/build/sdk-android-x86.atree
@@ -17,4 +17,4 @@
 prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
 
 # version files for the SDK updater, from development.git
-development/sdk/images_x86_source.properties   system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
+${HOST_OUT}/development/sdk/images_x86_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk-windows-x86.atree b/build/sdk-windows-x86.atree
index 68105a2..3144b27 100644
--- a/build/sdk-windows-x86.atree
+++ b/build/sdk-windows-x86.atree
@@ -27,7 +27,9 @@
 #
 
 rm platform-tools/adb
+rm platform-tools/fastboot
 bin/adb.exe                             strip platform-tools/adb.exe
+bin/fastboot.exe                        strip platform-tools/fastboot.exe
 bin/AdbWinUsbApi.dll                          platform-tools/AdbWinUsbApi.dll
 bin/AdbWinApi.dll                             platform-tools/AdbWinApi.dll
 
diff --git a/build/sdk.atree b/build/sdk.atree
index 148899b..a6f75f3 100644
--- a/build/sdk.atree
+++ b/build/sdk.atree
@@ -43,6 +43,7 @@
 bin/adb                                 strip platform-tools/adb
 bin/aapt                                strip platform-tools/aapt
 bin/aidl                                strip platform-tools/aidl
+bin/fastboot                            strip platform-tools/fastboot
 bin/llvm-rs-cc                          strip platform-tools/llvm-rs-cc
 
 # dx
@@ -50,7 +51,6 @@
 bin/dexdump                                   platform-tools/dexdump
 framework/dx.jar                              platform-tools/lib/dx.jar
 
-
 # Framework include for Renderscript
 frameworks/base/libs/rs/scriptc               platform-tools/renderscript/include
 external/clang/lib/Headers                    platform-tools/renderscript/clang-include
@@ -61,7 +61,7 @@
 ##############################################################################
 
 # version files for the SDK updater, from development.git
-development/sdk/platform_source.properties    platforms/${PLATFORM_NAME}/source.properties
+${HOST_OUT}/development/sdk/platform_source.properties platforms/${PLATFORM_NAME}/source.properties
 
 # copy build prop from out/.../sdk/
 sdk/sdk-build.prop                            platforms/${PLATFORM_NAME}/build.prop
@@ -132,7 +132,7 @@
 ##############################################################################
 
 # version files for the SDK updater, from sdk.git
-development/sdk/doc_source.properties docs/source.properties
+${HOST_OUT}/development/sdk/doc_source.properties docs/source.properties
 
 # the docs
 docs/offline-sdk docs
diff --git a/build/tools/sdk_repo.mk b/build/tools/sdk_repo.mk
index cc0d1be..a0d93de 100644
--- a/build/tools/sdk_repo.mk
+++ b/build/tools/sdk_repo.mk
@@ -88,10 +88,10 @@
 # $3=package to create, must be "sources"
 #
 define mk-sdk-repo-sources
-$(call sdk-repo-pkg-zip,$(1),$(2),$(3)): $(2) $(TOPDIR)development/sdk/source_source.properties
+$(call sdk-repo-pkg-zip,$(1),$(2),$(3)): $(2) $(HOST_OUT)/development/sdk/source_source.properties
 	@echo "Building SDK sources package"
-	$(hide) $(TOPDIR)development/build/tools/mk_sources_zip.py --exec-zip \
-			$(TOPDIR)development/sdk/source_source.properties \
+	$(hide) $(TOPDIR)development/build/tools/mk_sources_zip.py \
+			$(HOST_OUT)/development/sdk/source_source.properties \
 			$(call sdk-repo-pkg-zip,$(1),$(2),$(3)) \
 			$(TOPDIR).
 $(call dist-for-goals, sdk_repo, $(call sdk-repo-pkg-zip,$(1),$(2),$(3)))
diff --git a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
index 5d078c2..c2a1093 100644
--- a/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
+++ b/cmds/monkey/src/com/android/commands/monkey/MonkeySourceNetworkViews.java
@@ -378,7 +378,7 @@
             if (command.size() == 2) {
                 String text = command.get(1);
                 List<AccessibilityNodeInfo> nodes = AccessibilityInteractionClient.getInstance()
-                    .findAccessibilityNodeInfosByViewTextInActiveWindow(sConnectionId, text);
+                    .findAccessibilityNodeInfosByTextInActiveWindow(sConnectionId, text);
                 ViewIntrospectionCommand idGetter = new GetAccessibilityIds();
                 List<String> emptyArgs = new ArrayList<String>();
                 StringBuilder ids = new StringBuilder();
diff --git a/host/windows/usb/android_winusb.inf b/host/windows/usb/android_winusb.inf
index 47cf2f7..ab25970 100755
--- a/host/windows/usb/android_winusb.inf
+++ b/host/windows/usb/android_winusb.inf
@@ -6,7 +6,7 @@
 Class               = AndroidUsbDeviceClass
 ClassGuid           = {3F966BD9-FA04-4ec5-991C-D326973B5128}
 Provider            = %ProviderName%
-DriverVer           = 12/06/2010,4.0.0000.00000
+DriverVer           = 10/19/2011,6.0.0000.00000
 CatalogFile.NTx86   = androidwinusb86.cat
 CatalogFile.NTamd64 = androidwinusba64.cat
 
@@ -26,42 +26,33 @@
 %ProviderName% = Google, NTx86, NTamd64
 
 [Google.NTx86]
-; HTC Dream
-%SingleAdbInterface%        = USB_Install, USB\VID_0BB4&PID_0C01
-%CompositeAdbInterface%     = USB_Install, USB\VID_0BB4&PID_0C02&MI_01
-%SingleBootLoaderInterface% = USB_Install, USB\VID_0BB4&PID_0FFF
-; HTC Magic
-%CompositeAdbInterface%     = USB_Install, USB\VID_0BB4&PID_0C03&MI_01
-;
-;Moto Sholes
-%SingleAdbInterface%        = USB_Install, USB\VID_22B8&PID_41DB
-%CompositeAdbInterface%     = USB_Install, USB\VID_22B8&PID_41DB&MI_01
-;
-;Google NexusOne
+
+;Google Nexus One
 %SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_0D02
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_0D02&MI_01
 %SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E11
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E12&MI_01
+
+;Google Nexus S
+%SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E21
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E22&MI_01
+%SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E23
+%CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E24&MI_01
+
 
 [Google.NTamd64]
-; HTC Dream
-%SingleAdbInterface%        = USB_Install, USB\VID_0BB4&PID_0C01
-%CompositeAdbInterface%     = USB_Install, USB\VID_0BB4&PID_0C02&MI_01
-%SingleBootLoaderInterface% = USB_Install, USB\VID_0BB4&PID_0FFF
-; HTC Magic
-%CompositeAdbInterface%     = USB_Install, USB\VID_0BB4&PID_0C03&MI_01
-;
-;Moto Sholes
-%SingleAdbInterface%        = USB_Install, USB\VID_22B8&PID_41DB
-%CompositeAdbInterface%     = USB_Install, USB\VID_22B8&PID_41DB&MI_01
-;
-;Google NexusOne
+
+;Google Nexus One
 %SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_0D02
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_0D02&MI_01
 %SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E11
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E12&MI_01
+
+;Google Nexus S
+%SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E21
 %CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E22&MI_01
+%SingleAdbInterface%        = USB_Install, USB\VID_18D1&PID_4E23
+%CompositeAdbInterface%     = USB_Install, USB\VID_18D1&PID_4E24&MI_01
 
 [USB_Install]
 Include = winusb.inf
diff --git a/ndk/platforms/android-14/samples/native-media/AndroidManifest.xml b/ndk/platforms/android-14/samples/native-media/AndroidManifest.xml
new file mode 100644
index 0000000..07668cf
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/AndroidManifest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+      package="com.example.nativemedia">
+
+    <uses-feature android:glEsVersion="0x00020000" />
+
+    <application android:icon="@drawable/icon" android:label="@string/app_name">
+        <activity android:name=".NativeMedia"
+                  android:label="@string/app_name">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+
+    </application>
+
+    <uses-sdk android:minSdkVersion="14" />
+
+    <!-- INTERNET is needed to use a URI-based media player, depending on the URI -->
+    <uses-permission android:name="android.permission.INTERNET"></uses-permission>
+
+</manifest>
diff --git a/ndk/platforms/android-14/samples/native-media/NativeMedia.ts b/ndk/platforms/android-14/samples/native-media/NativeMedia.ts
new file mode 100644
index 0000000..d9d54b2
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/NativeMedia.ts
Binary files differ
diff --git a/ndk/platforms/android-14/samples/native-media/README.txt b/ndk/platforms/android-14/samples/native-media/README.txt
index 516698a..f900e31 100644
--- a/ndk/platforms/android-14/samples/native-media/README.txt
+++ b/ndk/platforms/android-14/samples/native-media/README.txt
@@ -1,2 +1,8 @@
-The documentation for Android native media based on OpenMAX AL 1.0.1
-references this directory, but the example is not yet available.
+This sample app requires an MPEG-2 Transport Stream file to be
+placed in /sdcard/NativeMedia.ts and encoded as:
+
+  video: H.264 baseline profile
+  audio: AAC LC stereo
+
+For demonstration purposes we have supplied such a .ts file.
+Any actual stream must be created according to the MPEG-2 specification.
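+
+One possible way to create such a stream (an illustrative command, assuming
+an ffmpeg build with libx264 and AAC support, which is not part of this
+sample):
+
+  ffmpeg -i input.mp4 -c:v libx264 -profile:v baseline \
+         -c:a aac -ac 2 -f mpegts NativeMedia.ts
+
+then copy it to the device with: adb push NativeMedia.ts /sdcard/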
diff --git a/ndk/platforms/android-14/samples/native-media/default.properties b/ndk/platforms/android-14/samples/native-media/default.properties
new file mode 100644
index 0000000..2d69917
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/default.properties
@@ -0,0 +1,4 @@
+# Indicates whether an apk should be generated for each density.
+split.density=false
+# Project target.
+target=android-14
diff --git a/ndk/platforms/android-14/samples/native-media/jni/Android.mk b/ndk/platforms/android-14/samples/native-media/jni/Android.mk
new file mode 100644
index 0000000..369ccf8
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/jni/Android.mk
@@ -0,0 +1,30 @@
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE    := native-media-jni
+LOCAL_SRC_FILES := native-media-jni.c
+# for native multimedia
+LOCAL_LDLIBS    += -lOpenMAXAL
+# for logging
+LOCAL_LDLIBS    += -llog
+# for native windows
+LOCAL_LDLIBS    += -landroid
+
+LOCAL_CFLAGS    += -UNDEBUG
+
+include $(BUILD_SHARED_LIBRARY)
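+
+# A typical build invocation (assuming NDK r7 or later, which first shipped
+# the android-14 OpenMAX AL headers and libraries): run ndk-build from the
+# project root.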
diff --git a/ndk/platforms/android-14/samples/native-media/jni/native-media-jni.c b/ndk/platforms/android-14/samples/native-media/jni/native-media-jni.c
new file mode 100644
index 0000000..bdf568a
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/jni/native-media-jni.c
@@ -0,0 +1,526 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This is a JNI example where we use native methods to play video
+ * using OpenMAX AL. See the corresponding Java source file located at:
+ *
+ *   src/com/example/nativemedia/NativeMedia.java
+ *
+ * In this example we use assert() for "impossible" error conditions,
+ * and explicit handling and recovery for more likely error conditions.
+ */
+
+#include <assert.h>
+#include <jni.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+
+// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
+#include <android/log.h>
+#define TAG "NativeMedia"
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+
+// for native media
+#include <OMXAL/OpenMAXAL.h>
+#include <OMXAL/OpenMAXAL_Android.h>
+
+// for native window JNI
+#include <android/native_window_jni.h>
+
+// engine interfaces
+static XAObjectItf engineObject = NULL;
+static XAEngineItf engineEngine = NULL;
+
+// output mix interfaces
+static XAObjectItf outputMixObject = NULL;
+
+// streaming media player interfaces
+static XAObjectItf             playerObj = NULL;
+static XAPlayItf               playerPlayItf = NULL;
+static XAAndroidBufferQueueItf playerBQItf = NULL;
+static XAStreamInformationItf  playerStreamInfoItf = NULL;
+static XAVolumeItf             playerVolItf = NULL;
+
+// number of required interfaces for the MediaPlayer creation
+#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf
+
+// video sink for the player
+static ANativeWindow* theNativeWindow;
+
+// number of buffers in our buffer queue, an arbitrary number
+#define NB_BUFFERS 8
+
+// we're streaming MPEG-2 transport stream data, so operate on the TS packet size
+#define MPEG2_TS_PACKET_SIZE 188
+
+// number of MPEG-2 transport stream packets per buffer, an arbitrary number
+#define PACKETS_PER_BUFFER 10
+
+// determines how much memory we're dedicating to memory caching
+#define BUFFER_SIZE (PACKETS_PER_BUFFER*MPEG2_TS_PACKET_SIZE)
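+// (10 packets * 188 bytes = 1880 bytes per buffer; dataCache below holds 8 such buffers)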
+
+// where we cache in memory the data to play
+// note this memory is re-used by the buffer queue callback
+static char dataCache[BUFFER_SIZE * NB_BUFFERS];
+
+// handle of the file to play
+static FILE *file;
+
+// has the app reached the end of the file
+static jboolean reachedEof = JNI_FALSE;
+
+// constant to identify a buffer context which is the end of the stream to decode
+static const int kEosBufferCntxt = 1980; // a magic value we can compare against
+
+// For mutual exclusion between callback thread and application thread(s).
+// The mutex protects reachedEof, discontinuity,
+// The condition is signalled when a discontinuity is acknowledged.
+
+static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
+static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
+
+// whether a discontinuity is in progress
+static jboolean discontinuity = JNI_FALSE;
+
+static jboolean enqueueInitialBuffers(jboolean discontinuity);
+
+// AndroidBufferQueueItf callback to supply MPEG-2 TS packets to the media player
+static XAresult AndroidBufferQueueCallback(
+        XAAndroidBufferQueueItf caller,
+        void *pCallbackContext,        /* input */
+        void *pBufferContext,          /* input */
+        void *pBufferData,             /* input */
+        XAuint32 dataSize,             /* input */
+        XAuint32 dataUsed,             /* input */
+        const XAAndroidBufferItem *pItems,/* input */
+        XAuint32 itemsLength           /* input */)
+{
+    XAresult res;
+    int ok;
+
+    // pCallbackContext was specified as NULL at RegisterCallback and is unused here
+    assert(NULL == pCallbackContext);
+
+    // note there is never any contention on this mutex unless a discontinuity request is active
+    ok = pthread_mutex_lock(&mutex);
+    assert(0 == ok);
+
+    // was a discontinuity requested?
+    if (discontinuity) {
+        // Note: can't rewind after EOS, which we send when reaching EOF
+        // (don't send EOS if you plan to play more content through the same player)
+        if (!reachedEof) {
+            // clear the buffer queue
+            res = (*playerBQItf)->Clear(playerBQItf);
+            assert(XA_RESULT_SUCCESS == res);
+            // rewind the data source so we are guaranteed to be at an appropriate point
+            rewind(file);
+            // Enqueue the initial buffers, with a discontinuity indicator on first buffer
+            (void) enqueueInitialBuffers(JNI_TRUE);
+        }
+        // acknowledge the discontinuity request
+        discontinuity = JNI_FALSE;
+        ok = pthread_cond_signal(&cond);
+        assert(0 == ok);
+        goto exit;
+    }
+
+    if ((pBufferData == NULL) && (pBufferContext != NULL)) {
+        const int processedCommand = *(int *)pBufferContext;
+        if (kEosBufferCntxt == processedCommand) {
+            LOGV("EOS was processed\n");
+            // our buffer with the EOS message has been consumed
+            assert(0 == dataSize);
+            goto exit;
+        }
+    }
+
+    // pBufferData is a pointer to a buffer that we previously Enqueued
+    assert((dataSize > 0) && ((dataSize % MPEG2_TS_PACKET_SIZE) == 0));
+    assert(dataCache <= (char *) pBufferData && (char *) pBufferData <
+            &dataCache[BUFFER_SIZE * NB_BUFFERS]);
+    assert(0 == (((char *) pBufferData - dataCache) % BUFFER_SIZE));
+
+    // don't bother trying to read more data once we've hit EOF
+    if (reachedEof) {
+        goto exit;
+    }
+
+    // note we do call fread from multiple threads, but never concurrently
+    size_t bytesRead;
+    bytesRead = fread(pBufferData, 1, BUFFER_SIZE, file);
+    if (bytesRead > 0) {
+        if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
+            LOGV("Dropping last packet because it is not whole");
+        }
+        size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
+        size_t bufferSize = packetsRead * MPEG2_TS_PACKET_SIZE;
+        res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
+                pBufferData /*pData*/,
+                bufferSize /*dataLength*/,
+                NULL /*pMsg*/,
+                0 /*msgLength*/);
+        assert(XA_RESULT_SUCCESS == res);
+    } else {
+        // EOF or I/O error, signal EOS
+        XAAndroidBufferItem msgEos[1];
+        msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
+        msgEos[0].itemSize = 0;
+        // EOS message has no parameters, so the total size of the message is the size of the key
+        //   plus the size of itemSize, both XAuint32
+        res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/,
+                NULL /*pData*/, 0 /*dataLength*/,
+                msgEos /*pMsg*/,
+                sizeof(XAuint32)*2 /*msgLength*/);
+        assert(XA_RESULT_SUCCESS == res);
+        reachedEof = JNI_TRUE;
+    }
+
+exit:
+    ok = pthread_mutex_unlock(&mutex);
+    assert(0 == ok);
+    return XA_RESULT_SUCCESS;
+}
+
+
+// callback invoked whenever there is new or changed stream information
+static void StreamChangeCallback(XAStreamInformationItf caller,
+        XAuint32 eventId,
+        XAuint32 streamIndex,
+        void * pEventData,
+        void * pContext )
+{
+    LOGV("StreamChangeCallback called for stream %u", streamIndex);
+    // pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
+    assert(NULL == pContext);
+    switch (eventId) {
+      case XA_STREAMCBEVENT_PROPERTYCHANGE: {
+        /** From spec 1.0.1:
+            "This event indicates that stream property change has occurred.
+            The streamIndex parameter identifies the stream with the property change.
+            The pEventData parameter for this event is not used and shall be ignored."
+         */
+
+        XAresult res;
+        XAuint32 domain;
+        res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
+        assert(XA_RESULT_SUCCESS == res);
+        switch (domain) {
+          case XA_DOMAINTYPE_VIDEO: {
+            XAVideoStreamInformation videoInfo;
+            res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
+            assert(XA_RESULT_SUCCESS == res);
+            LOGV("Found video size %u x %u, codec ID=%u, frameRate=%u, bitRate=%u, duration=%u ms",
+                        videoInfo.width, videoInfo.height, videoInfo.codecId, videoInfo.frameRate,
+                        videoInfo.bitRate, videoInfo.duration);
+          } break;
+          default:
+            fprintf(stderr, "Unexpected domain %u\n", domain);
+            break;
+        }
+      } break;
+      default:
+        fprintf(stderr, "Unexpected stream event ID %u\n", eventId);
+        break;
+    }
+}
+
+
+// create the engine and output mix objects
+void Java_com_example_nativemedia_NativeMedia_createEngine(JNIEnv* env, jclass clazz)
+{
+    XAresult res;
+
+    // create engine
+    res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // realize the engine
+    res = (*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // get the engine interface, which is needed in order to create other objects
+    res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // create output mix
+    res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // realize the output mix
+    res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE);
+    assert(XA_RESULT_SUCCESS == res);
+
+}
+
+
+// Enqueue the initial buffers, and optionally signal a discontinuity in the first buffer
+static jboolean enqueueInitialBuffers(jboolean discontinuity)
+{
+
+    /* Fill our cache.
+     * We want to read whole packets (integral multiples of MPEG2_TS_PACKET_SIZE).
+     * fread returns units of "elements" not bytes, so we ask for 1-byte elements
+     * and then check that the number of elements is a multiple of the packet size.
+     */
+    size_t bytesRead;
+    bytesRead = fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file);
+    if (bytesRead <= 0) {
+        // could be premature EOF or I/O error
+        return JNI_FALSE;
+    }
+    if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
+        LOGV("Dropping last packet because it is not whole");
+    }
+    size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
+    LOGV("Initially queueing %u packets", packetsRead);
+
+    /* Enqueue the content of our cache before starting to play,
+       we don't want to starve the player */
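+    /* For example, a full 15040-byte cache read yields 80 packets, which the
+       loop below splits into 8 buffers of 10 packets each. */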
+    size_t i;
+    for (i = 0; i < NB_BUFFERS && packetsRead > 0; i++) {
+        // compute size of this buffer
+        size_t packetsThisBuffer = packetsRead;
+        if (packetsThisBuffer > PACKETS_PER_BUFFER) {
+            packetsThisBuffer = PACKETS_PER_BUFFER;
+        }
+        size_t bufferSize = packetsThisBuffer * MPEG2_TS_PACKET_SIZE;
+        XAresult res;
+        if (discontinuity) {
+            // signal discontinuity
+            XAAndroidBufferItem items[1];
+            items[0].itemKey = XA_ANDROID_ITEMKEY_DISCONTINUITY;
+            items[0].itemSize = 0;
+            // DISCONTINUITY message has no parameters,
+            //   so the total size of the message is the size of the key
+            //   plus the size of itemSize, both XAuint32
+            res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
+                    dataCache + i*BUFFER_SIZE, bufferSize, items /*pMsg*/,
+                    sizeof(XAuint32)*2 /*msgLength*/);
+            discontinuity = JNI_FALSE;
+        } else {
+            res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
+                    dataCache + i*BUFFER_SIZE, bufferSize, NULL, 0);
+        }
+        assert(XA_RESULT_SUCCESS == res);
+        packetsRead -= packetsThisBuffer;
+    }
+
+    return JNI_TRUE;
+}
+
+
+// create streaming media player
+jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env,
+        jclass clazz, jstring filename)
+{
+    XAresult res;
+
+    // convert Java string to UTF-8
+    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
+    assert(NULL != utf8);
+
+    // open the file to play
+    file = fopen(utf8, "rb");
+    if (file == NULL) {
+        // release the UTF-8 string before bailing out so it doesn't leak
+        (*env)->ReleaseStringUTFChars(env, filename, utf8);
+        return JNI_FALSE;
+    }
+
+    // configure data source
+    XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
+    XADataFormat_MIME format_mime = {
+            XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS };
+    XADataSource dataSrc = {&loc_abq, &format_mime};
+
+    // configure audio sink
+    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
+    XADataSink audioSnk = { &loc_outmix, NULL };
+
+    // configure image video sink
+    XADataLocator_NativeDisplay loc_nd = {
+            XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
+            // the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
+            (void*)theNativeWindow,              // hWindow
+            // must be NULL
+            NULL                                 // hDisplay
+    };
+    XADataSink imageVideoSink = {&loc_nd, NULL};
+
+    // declare interfaces to use
+    XAboolean     required[NB_MAXAL_INTERFACES]
+                           = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE,           XA_BOOLEAN_TRUE};
+    XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
+                           = {XA_IID_PLAY,     XA_IID_ANDROIDBUFFERQUEUESOURCE,
+                                               XA_IID_STREAMINFORMATION};
+
+    // create media player
+    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
+            NULL, &audioSnk, &imageVideoSink, NULL, NULL,
+            NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
+            iidArray /*const XAInterfaceID *pInterfaceIds*/,
+            required /*const XAboolean *pInterfaceRequired*/);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // release the Java string and UTF-8
+    (*env)->ReleaseStringUTFChars(env, filename, utf8);
+
+    // realize the player
+    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // get the play interface
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // get the stream information interface (for video size)
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // get the volume interface
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // get the Android buffer queue interface
+    res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &playerBQItf);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // specify which events we want to be notified of
+    res = (*playerBQItf)->SetCallbackEventsMask(playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // register the callback from which OpenMAX AL can retrieve the data to play
+    res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // we want to be notified of the video size once it's found, so we register a callback for that
+    res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
+            StreamChangeCallback, NULL);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // enqueue the initial buffers
+    if (!enqueueInitialBuffers(JNI_FALSE)) {
+        return JNI_FALSE;
+    }
+
+    // prepare the player
+    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // set the volume
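+    // (SetVolumeLevel takes millibels; 0 mB plays at nominal, unattenuated volume)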
+    res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);
+    assert(XA_RESULT_SUCCESS == res);
+
+    // start the playback
+    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
+    assert(XA_RESULT_SUCCESS == res);
+
+    return JNI_TRUE;
+}
+
+
+// set the playing state for the streaming media player
+void Java_com_example_nativemedia_NativeMedia_setPlayingStreamingMediaPlayer(JNIEnv* env,
+        jclass clazz, jboolean isPlaying)
+{
+    XAresult res;
+
+    // make sure the streaming media player was created
+    if (NULL != playerPlayItf) {
+
+        // set the player's state
+        res = (*playerPlayItf)->SetPlayState(playerPlayItf, isPlaying ?
+            XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED);
+        assert(XA_RESULT_SUCCESS == res);
+
+    }
+
+}
+
+
+// shut down the native media system
+void Java_com_example_nativemedia_NativeMedia_shutdown(JNIEnv* env, jclass clazz)
+{
+    // destroy streaming media player object, and invalidate all associated interfaces
+    if (playerObj != NULL) {
+        (*playerObj)->Destroy(playerObj);
+        playerObj = NULL;
+        playerPlayItf = NULL;
+        playerBQItf = NULL;
+        playerStreamInfoItf = NULL;
+        playerVolItf = NULL;
+    }
+
+    // destroy output mix object, and invalidate all associated interfaces
+    if (outputMixObject != NULL) {
+        (*outputMixObject)->Destroy(outputMixObject);
+        outputMixObject = NULL;
+    }
+
+    // destroy engine object, and invalidate all associated interfaces
+    if (engineObject != NULL) {
+        (*engineObject)->Destroy(engineObject);
+        engineObject = NULL;
+        engineEngine = NULL;
+    }
+
+    // close the file
+    if (file != NULL) {
+        fclose(file);
+        file = NULL;
+    }
+
+    // make sure we don't leak native windows
+    if (theNativeWindow != NULL) {
+        ANativeWindow_release(theNativeWindow);
+        theNativeWindow = NULL;
+    }
+}
+
+
+// set the surface
+void Java_com_example_nativemedia_NativeMedia_setSurface(JNIEnv *env, jclass clazz, jobject surface)
+{
+    // obtain a native window from a Java surface
+    theNativeWindow = ANativeWindow_fromSurface(env, surface);
+}
+
+
+// rewind the streaming media player
+void Java_com_example_nativemedia_NativeMedia_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
+{
+    XAresult res;
+
+    // make sure the streaming media player was created
+    if (NULL != playerBQItf && NULL != file) {
+        // first wait for buffers currently in queue to be drained
+        int ok;
+        ok = pthread_mutex_lock(&mutex);
+        assert(0 == ok);
+        discontinuity = JNI_TRUE;
+        // wait for discontinuity request to be observed by buffer queue callback
+        // Note: can't rewind after EOS, which we send when reaching EOF
+        // (don't send EOS if you plan to play more content through the same player)
+        while (discontinuity && !reachedEof) {
+            ok = pthread_cond_wait(&cond, &mutex);
+            assert(0 == ok);
+        }
+        ok = pthread_mutex_unlock(&mutex);
+        assert(0 == ok);
+    }
+
+}
diff --git a/ndk/platforms/android-14/samples/native-media/project.properties b/ndk/platforms/android-14/samples/native-media/project.properties
new file mode 100644
index 0000000..8f51418
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/project.properties
@@ -0,0 +1,13 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system, use
+# "ant.properties" and override values to adapt the script to your
+# project structure.
+
+# Indicates whether an apk should be generated for each density.
+split.density=false
+# Project target.
+target=android-14
diff --git a/ndk/platforms/android-14/samples/native-media/res/drawable/icon.png b/ndk/platforms/android-14/samples/native-media/res/drawable/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/res/drawable/icon.png
Binary files differ
diff --git a/ndk/platforms/android-14/samples/native-media/res/layout/main.xml b/ndk/platforms/android-14/samples/native-media/res/layout/main.xml
new file mode 100644
index 0000000..0e41339
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/res/layout/main.xml
@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="fill_parent"
+    android:layout_height="fill_parent"
+    >
+<TextView
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    android:text="@string/hello"
+    />
+<TextView
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    android:text="@string/source_select"
+    />
+<Spinner
+    android:id="@+id/source_spinner"
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    android:text="@string/source_prompt"
+    />
+<TextView
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    android:text="@string/sink_select"
+    />
+<Spinner
+    android:id="@+id/sink_spinner"
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    android:text="@string/sink_prompt"
+    />
+
+<LinearLayout
+    android:orientation="horizontal"
+    android:layout_width="wrap_content"
+    android:layout_height="wrap_content"
+    >
+    <Button
+        android:id="@+id/start_java"
+        android:text="@string/start_java"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        />
+    <Button
+        android:id="@+id/start_native"
+        android:text="@string/start_native"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        />
+    <Button
+        android:id="@+id/finish"
+        android:text="@string/finish"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        />
+</LinearLayout>
+
+<LinearLayout
+    android:orientation="horizontal"
+    android:layout_width="wrap_content"
+    android:layout_height="wrap_content"
+    >
+    <Button
+        android:id="@+id/rewind_java"
+        android:text="@string/rewind_java"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        />
+    <Button
+        android:id="@+id/rewind_native"
+        android:text="@string/rewind_native"
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        />
+</LinearLayout>
+
+<LinearLayout
+    android:orientation="horizontal"
+    android:layout_width="wrap_content"
+    android:layout_height="wrap_content"
+    >
+    <TextView
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:text="S1"
+        />
+    <SurfaceView
+        android:id="@+id/surfaceview1"
+        android:layout_width="320px"
+        android:layout_height="240px"
+        />
+    <TextView
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:text="S2"
+        />
+    <SurfaceView
+        android:id="@+id/surfaceview2"
+        android:layout_width="400px"
+        android:layout_height="224px"
+        />
+</LinearLayout>
+
+<LinearLayout
+    android:orientation="horizontal"
+    android:layout_width="wrap_content"
+    android:layout_height="wrap_content"
+    >
+    <TextView
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:text="ST1"
+        />
+    <com.example.nativemedia.MyGLSurfaceView
+        android:id="@+id/glsurfaceview1"
+        android:layout_width="320px"
+        android:layout_height="240px"
+        />
+    <TextView
+        android:layout_width="fill_parent"
+        android:layout_height="wrap_content"
+        android:text="ST2"
+        />
+    <com.example.nativemedia.MyGLSurfaceView
+        android:id="@+id/glsurfaceview2"
+        android:layout_width="320px"
+        android:layout_height="240px"
+        />
+</LinearLayout>
+
+</LinearLayout>
diff --git a/ndk/platforms/android-14/samples/native-media/res/values/strings.xml b/ndk/platforms/android-14/samples/native-media/res/values/strings.xml
new file mode 100644
index 0000000..32a9a8e
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/res/values/strings.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="hello">Hello, Android, using native media!</string>
+    <string name="app_name">NativeMedia</string>
+
+    <string name="start_java">Start/Pause\nJava MediaPlayer</string>
+    <string name="start_native">Start/Pause\nnative MediaPlayer</string>
+    <string name="finish">Finish</string>
+
+    <string name="rewind_java">Rewind\nJava MediaPlayer</string>
+    <string name="rewind_native">Rewind\nnative MediaPlayer</string>
+
+    <string name="source_select">Please select the media source</string>
+    <string name="source_prompt">Media source</string>
+    <string-array name="source_array">
+        <item>/sdcard/NativeMedia.ts</item>
+    </string-array>
+
+    <string name="sink_select">Please select the video sink</string>
+    <string name="sink_prompt">Video sink</string>
+    <string-array name="sink_array">
+        <item>Surface 1</item>
+        <item>Surface 2</item>
+        <item>SurfaceTexture 1</item>
+        <item>SurfaceTexture 2</item>
+    </string-array>
+
+</resources>
diff --git a/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/MyGLSurfaceView.java b/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/MyGLSurfaceView.java
new file mode 100644
index 0000000..39a7ecf
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/MyGLSurfaceView.java
@@ -0,0 +1,336 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.nativemedia;
+
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.content.Context;
+
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.Matrix;
+
+import android.util.AttributeSet;
+
+public class MyGLSurfaceView extends GLSurfaceView {
+
+    MyRenderer mRenderer;
+
+    public MyGLSurfaceView(Context context) {
+        this(context, null);
+    }
+
+    public MyGLSurfaceView(Context context, AttributeSet attributeSet) {
+        super(context, attributeSet);
+        init();
+    }
+
+    private void init() {
+        setEGLContextClientVersion(2);
+        mRenderer = new MyRenderer();
+        setRenderer(mRenderer);
+    }
+
+    @Override
+    public void onPause() {
+        super.onPause();
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+    }
+
+    public SurfaceTexture getSurfaceTexture() {
+        return mRenderer.getSurfaceTexture();
+    }
+}
+
+class MyRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+
+    public MyRenderer() {
+        mVertices = ByteBuffer.allocateDirect(mVerticesData.length
+                * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
+        mVertices.put(mVerticesData).position(0);
+
+        Matrix.setIdentityM(mSTMatrix, 0);
+        Matrix.setIdentityM(mMMatrix, 0);
+        Matrix.rotateM(mMMatrix, 0, 20, 0, 1, 0);
+    }
+
+    public void onDrawFrame(GL10 glUnused) {
+        synchronized(this) {
+            if (updateSurface) {
+                mSurface.updateTexImage();
+
+                mSurface.getTransformMatrix(mSTMatrix);
+                updateSurface = false;
+            }
+        }
+
+        // Ignore the passed-in GL10 interface, and use the GLES20
+        // class's static methods instead.
+        GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glUseProgram(mProgram);
+        checkGlError("glUseProgram");
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+        mVertices.position(VERTICES_DATA_POS_OFFSET);
+        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+                VERTICES_DATA_STRIDE_BYTES, mVertices);
+        checkGlError("glVertexAttribPointer maPosition");
+        GLES20.glEnableVertexAttribArray(maPositionHandle);
+        checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+        mVertices.position(VERTICES_DATA_UV_OFFSET);
+        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+                VERTICES_DATA_STRIDE_BYTES, mVertices);
+        checkGlError("glVertexAttribPointer maTextureHandle");
+        GLES20.glEnableVertexAttribArray(maTextureHandle);
+        checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+        Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
+        Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
+
+        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+        checkGlError("glDrawArrays");
+    }
+
+    public void onSurfaceChanged(GL10 glUnused, int width, int height) {
+        // Ignore the passed-in GL10 interface, and use the GLES20
+        // class's static methods instead.
+        GLES20.glViewport(0, 0, width, height);
+        mRatio = (float) width / height;
+        Matrix.frustumM(mProjMatrix, 0, -mRatio, mRatio, -1, 1, 3, 7);
+    }
+
+    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
+        // Ignore the passed-in GL10 interface, and use the GLES20
+        // class's static methods instead.
+
+        /* Set up alpha blending and an Android background color */
+        GLES20.glEnable(GLES20.GL_BLEND);
+        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+        GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
+
+        /* Set up shaders and handles to their variables */
+        mProgram = createProgram(mVertexShader, mFragmentShader);
+        if (mProgram == 0) {
+            return;
+        }
+        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+        checkGlError("glGetAttribLocation aPosition");
+        if (maPositionHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aPosition");
+        }
+        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+        checkGlError("glGetAttribLocation aTextureCoord");
+        if (maTextureHandle == -1) {
+            throw new RuntimeException("Could not get attrib location for aTextureCoord");
+        }
+
+        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+        checkGlError("glGetUniformLocation uMVPMatrix");
+        if (muMVPMatrixHandle == -1) {
+            throw new RuntimeException("Could not get uniform location for uMVPMatrix");
+        }
+
+        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+        checkGlError("glGetUniformLocation uSTMatrix");
+        if (muSTMatrixHandle == -1) {
+            throw new RuntimeException("Could not get uniform location for uSTMatrix");
+        }
+
+        /*
+         * Create our texture. This has to be done each time the
+         * surface is created.
+         */
+
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+
+        mTextureID = textures[0];
+        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+        checkGlError("glBindTexture mTextureID");
+
+        // Can't do mipmapping with camera source
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+                GLES20.GL_NEAREST);
+        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+                GLES20.GL_LINEAR);
+        // Clamp to edge is the only option
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+                GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+                GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameteri mTextureID");
+
+        /*
+         * Create the SurfaceTexture that will feed this textureID, and pass it to the camera
+         */
+
+        mSurface = new SurfaceTexture(mTextureID);
+        mSurface.setOnFrameAvailableListener(this);
+
+        Matrix.setLookAtM(mVMatrix, 0, 0, 0, 4f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
+
+        synchronized(this) {
+            updateSurface = false;
+        }
+    }
+
+    synchronized public void onFrameAvailable(SurfaceTexture surface) {
+        /* For simplicity, SurfaceTexture calls here when it has new
+         * data available.  Call may come in from some random thread,
+         * so let's be safe and synchronize. No OpenGL calls can be done here.
+         */
+        updateSurface = true;
+        //Log.v(TAG, "onFrameAvailable " + surface.getTimestamp());
+    }
+
+    private int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        if (shader != 0) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                Log.e(TAG, "Could not compile shader " + shaderType + ":");
+                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
+                GLES20.glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+        return shader;
+    }
+
+    private int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            return 0;
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            return 0;
+        }
+
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertexShader);
+            checkGlError("glAttachShader");
+            GLES20.glAttachShader(program, pixelShader);
+            checkGlError("glAttachShader");
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                Log.e(TAG, "Could not link program: ");
+                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+                GLES20.glDeleteProgram(program);
+                program = 0;
+            }
+        }
+        return program;
+    }
+
+    private void checkGlError(String op) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            Log.e(TAG, op + ": glError " + error);
+            throw new RuntimeException(op + ": glError " + error);
+        }
+    }
+
+    private static final int FLOAT_SIZE_BYTES = 4;
+    private static final int VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+    private static final int VERTICES_DATA_POS_OFFSET = 0;
+    private static final int VERTICES_DATA_UV_OFFSET = 3;
+    private final float[] mVerticesData = {
+        // X, Y, Z, U, V
+        -1.0f, -1.0f, 0, 0.f, 0.f,
+        1.0f, -1.0f, 0, 1.f, 0.f,
+        -1.0f,  1.0f, 0, 0.f, 1.f,
+        1.0f,   1.0f, 0, 1.f, 1.f,
+    };
+
+    private FloatBuffer mVertices;
+
+    private final String mVertexShader =
+        "uniform mat4 uMVPMatrix;\n" +
+        "uniform mat4 uSTMatrix;\n" +
+        "attribute vec4 aPosition;\n" +
+        "attribute vec4 aTextureCoord;\n" +
+        "varying vec2 vTextureCoord;\n" +
+        "void main() {\n" +
+        "  gl_Position = uMVPMatrix * aPosition;\n" +
+        "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+        "}\n";
+
+    private final String mFragmentShader =
+        "#extension GL_OES_EGL_image_external : require\n" +
+        "precision mediump float;\n" +
+        "varying vec2 vTextureCoord;\n" +
+        "uniform samplerExternalOES sTexture;\n" +
+        "void main() {\n" +
+        "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+        "}\n";
+
+    private float[] mMVPMatrix = new float[16];
+    private float[] mProjMatrix = new float[16];
+    private float[] mMMatrix = new float[16];
+    private float[] mVMatrix = new float[16];
+    private float[] mSTMatrix = new float[16];
+
+    private int mProgram;
+    private int mTextureID;
+    private int muMVPMatrixHandle;
+    private int muSTMatrixHandle;
+    private int maPositionHandle;
+    private int maTextureHandle;
+
+    private float mRatio = 1.0f;
+    private SurfaceTexture mSurface;
+    private boolean updateSurface = false;
+
+    private static final String TAG = "MyRenderer";
+
+    // value of GL_TEXTURE_EXTERNAL_OES from the GL_OES_EGL_image_external extension
+    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+
+    public SurfaceTexture getSurfaceTexture() {
+        return mSurface;
+    }
+}
diff --git a/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/NativeMedia.java b/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/NativeMedia.java
new file mode 100644
index 0000000..3b5f49b
--- /dev/null
+++ b/ndk/platforms/android-14/samples/native-media/src/com/example/nativemedia/NativeMedia.java
@@ -0,0 +1,407 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.nativemedia;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.Spinner;
+import java.io.IOException;
+
+public class NativeMedia extends Activity {
+    static final String TAG = "NativeMedia";
+
+    String mSourceString = null;
+    String mSinkString = null;
+
+    // member variables for Java media player
+    MediaPlayer mMediaPlayer;
+    boolean mMediaPlayerIsPrepared = false;
+    SurfaceView mSurfaceView1;
+    SurfaceHolder mSurfaceHolder1;
+
+    // member variables for native media player
+    boolean mIsPlayingStreaming = false;
+    SurfaceView mSurfaceView2;
+    SurfaceHolder mSurfaceHolder2;
+
+    VideoSink mSelectedVideoSink;
+    VideoSink mJavaMediaPlayerVideoSink;
+    VideoSink mNativeMediaPlayerVideoSink;
+
+    SurfaceHolderVideoSink mSurfaceHolder1VideoSink, mSurfaceHolder2VideoSink;
+    GLViewVideoSink mGLView1VideoSink, mGLView2VideoSink;
+
+    /** Called when the activity is first created. */
+    @Override
+    public void onCreate(Bundle icicle) {
+        super.onCreate(icicle);
+        setContentView(R.layout.main);
+
+        mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
+        mGLView2 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview2);
+
+        // initialize native media system
+        createEngine();
+
+        // set up the Surface 1 video sink
+        mSurfaceView1 = (SurfaceView) findViewById(R.id.surfaceview1);
+        mSurfaceHolder1 = mSurfaceView1.getHolder();
+
+        mSurfaceHolder1.addCallback(new SurfaceHolder.Callback() {
+
+            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+                        + height);
+            }
+
+            public void surfaceCreated(SurfaceHolder holder) {
+                Log.v(TAG, "surfaceCreated");
+                setSurface(holder.getSurface());
+            }
+
+            public void surfaceDestroyed(SurfaceHolder holder) {
+                Log.v(TAG, "surfaceDestroyed");
+            }
+
+        });
+
+        // set up the Surface 2 video sink
+        mSurfaceView2 = (SurfaceView) findViewById(R.id.surfaceview2);
+        mSurfaceHolder2 = mSurfaceView2.getHolder();
+
+        mSurfaceHolder2.addCallback(new SurfaceHolder.Callback() {
+
+            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+                        + height);
+            }
+
+            public void surfaceCreated(SurfaceHolder holder) {
+                Log.v(TAG, "surfaceCreated");
+                setSurface(holder.getSurface());
+            }
+
+            public void surfaceDestroyed(SurfaceHolder holder) {
+                Log.v(TAG, "surfaceDestroyed");
+            }
+
+        });
+
+        // create Java media player
+        mMediaPlayer = new MediaPlayer();
+
+        // set up Java media player listeners
+        mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+
+            public void onPrepared(MediaPlayer mediaPlayer) {
+                int width = mediaPlayer.getVideoWidth();
+                int height = mediaPlayer.getVideoHeight();
+                Log.v(TAG, "onPrepared width=" + width + ", height=" + height);
+                if (width != 0 && height != 0 && mJavaMediaPlayerVideoSink != null) {
+                    mJavaMediaPlayerVideoSink.setFixedSize(width, height);
+                }
+                mMediaPlayerIsPrepared = true;
+                mediaPlayer.start();
+            }
+
+        });
+
+        mMediaPlayer.setOnVideoSizeChangedListener(new MediaPlayer.OnVideoSizeChangedListener() {
+
+            public void onVideoSizeChanged(MediaPlayer mediaPlayer, int width, int height) {
+                Log.v(TAG, "onVideoSizeChanged width=" + width + ", height=" + height);
+                if (width != 0 && height != 0 && mJavaMediaPlayerVideoSink != null) {
+                    mJavaMediaPlayerVideoSink.setFixedSize(width, height);
+                }
+            }
+
+        });
+
+        // initialize content source spinner
+        Spinner sourceSpinner = (Spinner) findViewById(R.id.source_spinner);
+        ArrayAdapter<CharSequence> sourceAdapter = ArrayAdapter.createFromResource(
+                this, R.array.source_array, android.R.layout.simple_spinner_item);
+        sourceAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        sourceSpinner.setAdapter(sourceAdapter);
+        sourceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+
+            public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+                mSourceString = parent.getItemAtPosition(pos).toString();
+                Log.v(TAG, "onItemSelected " + mSourceString);
+            }
+
+            public void onNothingSelected(AdapterView<?> parent) {
+                Log.v(TAG, "onNothingSelected");
+                mSourceString = null;
+            }
+
+        });
+
+        // initialize video sink spinner
+        Spinner sinkSpinner = (Spinner) findViewById(R.id.sink_spinner);
+        ArrayAdapter<CharSequence> sinkAdapter = ArrayAdapter.createFromResource(
+                this, R.array.sink_array, android.R.layout.simple_spinner_item);
+        sinkAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        sinkSpinner.setAdapter(sinkAdapter);
+        sinkSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+
+            public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+                mSinkString = parent.getItemAtPosition(pos).toString();
+                Log.v(TAG, "onItemSelected " + mSinkString);
+                if ("Surface 1".equals(mSinkString)) {
+                    if (mSurfaceHolder1VideoSink == null) {
+                        mSurfaceHolder1VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder1);
+                    }
+                    mSelectedVideoSink = mSurfaceHolder1VideoSink;
+                } else if ("Surface 2".equals(mSinkString)) {
+                    if (mSurfaceHolder2VideoSink == null) {
+                        mSurfaceHolder2VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder2);
+                    }
+                    mSelectedVideoSink = mSurfaceHolder2VideoSink;
+                } else if ("SurfaceTexture 1".equals(mSinkString)) {
+                    if (mGLView1VideoSink == null) {
+                        mGLView1VideoSink = new GLViewVideoSink(mGLView1);
+                    }
+                    mSelectedVideoSink = mGLView1VideoSink;
+                } else if ("SurfaceTexture 2".equals(mSinkString)) {
+                    if (mGLView2VideoSink == null) {
+                        mGLView2VideoSink = new GLViewVideoSink(mGLView2);
+                    }
+                    mSelectedVideoSink = mGLView2VideoSink;
+                }
+            }
+
+            public void onNothingSelected(AdapterView<?> parent) {
+                Log.v(TAG, "onNothingSelected");
+                mSinkString = null;
+                mSelectedVideoSink = null;
+            }
+
+        });
+
+        // initialize button click handlers
+
+        // Java MediaPlayer start/pause
+
+        ((Button) findViewById(R.id.start_java)).setOnClickListener(new View.OnClickListener() {
+
+            public void onClick(View view) {
+                if (mJavaMediaPlayerVideoSink == null) {
+                    if (mSelectedVideoSink == null) {
+                        return;
+                    }
+                    mSelectedVideoSink.useAsSinkForJava(mMediaPlayer);
+                    mJavaMediaPlayerVideoSink = mSelectedVideoSink;
+                }
+                if (!mMediaPlayerIsPrepared) {
+                    if (mSourceString != null) {
+                        try {
+                            mMediaPlayer.setDataSource(mSourceString);
+                        } catch (IOException e) {
+                            Log.e(TAG, "IOException " + e);
+                        }
+                        mMediaPlayer.prepareAsync();
+                    }
+                } else if (mMediaPlayer.isPlaying()) {
+                    mMediaPlayer.pause();
+                } else {
+                    mMediaPlayer.start();
+                }
+            }
+
+        });
+
+        // native MediaPlayer start/pause
+
+        ((Button) findViewById(R.id.start_native)).setOnClickListener(new View.OnClickListener() {
+
+            boolean created = false;
+            public void onClick(View view) {
+                if (!created) {
+                    if (mNativeMediaPlayerVideoSink == null) {
+                        if (mSelectedVideoSink == null) {
+                            return;
+                        }
+                        mSelectedVideoSink.useAsSinkForNative();
+                        mNativeMediaPlayerVideoSink = mSelectedVideoSink;
+                    }
+                    if (mSourceString != null) {
+                        created = createStreamingMediaPlayer(mSourceString);
+                    }
+                }
+                if (created) {
+                    mIsPlayingStreaming = !mIsPlayingStreaming;
+                    setPlayingStreamingMediaPlayer(mIsPlayingStreaming);
+                }
+            }
+
+        });
+
+        // finish
+
+        ((Button) findViewById(R.id.finish)).setOnClickListener(new View.OnClickListener() {
+
+            public void onClick(View view) {
+                finish();
+            }
+
+        });
+
+        // Java MediaPlayer rewind
+
+        ((Button) findViewById(R.id.rewind_java)).setOnClickListener(new View.OnClickListener() {
+
+            public void onClick(View view) {
+                if (mMediaPlayerIsPrepared) {
+                    mMediaPlayer.seekTo(0);
+                }
+            }
+
+        });
+
+        // native MediaPlayer rewind
+
+        ((Button) findViewById(R.id.rewind_native)).setOnClickListener(new View.OnClickListener() {
+
+            public void onClick(View view) {
+                if (mNativeMediaPlayerVideoSink != null) {
+                    rewindStreamingMediaPlayer();
+                }
+            }
+
+        });
+
+    }
+
+    /** Called when the activity is about to be paused. */
+    @Override
+    protected void onPause() {
+        mIsPlayingStreaming = false;
+        setPlayingStreamingMediaPlayer(false);
+        mGLView1.onPause();
+        mGLView2.onPause();
+        super.onPause();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        mGLView1.onResume();
+        mGLView2.onResume();
+    }
+
+    /** Called when the activity is about to be destroyed. */
+    @Override
+    protected void onDestroy() {
+        shutdown();
+        super.onDestroy();
+    }
+
+    private MyGLSurfaceView mGLView1, mGLView2;
+
+    /** Native methods, implemented in jni folder */
+    public static native void createEngine();
+    public static native boolean createStreamingMediaPlayer(String filename);
+    public static native void setPlayingStreamingMediaPlayer(boolean isPlaying);
+    public static native void shutdown();
+    public static native void setSurface(Surface surface);
+    public static native void rewindStreamingMediaPlayer();
+
+    /** Load the JNI .so at class initialization. */
+    static {
+        System.loadLibrary("native-media-jni");
+    }
+
+    // VideoSink abstracts out the difference between the two kinds of sink:
+    // a Surface obtained from a SurfaceHolder and one created from the
+    // SurfaceTexture of a GLSurfaceView.
+    static abstract class VideoSink {
+
+        abstract void setFixedSize(int width, int height);
+        abstract void useAsSinkForJava(MediaPlayer mediaPlayer);
+        abstract void useAsSinkForNative();
+
+    }
+
+    static class SurfaceHolderVideoSink extends VideoSink {
+
+        private final SurfaceHolder mSurfaceHolder;
+
+        SurfaceHolderVideoSink(SurfaceHolder surfaceHolder) {
+            mSurfaceHolder = surfaceHolder;
+        }
+
+        void setFixedSize(int width, int height) {
+            mSurfaceHolder.setFixedSize(width, height);
+        }
+
+        void useAsSinkForJava(MediaPlayer mediaPlayer) {
+            // Use MediaPlayer.setSurface(Surface), available since API level 14,
+            // instead of MediaPlayer.setDisplay(SurfaceHolder), available since
+            // API level 1, because setSurface also works with a Surface derived
+            // from a SurfaceTexture.
+            Surface s = mSurfaceHolder.getSurface();
+            mediaPlayer.setSurface(s);
+            s.release();
+        }
+
+        void useAsSinkForNative() {
+            Surface s = mSurfaceHolder.getSurface();
+            setSurface(s);
+            s.release();
+        }
+
+    }
+
+    static class GLViewVideoSink extends VideoSink {
+
+        private final MyGLSurfaceView mMyGLSurfaceView;
+
+        GLViewVideoSink(MyGLSurfaceView myGLSurfaceView) {
+            mMyGLSurfaceView = myGLSurfaceView;
+        }
+
+        void setFixedSize(int width, int height) {
+        }
+
+        void useAsSinkForJava(MediaPlayer mediaPlayer) {
+            SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
+            Surface s = new Surface(st);
+            mediaPlayer.setSurface(s);
+            s.release();
+        }
+
+        void useAsSinkForNative() {
+            SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
+            Surface s = new Surface(st);
+            setSurface(s);
+            s.release();
+        }
+
+    }
+
+}
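Both VideoSink implementations above follow the same pattern: wrap whatever the sink provides in a Surface, hand it to the consumer, then release the local reference, since the consumer retains its own. A minimal sketch of that pattern with a hypothetical helper name:

    import android.graphics.SurfaceTexture;
    import android.media.MediaPlayer;
    import android.view.Surface;

    // Hypothetical helper illustrating the sink pattern used above.
    final class SinkAttach {
        static void attach(MediaPlayer player, SurfaceTexture texture) {
            Surface surface = new Surface(texture); // wrap the texture in a Surface
            player.setSurface(surface);             // the player keeps its own reference
            surface.release();                      // so the local reference can be dropped
        }
    }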
diff --git a/ndk/platforms/android-3/include/sys/atomics.h b/ndk/platforms/android-3/include/sys/atomics.h
index d3fa145..3ada8de 100644
--- a/ndk/platforms/android-3/include/sys/atomics.h
+++ b/ndk/platforms/android-3/include/sys/atomics.h
@@ -33,10 +33,48 @@
 
 __BEGIN_DECLS
 
-extern int __atomic_cmpxchg(int old, int _new, volatile int *ptr);
-extern int __atomic_swap(int _new, volatile int *ptr);
-extern int __atomic_dec(volatile int *ptr);
-extern int __atomic_inc(volatile int *ptr);
+/* Note: atomic operations that were exported by the C library didn't
+ *       provide any memory barriers, which created potential issues on
+ *       multi-core devices. We now define them as inlined calls to
+ *       GCC sync builtins, which always provide a full barrier.
+ *
+ *       NOTE: The C library still exports atomic functions by the same
+ *             name to ensure ABI stability for existing NDK machine code.
+ *
+ *       If you are an NDK developer, we encourage you to rebuild your
+ *       unmodified sources against this header as soon as possible.
+ */
+#define __ATOMIC_INLINE__ static __inline__ __attribute__((always_inline))
+
+__ATOMIC_INLINE__ int
+__atomic_cmpxchg(int old, int _new, volatile int *ptr)
+{
+    /* We must return 0 on success */
+    return __sync_val_compare_and_swap(ptr, old, _new) != old;
+}
+
+__ATOMIC_INLINE__ int
+__atomic_swap(int _new, volatile int *ptr)
+{
+    int prev;
+    do {
+        prev = *ptr;
+    } while (__sync_val_compare_and_swap(ptr, prev, _new) != prev);
+    return prev;
+}
+
+__ATOMIC_INLINE__ int
+__atomic_dec(volatile int *ptr)
+{
+    return __sync_fetch_and_sub(ptr, 1);
+}
+
+__ATOMIC_INLINE__ int
+__atomic_inc(volatile int *ptr)
+{
+    return __sync_fetch_and_add(ptr, 1);
+}
 
 int __futex_wait(volatile void *ftx, int val, const struct timespec *timeout);
 int __futex_wake(volatile void *ftx, int count);
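Note the success convention: __atomic_cmpxchg returns 0 on success, the inverse of java.util.concurrent.atomic, whose compareAndSet returns true. A rough Java analogue of the four operations (an illustration of the semantics, not the C ABI):

    import java.util.concurrent.atomic.AtomicInteger;

    // Rough Java analogue; AtomicInteger operations are full-barrier,
    // matching the GCC __sync builtins used by the inline versions above.
    final class AtomicsAnalogy {
        static int cmpxchg(AtomicInteger v, int old, int _new) {
            return v.compareAndSet(old, _new) ? 0 : 1; // 0 on success, as above
        }
        static int swap(AtomicInteger v, int _new) {
            return v.getAndSet(_new);                  // returns the previous value
        }
        static int dec(AtomicInteger v) { return v.getAndDecrement(); }
        static int inc(AtomicInteger v) { return v.getAndIncrement(); }
    }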
diff --git a/ndk/platforms/android-9/arch-x86/include/sys/atomics.h b/ndk/platforms/android-9/arch-x86/include/sys/atomics.h
deleted file mode 100644
index 7aed3ae..0000000
--- a/ndk/platforms/android-9/arch-x86/include/sys/atomics.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *  * Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- *  * Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in
- *    the documentation and/or other materials provided with the
- *    distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
- * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
- * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
- * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
- * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
- * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
- * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-#ifndef _SYS_ATOMICS_H
-#define _SYS_ATOMICS_H
-
-#include <sys/cdefs.h>
-#include <sys/time.h>
-
-__BEGIN_DECLS
-
-static inline __attribute__((always_inline)) int
-__atomic_cmpxchg(int old, int _new, volatile int *ptr)
-{
-  return !__sync_bool_compare_and_swap (ptr, old, _new);
-}
-
-static inline __attribute__((always_inline)) int
-__atomic_swap(int _new, volatile int *ptr)
-{
-  return __sync_lock_test_and_set(ptr, _new);
-}
-
-static inline __attribute__((always_inline)) int
-__atomic_dec(volatile int *ptr)
-{
-  return __sync_fetch_and_sub (ptr, 1);
-}
-
-static inline __attribute__((always_inline)) int
-__atomic_inc(volatile int *ptr)
-{
-  return __sync_fetch_and_add (ptr, 1);
-}
-
-int __futex_wait(volatile void *ftx, int val, const struct timespec *timeout);
-int __futex_wake(volatile void *ftx, int count);
-
-__END_DECLS
-
-#endif /* _SYS_ATOMICS_H */
diff --git a/ndk/platforms/android-9/include/android/native_activity.h b/ndk/platforms/android-9/include/android/native_activity.h
index d89bc8b..52997bf 100644
--- a/ndk/platforms/android-9/include/android/native_activity.h
+++ b/ndk/platforms/android-9/include/android/native_activity.h
@@ -60,7 +60,14 @@
     JNIEnv* env;
 
     /**
-     * The NativeActivity Java class.
+     * The NativeActivity object handle.
+     *
+     * IMPORTANT NOTE: This member is mis-named. It should really be named
+     * 'activity' instead of 'clazz', since it's a reference to the
+     * NativeActivity instance created by the system for you.
+     *
+     * We unfortunately cannot change this without breaking NDK
+     * source-compatibility.
      */
     jobject clazz;
 
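From the Java side, the object that 'clazz' refers to is simply the NativeActivity instance the system instantiated. A minimal sketch:

    import android.app.NativeActivity;

    // The jobject named 'clazz' in ANativeActivity is a reference to the
    // instance of this class (or a subclass), not to its java.lang.Class.
    public class MyNativeActivity extends NativeActivity {
        // No code needed; native callbacks receive this instance as 'clazz'.
    }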
diff --git a/samples/ApiDemos/AndroidManifest.xml b/samples/ApiDemos/AndroidManifest.xml
index 4e63dde..6aac428 100644
--- a/samples/ApiDemos/AndroidManifest.xml
+++ b/samples/ApiDemos/AndroidManifest.xml
@@ -882,6 +882,16 @@
             </intent-filter>
         </activity>
 
+        <!-- Accessibility Samples -->
+        <activity android:name=".accessibility.AccessibilityNodeProviderActivity"
+                android:label="@string/accessibility_node_provider"
+                android:enabled="@bool/atLeastIceCreamSandwich">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.SAMPLE_CODE" />
+            </intent-filter>
+        </activity>
+
         <!-- Application Updating Samples -->
 
 <!-- BEGIN_INCLUDE(app_update_declaration) -->
diff --git a/samples/ApiDemos/_index.html b/samples/ApiDemos/_index.html
index 3c45843..421eca6 100644
--- a/samples/ApiDemos/_index.html
+++ b/samples/ApiDemos/_index.html
@@ -1,19 +1,19 @@
-<p>The API Demos application includes a variety of small applications 
+<p>The API Demos application includes a variety of small applications
 that illustrate the use of various Android APIs. It includes samples of:
 </p>
 <ul>
-  <li>Notifications</li>  
-  <li>Alarms</li>  
-  <li>Progress Dialogs</li>  
-  <li>Intents</li>  
-  <li>Menus</li>  
-  <li>Search</li>  
-  <li>Persistent application state</li>  
-  <li>Preferences</li>  
-  <li>Background Services</li>  
-  <li>App Widgets</li>  
-  <li>Voice Recognition</li>  
-  <li>And many many more...</li>  
+  <li>Notifications</li>
+  <li>Alarms</li>
+  <li>Progress Dialogs</li>
+  <li>Intents</li>
+  <li>Menus</li>
+  <li>Search</li>
+  <li>Persistent application state</li>
+  <li>Preferences</li>
+  <li>Background Services</li>
+  <li>App Widgets</li>
+  <li>Voice Recognition</li>
+  <li>And many many more...</li>
 </ul>
 
 <div class="note">
@@ -36,6 +36,7 @@
 <li><a href="src/com/example/android/apis/graphics/TouchPaint.html">Stylus and hover
 support</a></li>
 <li><a href="src/com/example/android/apis/view/Switches.html">Switch widget</a></li>
+<li><a href="src/com/example/android/apis/accessibility/AccessibilityNodeProviderActivity.html">Accessibility Node Provider</a></li>
 <li><a
   href="src/com/example/android/apis/accessibility/TaskBackService.html">Window
   Querying Accessibility Service</a></li>
diff --git a/samples/ApiDemos/res/layout/accessibility_node_provider.xml b/samples/ApiDemos/res/layout/accessibility_node_provider.xml
new file mode 100644
index 0000000..cc10c9c
--- /dev/null
+++ b/samples/ApiDemos/res/layout/accessibility_node_provider.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="wrap_content"
+    android:layout_height="wrap_content"
+    android:orientation="vertical">
+
+    <TextView
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginBottom="50dip"
+        android:text="@string/accessibility_node_provider_instructions">
+    </TextView>
+
+    <view
+        class="com.example.android.apis.accessibility.AccessibilityNodeProviderActivity$VirtualSubtreeRootView"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content" >
+    </view>
+
+</LinearLayout>
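The lowercase <view> element with a class attribute lets the inflater instantiate a nested View class by name; the named class must provide the (Context, AttributeSet) constructor that inflation invokes, as the activity's source below does. A minimal sketch of the requirement:

    import android.content.Context;
    import android.util.AttributeSet;
    import android.view.View;

    // Sketch only: any View referenced from XML by class name must expose
    // this two-argument constructor for the LayoutInflater to call.
    public class InflatableView extends View {
        public InflatableView(Context context, AttributeSet attrs) {
            super(context, attrs);
        }
    }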
diff --git a/samples/ApiDemos/res/values/strings.xml b/samples/ApiDemos/res/values/strings.xml
index 2913844..c57c03f 100644
--- a/samples/ApiDemos/res/values/strings.xml
+++ b/samples/ApiDemos/res/values/strings.xml
@@ -1292,6 +1292,10 @@
     <!--  Accessibility examples strings  -->
     <!-- ============================ -->
 
+    <string name="accessibility_node_provider">Accessibility/Accessibility Node Provider</string>
+    <string name="accessibility_node_provider_instructions">Enable TalkBack and Explore-by-touch from accessibility
+        settings. Then touch the colored squares.</string>
+
     <string name="accessibility_service">Accessibility/Accessibility Service</string>
     <string name="accessibility_service_label">ClockBack</string>
     <string name="accessibility_service_instructions">
diff --git a/samples/ApiDemos/src/com/example/android/apis/accessibility/AccessibilityNodeProviderActivity.java b/samples/ApiDemos/src/com/example/android/apis/accessibility/AccessibilityNodeProviderActivity.java
new file mode 100644
index 0000000..16914c7
--- /dev/null
+++ b/samples/ApiDemos/src/com/example/android/apis/accessibility/AccessibilityNodeProviderActivity.java
@@ -0,0 +1,484 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.android.apis.accessibility;
+
+import com.example.android.apis.R;
+
+import android.app.Activity;
+import android.app.Service;
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.os.Bundle;
+import android.text.TextUtils;
+import android.util.AttributeSet;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.accessibility.AccessibilityEvent;
+import android.view.accessibility.AccessibilityManager;
+import android.view.accessibility.AccessibilityNodeInfo;
+import android.view.accessibility.AccessibilityNodeProvider;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * This sample demonstrates how a View can expose a virtual view sub-tree
+ * rooted at it. A virtual sub-tree is composed of imaginary Views
+ * that are reported as a part of the view hierarchy for accessibility
+ * purposes. This enables custom views that draw complex content to report
+ * themselves as a tree of virtual views, thus conveying their logical
+ * structure.
+ * <p>
+ * For example, a View may draw a monthly calendar as a grid of days, where
+ * each day may contain some events. From the perspective of the view
+ * hierarchy the calendar is composed of a single View, but an accessibility
+ * service would benefit from traversing the logical structure of the
+ * calendar by examining each day and each event on that day.
+ * </p>
+ */
+public class AccessibilityNodeProviderActivity extends Activity {
+    /** Called when the activity is first created. */
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.accessibility_node_provider);
+    }
+
+    /**
+     * This class presents a View that is composed of three virtual children,
+     * each of which is drawn with a different color and represents a region
+     * of the View that has different semantics compared to other such regions.
+     * While the virtual view tree exposed by this class is one level deep
+     * for simplicity, there is no bound on the complexity of that virtual
+     * sub-tree.
+     */
+    public static class VirtualSubtreeRootView extends View {
+
+        /** Paint object for drawing the virtual sub-tree */
+        private final Paint mPaint = new Paint();
+
+        /** Temporary rectangle to minimize object creation. */
+        private final Rect mTempRect = new Rect();
+
+        /** Handle to the system accessibility service. */
+        private final AccessibilityManager mAccessibilityManager;
+
+        /** The virtual children of this View. */
+        private final List<VirtualView> mChildren = new ArrayList<VirtualView>();
+
+        /** The instance of the node provider for the virtual tree - lazily instantiated. */
+        private AccessibilityNodeProvider mAccessibilityNodeProvider;
+
+        /** The last hovered child used for event dispatching. */
+        private VirtualView mLastHoveredChild;
+
+        public VirtualSubtreeRootView(Context context, AttributeSet attrs) {
+            super(context, attrs);
+            mAccessibilityManager = (AccessibilityManager) context.getSystemService(
+                    Service.ACCESSIBILITY_SERVICE);
+            createVirtualChildren();
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public AccessibilityNodeProvider getAccessibilityNodeProvider() {
+            // Instantiate the provider only when requested. Since the system
+            // will call this method multiple times, it is good practice to
+            // cache the provider instance.
+            if (mAccessibilityNodeProvider == null) {
+                mAccessibilityNodeProvider = new VirtualDescendantsProvider();
+            }
+            return mAccessibilityNodeProvider;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public boolean dispatchHoverEvent(MotionEvent event) {
+            // This implementation assumes that the virtual children
+            // cannot overlap and are always visible. Do NOT use this
+            // code as a reference of how to implement hover event
+            // dispatch. Instead, refer to ViewGroup#dispatchHoverEvent.
+            boolean handled = false;
+            List<VirtualView> children = mChildren;
+            final int childCount = children.size();
+            for (int i = 0; i < childCount; i++) {
+                VirtualView child = children.get(i);
+                Rect childBounds = child.mBounds;
+                final int childCoordsX = (int) event.getX() + getScrollX();
+                final int childCoordsY = (int) event.getY() + getScrollY();
+                if (!childBounds.contains(childCoordsX, childCoordsY)) {
+                    continue;
+                }
+                final int action = event.getAction();
+                switch (action) {
+                    case MotionEvent.ACTION_HOVER_ENTER: {
+                        mLastHoveredChild = child;
+                        handled |= onHoverVirtualView(child, event);
+                        event.setAction(action);
+                    } break;
+                    case MotionEvent.ACTION_HOVER_MOVE: {
+                        if (child == mLastHoveredChild) {
+                            handled |= onHoverVirtualView(child, event);
+                            event.setAction(action);
+                        } else {
+                            MotionEvent eventNoHistory = event.getHistorySize() > 0
+                                ? MotionEvent.obtainNoHistory(event) : event;
+                            eventNoHistory.setAction(MotionEvent.ACTION_HOVER_EXIT);
+                            onHoverVirtualView(mLastHoveredChild, eventNoHistory);
+                            eventNoHistory.setAction(MotionEvent.ACTION_HOVER_ENTER);
+                            onHoverVirtualView(child, eventNoHistory);
+                            mLastHoveredChild = child;
+                            eventNoHistory.setAction(MotionEvent.ACTION_HOVER_MOVE);
+                            handled |= onHoverVirtualView(child, eventNoHistory);
+                            if (eventNoHistory != event) {
+                                eventNoHistory.recycle();
+                            } else {
+                                event.setAction(action);
+                            }
+                        }
+                    } break;
+                    case MotionEvent.ACTION_HOVER_EXIT: {
+                        mLastHoveredChild = null;
+                        handled |= onHoverVirtualView(child, event);
+                        event.setAction(action);
+                    } break;
+                }
+            }
+            if (!handled) {
+                handled |= onHoverEvent(event);
+            }
+            return handled;
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+            // The virtual children are ordered horizontally next to
+            // each other and take the entire space of this View.
+            int offsetX = 0;
+            List<VirtualView> children = mChildren;
+            final int childCount = children.size();
+            for (int i = 0; i < childCount; i++) {
+                VirtualView child = children.get(i);
+                Rect childBounds = child.mBounds;
+                childBounds.set(offsetX, 0, offsetX + childBounds.width(), childBounds.height());
+                offsetX += childBounds.width();
+            }
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+            // The virtual children are ordered horizontally next to
+            // each other and take the entire space of this View.
+            int width = 0;
+            int height = 0;
+            List<VirtualView> children = mChildren;
+            final int childCount = children.size();
+            for (int i = 0; i < childCount; i++) {
+                VirtualView child = children.get(i);
+                width += child.mBounds.width();
+                height = Math.max(height, child.mBounds.height());
+            }
+            setMeasuredDimension(width, height);
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        protected void onDraw(Canvas canvas) {
+            // Draw the virtual children with the reusable Paint object
+            // and with the bounds and color which are child specific.
+            Rect drawingRect = mTempRect;
+            List<VirtualView> children = mChildren;
+            final int childCount = children.size();
+            for (int i = 0; i < childCount; i++) {
+                VirtualView child = children.get(i);
+                drawingRect.set(child.mBounds);
+                mPaint.setColor(child.mColor);
+                mPaint.setAlpha(child.mAlpha);
+                canvas.drawRect(drawingRect, mPaint);
+            }
+        }
+
+        /**
+         * Creates the virtual children of this View.
+         */
+        private void createVirtualChildren() {
+            // The virtual portion of the tree is one level deep. Note
+            // that implementations can use any way of representing and
+            // drawing virtual views.
+            VirtualView firstChild = new VirtualView(0, new Rect(0, 0, 150, 150), Color.RED,
+                    "Virtual view 1");
+            mChildren.add(firstChild);
+            VirtualView secondChild = new VirtualView(1, new Rect(0, 0, 150, 150), Color.GREEN,
+                    "Virtual view 2");
+            mChildren.add(secondChild);
+            VirtualView thirdChild = new VirtualView(2, new Rect(0, 0, 150, 150), Color.BLUE,
+                    "Virtual view 3");
+            mChildren.add(thirdChild);
+        }
+
+        /**
+         * Sets the selected state of a virtual view.
+         *
+         * @param virtualView The virtual view whose selected state to set.
+         * @param selected Whether the virtual view is selected.
+         */
+        private void setVirtualViewSelected(VirtualView virtualView, boolean selected) {
+            virtualView.mAlpha = selected ? VirtualView.ALPHA_SELECTED : VirtualView.ALPHA_NOT_SELECTED;
+        }
+
+        /**
+         * Handles a hover over a virtual view.
+         *
+         * @param virtualView The virtual view being hovered over.
+         * @param event The event to dispatch.
+         * @return Whether the event was handled.
+         */
+        private boolean onHoverVirtualView(VirtualView virtualView, MotionEvent event) {
+            // Hover event dispatch can be implemented in any way that is found
+            // suitable. However, each virtual View should fire a corresponding
+            // accessibility event whose source is that virtual view.
+            // Accessibility services use the event source as the entry point
+            // of the APIs for querying the window content.
+            final int action = event.getAction();
+            switch (action) {
+                case MotionEvent.ACTION_HOVER_ENTER: {
+                    sendAccessibilityEventForVirtualView(virtualView,
+                            AccessibilityEvent.TYPE_VIEW_HOVER_ENTER);
+                } break;
+                case MotionEvent.ACTION_HOVER_EXIT: {
+                    sendAccessibilityEventForVirtualView(virtualView,
+                            AccessibilityEvent.TYPE_VIEW_HOVER_EXIT);
+                } break;
+            }
+            return true;
+        }
+
+        /**
+         * Sends a properly initialized accessibility event for a virtual view.
+         *
+         * @param virtualView The virtual view.
+         * @param eventType The type of the event to send.
+         */
+        private void sendAccessibilityEventForVirtualView(VirtualView virtualView, int eventType) {
+            // If touch exploration is enabled, i.e. the user gets feedback
+            // while touching the screen, fire the accessibility event.
+            if (mAccessibilityManager.isTouchExplorationEnabled()) {
+                AccessibilityEvent event = AccessibilityEvent.obtain(eventType);
+                event.setPackageName(getContext().getPackageName());
+                event.setClassName(virtualView.getClass().getName());
+                event.setSource(VirtualSubtreeRootView.this, virtualView.mId);
+                event.getText().add(virtualView.mText);
+                getParent().requestSendAccessibilityEvent(VirtualSubtreeRootView.this, event);
+            }
+        }
+
+        /**
+         * Finds a virtual view given its id.
+         *
+         * @param id The virtual view id.
+         * @return The found virtual view.
+         */
+        private VirtualView findVirtualViewById(int id) {
+            List<VirtualView> children = mChildren;
+            final int childCount = children.size();
+            for (int i = 0; i < childCount; i++) {
+                VirtualView child = children.get(i);
+                if (child.mId == id) {
+                    return child;
+                }
+            }
+            return null;
+        }
+
+        /**
+         * Represents a virtual View.
+         */
+        private class VirtualView {
+            public static final int ALPHA_SELECTED = 255;
+            public static final int ALPHA_NOT_SELECTED = 127;
+
+            public final int mId;
+            public final int mColor;
+            public final Rect mBounds;
+            public final String mText;
+            public int mAlpha;
+
+            public VirtualView(int id, Rect bounds, int color, String text) {
+                mId = id;
+                mColor = color;
+                mBounds = bounds;
+                mText = text;
+                mAlpha = ALPHA_NOT_SELECTED;
+            }
+        }
+
+        /**
+         * This is the provider that exposes the virtual View tree to accessibility
+         * services. From the perspective of an accessibility service the
+         * {@link AccessibilityNodeInfo}s it receives while exploring the sub-tree
+         * rooted at this View will be the same as the ones it received while
+         * exploring a View containing a sub-tree composed of real Views.
+         */
+        private class VirtualDescendantsProvider extends AccessibilityNodeProvider {
+
+            /**
+             * {@inheritDoc}
+             */
+            @Override
+            public AccessibilityNodeInfo createAccessibilityNodeInfo(int virtualViewId) {
+                AccessibilityNodeInfo info = null;
+                if (virtualViewId == View.NO_ID) {
+                    // We are requested to create an AccessibilityNodeInfo describing
+                    // this View, i.e. the root of the virtual sub-tree. Note that the
+                    // host View has an AccessibilityNodeProvider which means that this
+                    // provider is responsible for creating the node info for that root.
+                    info = AccessibilityNodeInfo.obtain(VirtualSubtreeRootView.this);
+                    onInitializeAccessibilityNodeInfo(info);
+                    // Add the virtual children of the root View.
+                    List<VirtualView> children = mChildren;
+                    final int childCount = children.size();
+                    for (int i = 0; i < childCount; i++) {
+                        VirtualView child = children.get(i);
+                        info.addChild(VirtualSubtreeRootView.this, child.mId);
+                    }
+                } else {
+                    // Find the view that corresponds to the given id.
+                    VirtualView virtualView = findVirtualViewById(virtualViewId);
+                    if (virtualView == null) {
+                        return null;
+                    }
+                    // Obtain and initialize an AccessibilityNodeInfo with
+                    // information about the virtual view.
+                    info = AccessibilityNodeInfo.obtain();
+                    info.addAction(AccessibilityNodeInfo.ACTION_SELECT);
+                    info.addAction(AccessibilityNodeInfo.ACTION_CLEAR_SELECTION);
+                    info.setPackageName(getContext().getPackageName());
+                    info.setClassName(virtualView.getClass().getName());
+                    info.setSource(VirtualSubtreeRootView.this, virtualViewId);
+                    info.setBoundsInParent(virtualView.mBounds);
+                    info.setParent(VirtualSubtreeRootView.this);
+                    info.setText(virtualView.mText);
+                }
+                return info;
+            }
+
+            /**
+             * {@inheritDoc}
+             */
+            @Override
+            public List<AccessibilityNodeInfo> findAccessibilityNodeInfosByText(String searched,
+                    int virtualViewId) {
+                if (TextUtils.isEmpty(searched)) {
+                    return Collections.emptyList();
+                }
+                String searchedLowerCase = searched.toLowerCase();
+                List<AccessibilityNodeInfo> result = null;
+                if (virtualViewId == View.NO_ID) {
+                    // If the search is from the root, i.e. this View, go over the virtual
+                    // children and look for ones that contain the searched string since
+                    // this View does not contain text itself.
+                    List<VirtualView> children = mChildren;
+                    final int childCount = children.size();
+                    for (int i = 0; i < childCount; i++) {
+                        VirtualView child = children.get(i);
+                        String textToLowerCase = child.mText.toLowerCase();
+                        if (textToLowerCase.contains(searchedLowerCase)) {
+                            if (result == null) {
+                                result = new ArrayList<AccessibilityNodeInfo>();
+                            }
+                            result.add(createAccessibilityNodeInfo(child.mId));
+                        }
+                    }
+                } else {
+                    // If the search is from a virtual view, find the view. Since the tree
+                    // is one level deep we add a node info for the child to the result if
+                    // the child contains the searched text.
+                    VirtualView virtualView = findVirtualViewById(virtualViewId);
+                    if (virtualView != null) {
+                        String textToLowerCase = virtualView.mText.toLowerCase();
+                        if (textToLowerCase.contains(searchedLowerCase)) {
+                            result = new ArrayList<AccessibilityNodeInfo>();
+                            result.add(createAccessibilityNodeInfo(virtualViewId));
+                        }
+                    }
+                }
+                if (result == null) {
+                    return Collections.emptyList();
+                }
+                return result;
+            }
+
+            /**
+             * {@inheritDoc}
+             */
+            @Override
+            public boolean performAccessibilityAction(int action, int virtualViewId) {
+                if (virtualViewId == View.NO_ID) {
+                    // Perform the action on the host View.
+                    switch (action) {
+                        case AccessibilityNodeInfo.ACTION_SELECT:
+                            if (!isSelected()) {
+                                setSelected(true);
+                                return isSelected();
+                            }
+                            break;
+                        case AccessibilityNodeInfo.ACTION_CLEAR_SELECTION:
+                            if (isSelected()) {
+                                setSelected(false);
+                                return !isSelected();
+                            }
+                            break;
+                    }
+                } else {
+                    // Find the view that corresponds to the given id.
+                    VirtualView child = findVirtualViewById(virtualViewId);
+                    if (child == null) {
+                        return false;
+                    }
+                    // Perform the action on a virtual view.
+                    switch (action) {
+                        case AccessibilityNodeInfo.ACTION_SELECT:
+                            setVirtualViewSelected(child, true);
+                            invalidate();
+                            return true;
+                        case AccessibilityNodeInfo.ACTION_CLEAR_SELECTION:
+                            setVirtualViewSelected(child, false);
+                            invalidate();
+                            return true;
+                    }
+                }
+                return false;
+            }
+        }
+    }
+}
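On the consuming side, an accessibility service sees the virtual children exactly as it would see real Views. A hedged sketch of a service walking the reported sub-tree from an event source (hypothetical service, API level 14 calls only):

    import android.accessibilityservice.AccessibilityService;
    import android.view.accessibility.AccessibilityEvent;
    import android.view.accessibility.AccessibilityNodeInfo;

    // Hypothetical service: traverses the sub-tree a provider like the one
    // above reports, and selects the first reachable virtual child.
    public class VirtualTreeWalkerService extends AccessibilityService {
        @Override
        public void onAccessibilityEvent(AccessibilityEvent event) {
            AccessibilityNodeInfo source = event.getSource();
            if (source == null) {
                return;
            }
            final int childCount = source.getChildCount();
            for (int i = 0; i < childCount; i++) {
                AccessibilityNodeInfo child = source.getChild(i);
                if (child != null) {
                    child.performAction(AccessibilityNodeInfo.ACTION_SELECT);
                    child.recycle();
                    break;
                }
            }
            source.recycle();
        }

        @Override
        public void onInterrupt() {
            // Nothing to interrupt in this sketch.
        }
    }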
diff --git a/samples/ApiDemos/src/com/example/android/apis/accessibility/_index.html b/samples/ApiDemos/src/com/example/android/apis/accessibility/_index.html
index 713d913..df54e96 100644
--- a/samples/ApiDemos/src/com/example/android/apis/accessibility/_index.html
+++ b/samples/ApiDemos/src/com/example/android/apis/accessibility/_index.html
@@ -1,3 +1,4 @@
+<h3 id="Accessibility">Accessibility</h3>
 <dl>
   <dt><a href="ClockBackService.html">Accessibility Service</a></dt>
   <dd>
@@ -20,6 +21,12 @@
     xml files, and adding additional information to AccessibilityEvents using
     AccessibilityRecords.
   </dd>
+  <dt><a href="AccessibilityNodeProviderActivity.html">Accessibility Node Provider</a></dt>
+  <dd>Demonstrates how to develop an accessibility node provider which manages a virtual
+    View tree reported to accessibility services. The virtual subtree is rooted at a View
+    that draws complex content and reports itself as a tree of virtual views, thus conveying
+    its logical structure.
+  </dd>
 </dl>
 
 <dl>
diff --git a/sdk/doc_source.prop_template b/sdk/doc_source.prop_template
new file mode 100644
index 0000000..d3cdfd5
--- /dev/null
+++ b/sdk/doc_source.prop_template
@@ -0,0 +1,4 @@
+Pkg.UserSrc=false
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
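The ${...} placeholders are substituted with build values when a template is rendered to its source.properties file. A small sketch of that substitution with assumed values (the real rendering is done by the build system):

    import java.util.Map;

    // Illustration only: render a *_source.prop_template with assumed values.
    final class PropTemplateDemo {
        static String render(String template, Map<String, String> values) {
            String out = template;
            for (Map.Entry<String, String> e : values.entrySet()) {
                out = out.replace("${" + e.getKey() + "}", e.getValue());
            }
            return out;
        }

        public static void main(String[] args) {
            String template = "AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}\n"
                            + "AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}\n";
            // Assumed values: API level 15, release build (empty codename).
            System.out.print(render(template, Map.of(
                    "PLATFORM_SDK_VERSION", "15",
                    "PLATFORM_VERSION_CODENAME", "")));
        }
    }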
diff --git a/sdk/doc_source.properties b/sdk/doc_source.properties
deleted file mode 100644
index 5b3dce6..0000000
--- a/sdk/doc_source.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-Pkg.UserSrc=false
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
-
diff --git a/sdk/images_armeabi-v7a_source.prop_template b/sdk/images_armeabi-v7a_source.prop_template
new file mode 100644
index 0000000..ae67647
--- /dev/null
+++ b/sdk/images_armeabi-v7a_source.prop_template
@@ -0,0 +1,6 @@
+Pkg.Desc=Android SDK Platform ${PLATFORM_VERSION}
+Pkg.UserSrc=false
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
+SystemImage.Abi=armeabi-v7a
diff --git a/sdk/images_armeabi-v7a_source.properties b/sdk/images_armeabi-v7a_source.properties
deleted file mode 100644
index 9280fdd..0000000
--- a/sdk/images_armeabi-v7a_source.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-Pkg.Desc=Android SDK Platform 4.1
-Pkg.UserSrc=false
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
-SystemImage.Abi=armeabi-v7a
diff --git a/sdk/images_armeabi_source.prop_template b/sdk/images_armeabi_source.prop_template
new file mode 100644
index 0000000..b100e53
--- /dev/null
+++ b/sdk/images_armeabi_source.prop_template
@@ -0,0 +1,6 @@
+Pkg.Desc=Android SDK Platform ${PLATFORM_VERSION}
+Pkg.UserSrc=false
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
+SystemImage.Abi=armeabi
diff --git a/sdk/images_armeabi_source.properties b/sdk/images_armeabi_source.properties
deleted file mode 100644
index 63fc677..0000000
--- a/sdk/images_armeabi_source.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-Pkg.Desc=Android SDK Platform 4.1
-Pkg.UserSrc=false
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
-SystemImage.Abi=armeabi
diff --git a/sdk/images_x86_source.prop_template b/sdk/images_x86_source.prop_template
new file mode 100644
index 0000000..62e2e0d
--- /dev/null
+++ b/sdk/images_x86_source.prop_template
@@ -0,0 +1,6 @@
+Pkg.Desc=Android SDK Platform ${PLATFORM_VERSION}
+Pkg.UserSrc=false
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
+SystemImage.Abi=x86
diff --git a/sdk/images_x86_source.properties b/sdk/images_x86_source.properties
deleted file mode 100644
index bb82577..0000000
--- a/sdk/images_x86_source.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-Pkg.Desc=Android SDK Platform 4.1
-Pkg.UserSrc=false
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
-SystemImage.Abi=x86
diff --git a/sdk/platform_source.prop_template b/sdk/platform_source.prop_template
new file mode 100644
index 0000000..17a5e4e
--- /dev/null
+++ b/sdk/platform_source.prop_template
@@ -0,0 +1,9 @@
+Pkg.Desc=Android SDK Platform ${PLATFORM_VERSION}
+Pkg.UserSrc=false
+Platform.Version=${PLATFORM_VERSION}
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
+Layoutlib.Api=7
+Layoutlib.Revision=1
+Platform.MinToolsRev=15
diff --git a/sdk/platform_source.properties b/sdk/platform_source.properties
deleted file mode 100644
index 0e70356..0000000
--- a/sdk/platform_source.properties
+++ /dev/null
@@ -1,9 +0,0 @@
-Pkg.Desc=Android SDK Platform 4.1
-Pkg.UserSrc=false
-Platform.Version=4.1
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
-Layoutlib.Api=7
-Layoutlib.Revision=1
-Platform.MinToolsRev=15
diff --git a/sdk/source_source.prop_template b/sdk/source_source.prop_template
new file mode 100644
index 0000000..d3cdfd5
--- /dev/null
+++ b/sdk/source_source.prop_template
@@ -0,0 +1,4 @@
+Pkg.UserSrc=false
+Pkg.Revision=1
+AndroidVersion.ApiLevel=${PLATFORM_SDK_VERSION}
+AndroidVersion.CodeName=${PLATFORM_VERSION_CODENAME}
diff --git a/sdk/source_source.properties b/sdk/source_source.properties
deleted file mode 100644
index d0b390b..0000000
--- a/sdk/source_source.properties
+++ /dev/null
@@ -1,4 +0,0 @@
-Pkg.UserSrc=false
-Pkg.Revision=1
-AndroidVersion.ApiLevel=15
-#AndroidVersion.CodeName=
diff --git a/sdk/support_source.properties b/sdk/support_source.properties
index 1aeb2c1..efe507a 100644
--- a/sdk/support_source.properties
+++ b/sdk/support_source.properties
@@ -3,4 +3,3 @@
 Extra.Vendor=android
 Extra.Path=support
 Extra.OldPaths=compatibility
-
diff --git a/tools/a3dconvert/SimpleMesh.h b/tools/a3dconvert/SimpleMesh.h
index c87bb7d..91b4823 100644
--- a/tools/a3dconvert/SimpleMesh.h
+++ b/tools/a3dconvert/SimpleMesh.h
@@ -106,13 +106,18 @@
             uint32_t vertexPos = i*vertexSize;
             float *vertexPtr = dataPtr + vertexPos;
 
+            uint32_t elemIndex = 0;
             for (uint32_t c = 0; c < mChannels.size(); c ++) {
                 // Skip empty channels
                 if (mChannels[c].mData.size() == 0) {
                     continue;
                 }
+                // Use the element's field offset so that vector channels with
+                // alignment padding (e.g. a float3 in a 16-byte slot) land at
+                // the right position within the vertex.
+                uint32_t elemOffset = vertexDataElem->getFieldOffsetBytes(elemIndex) / sizeof(float);
+                elemIndex++;
+                float *channelPtr = vertexPtr + elemOffset;
                 for (uint32_t cStride = 0; cStride < mChannels[c].mStride; cStride ++) {
-                    *(vertexPtr++) = mChannels[c].mData[i * mChannels[c].mStride + cStride];
+                    *(channelPtr++) = mChannels[c].mData[i * mChannels[c].mStride + cStride];
                 }
             }
         }
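The change above matters because channels are not necessarily tightly packed in the vertex element: a three-float channel, for instance, may occupy a 16-byte aligned slot, so the next channel starts at float index 4 rather than 3. A small worked illustration under that assumption:

    // Illustration only: why the field offset differs from a running count.
    final class OffsetDemo {
        public static void main(String[] args) {
            int packedIndex = 3;                     // naive count after a 3-float channel
            int alignedSlotBytes = 16;               // assumed padded slot for that channel
            int alignedIndex = alignedSlotBytes / 4; // 4 floats; the next channel's offset
            System.out.println("packed: " + packedIndex + ", aligned: " + alignedIndex);
        }
    }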
diff --git a/tools/a3dconvert/a3dconvert.cpp b/tools/a3dconvert/a3dconvert.cpp
index 3535b17..4e48642 100644
--- a/tools/a3dconvert/a3dconvert.cpp
+++ b/tools/a3dconvert/a3dconvert.cpp
@@ -44,21 +44,11 @@
 
 // We only care to implement allocation memory initialization and destruction
 // because we need no other renderscript hal features for serialization
-static RsdHalFunctions FunctionTable = {
-    NULL, NULL, NULL, NULL, NULL, NULL, NULL,
-    { NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,NULL },
-    {
-        rsdAllocationInit,
-        rsdAllocationDestroy,
-        NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
-    },
-    { NULL, NULL, NULL }, { NULL, NULL, NULL }, { NULL, NULL, NULL },
-    { NULL, NULL, NULL }, { NULL, NULL, NULL }, { NULL, NULL },
-    { NULL, NULL, NULL},
-};
-
-// No-op initizlizer for rs context hal since we only
+static RsdHalFunctions FunctionTable;
 bool rsdHalInit(Context *rsc, uint32_t version_major, uint32_t version_minor) {
+    memset(&FunctionTable, 0, sizeof(FunctionTable));
+    FunctionTable.allocation.init = rsdAllocationInit;
+    FunctionTable.allocation.destroy = rsdAllocationDestroy;
     rsc->mHal.funcs = FunctionTable;
     return true;
 }
diff --git a/tools/emulator/system/camera/CallbackNotifier.cpp b/tools/emulator/system/camera/CallbackNotifier.cpp
index f974b86..f908e15 100755
--- a/tools/emulator/system/camera/CallbackNotifier.cpp
+++ b/tools/emulator/system/camera/CallbackNotifier.cpp
@@ -80,7 +80,7 @@
     const char* strs[lCameraMessagesNum];
     const int translated = GetMessageStrings(msg, strs, lCameraMessagesNum);
     for (int n = 0; n < translated; n++) {
-        LOGV("    %s", strs[n]);
+        ALOGV("    %s", strs[n]);
     }
 }
 
@@ -113,7 +113,7 @@
                                     camera_request_memory get_memory,
                                     void* user)
 {
-    LOGV("%s: %p, %p, %p, %p (%p)",
+    ALOGV("%s: %p, %p, %p, %p (%p)",
          __FUNCTION__, notify_cb, data_cb, data_cb_timestamp, get_memory, user);
 
     Mutex::Autolock locker(&mObjectLock);
@@ -126,29 +126,29 @@
 
 void CallbackNotifier::enableMessage(uint msg_type)
 {
-    LOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
+    ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
     PrintMessages(msg_type);
 
     Mutex::Autolock locker(&mObjectLock);
     mMessageEnabler |= msg_type;
-    LOGV("**** Currently enabled messages:");
+    ALOGV("**** Currently enabled messages:");
     PrintMessages(mMessageEnabler);
 }
 
 void CallbackNotifier::disableMessage(uint msg_type)
 {
-    LOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
+    ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
     PrintMessages(msg_type);
 
     Mutex::Autolock locker(&mObjectLock);
     mMessageEnabler &= ~msg_type;
-    LOGV("**** Currently enabled messages:");
+    ALOGV("**** Currently enabled messages:");
     PrintMessages(mMessageEnabler);
 }
 
 status_t CallbackNotifier::enableVideoRecording(int fps)
 {
-    LOGV("%s: FPS = %d", __FUNCTION__, fps);
+    ALOGV("%s: FPS = %d", __FUNCTION__, fps);
 
     Mutex::Autolock locker(&mObjectLock);
     mVideoRecEnabled = true;
@@ -160,7 +160,7 @@
 
 void CallbackNotifier::disableVideoRecording()
 {
-    LOGV("%s:", __FUNCTION__);
+    ALOGV("%s:", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     mVideoRecEnabled = false;
diff --git a/tools/emulator/system/camera/EmulatedCamera.cpp b/tools/emulator/system/camera/EmulatedCamera.cpp
index 02b4683..25847a4 100755
--- a/tools/emulator/system/camera/EmulatedCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera.cpp
@@ -28,7 +28,7 @@
 #include <cutils/log.h>
 #include <ui/Rect.h>
 #include "EmulatedCamera.h"
-#include "EmulatedFakeCameraDevice.h"
+//#include "EmulatedFakeCameraDevice.h"
 #include "Converters.h"
 
 /* Defines whether we should trace parameter changes. */
@@ -99,12 +99,8 @@
      * Fake required parameters.
      */
 
-    mParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
-                    "320x240,0x0");
-    mParameters.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "6");
-    mParameters.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "-6");
-    mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, "0.5");
-    mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, "0");
+    mParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, "320x240,0x0");
+
     mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, "512");
     mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, "384");
     mParameters.set(CameraParameters::KEY_JPEG_QUALITY, "90");
@@ -137,10 +133,22 @@
                     CameraParameters::PIXEL_FORMAT_JPEG);
     mParameters.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
 
-    /*
-     * Not supported features
-     */
+    /* Disable exposure compensation support by default. */
+    mParameters.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "0");
+    mParameters.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "0");
 
+    /* Sets auto white balance as default. */
+    getCameraDevice()->initializeWhiteBalanceModes(
+            CameraParameters::WHITE_BALANCE_AUTO, 1.0f, 1.0f);
+    mParameters.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
+                    CameraParameters::WHITE_BALANCE_AUTO);
+    mParameters.set(CameraParameters::KEY_WHITE_BALANCE,
+                    CameraParameters::WHITE_BALANCE_AUTO);
+    getCameraDevice()->setWhiteBalanceMode(
+            mParameters.get(CameraParameters::KEY_WHITE_BALANCE));
+
+    /* Features that are not supported. */
     mParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,
                     CameraParameters::FOCUS_MODE_FIXED);
     mParameters.set(CameraParameters::KEY_FOCUS_MODE,
@@ -172,7 +180,7 @@
 
 status_t EmulatedCamera::connectCamera(hw_device_t** device)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     status_t res = EINVAL;
     EmulatedCameraDevice* const camera_dev = getCameraDevice();
@@ -191,14 +199,14 @@
 
 status_t EmulatedCamera::closeCamera()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     return cleanupCamera();
 }
 
 status_t EmulatedCamera::getCameraInfo(struct camera_info* info)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     const char* valstr = NULL;
 
@@ -301,7 +309,7 @@
 
 status_t EmulatedCamera::setAutoFocus()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     /* TODO: Future enhancements. */
     return NO_ERROR;
@@ -309,7 +317,7 @@
 
 status_t EmulatedCamera::cancelAutoFocus()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     /* TODO: Future enhancements. */
     return NO_ERROR;
@@ -317,7 +325,7 @@
 
 status_t EmulatedCamera::takePicture()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     status_t res;
     int width, height;
@@ -394,20 +402,23 @@
 
 status_t EmulatedCamera::cancelPicture()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     return NO_ERROR;
 }
 
 status_t EmulatedCamera::setParameters(const char* parms)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
     PrintParamDiff(mParameters, parms);
 
     CameraParameters new_param;
     String8 str8_param(parms);
     new_param.unflatten(str8_param);
 
+    /*
+     * Check for a new exposure compensation parameter.
+     */
     int new_exposure_compensation = new_param.getInt(
             CameraParameters::KEY_EXPOSURE_COMPENSATION);
     const int min_exposure_compensation = new_param.getInt(
@@ -435,6 +446,24 @@
                     exposure_value);
         }
     }
+
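+    /*
+     * Check for a new white balance parameter.
+     */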
+    const char* new_white_balance = new_param.get(
+            CameraParameters::KEY_WHITE_BALANCE);
+    const char* supported_white_balance = new_param.get(
+            CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+
+    if ((supported_white_balance != NULL) && (new_white_balance != NULL) &&
+        (strstr(supported_white_balance, new_white_balance) != NULL)) {
+
+        const char* current_white_balance = mParameters.get(
+                CameraParameters::KEY_WHITE_BALANCE);
+        if ((current_white_balance == NULL) ||
+            (strcmp(current_white_balance, new_white_balance) != 0)) {
+            ALOGV("Setting white balance to %s", new_white_balance);
+            getCameraDevice()->setWhiteBalanceMode(new_white_balance);
+        }
+    }
+
     mParameters = new_param;
 
     return NO_ERROR;
@@ -469,7 +498,7 @@
 
 status_t EmulatedCamera::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
 {
-    LOGV("%s: cmd = %d, arg1 = %d, arg2 = %d", __FUNCTION__, cmd, arg1, arg2);
+    ALOGV("%s: cmd = %d, arg1 = %d, arg2 = %d", __FUNCTION__, cmd, arg1, arg2);
 
     /* TODO: Future enhancements. */
     return 0;
@@ -477,14 +506,14 @@
 
 void EmulatedCamera::releaseCamera()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     cleanupCamera();
 }
 
 status_t EmulatedCamera::dumpCamera(int fd)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     /* TODO: Future enhancements. */
     return -EINVAL;
@@ -496,7 +525,7 @@
 
 status_t EmulatedCamera::doStartPreview()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     EmulatedCameraDevice* camera_dev = getCameraDevice();
     if (camera_dev->isStarted()) {
@@ -584,7 +613,7 @@
 
 status_t EmulatedCamera::doStopPreview()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     status_t res = NO_ERROR;
     if (mPreviewWindow.isPreviewEnabled()) {
diff --git a/tools/emulator/system/camera/EmulatedCameraCommon.h b/tools/emulator/system/camera/EmulatedCameraCommon.h
index 907985a..c1d575c 100755
--- a/tools/emulator/system/camera/EmulatedCameraCommon.h
+++ b/tools/emulator/system/camera/EmulatedCameraCommon.h
@@ -34,12 +34,12 @@
     /* Constructor that prints an "entry" trace message. */
     explicit HWERoutineTracker(const char* name)
             : mName(name) {
-        LOGV("Entering %s", mName);
+        ALOGV("Entering %s", mName);
     }
 
     /* Destructor that prints a "leave" trace message. */
     ~HWERoutineTracker() {
-        LOGV("Leaving %s", mName);
+        ALOGV("Leaving %s", mName);
     }
 
 private:
diff --git a/tools/emulator/system/camera/EmulatedCameraDevice.cpp b/tools/emulator/system/camera/EmulatedCameraDevice.cpp
index 71464d2..c633892 100755
--- a/tools/emulator/system/camera/EmulatedCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraDevice.cpp
@@ -29,25 +29,33 @@
 #include <sys/select.h>
 #include <cmath>
 #include "EmulatedCameraDevice.h"
-#include "Converters.h"
 
 namespace android {
 
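+/* Approximate display gamma, used to map EV steps onto gamma-encoded luma. */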
+const float GAMMA_CORRECTION = 2.2f;
+
 EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
     : mObjectLock(),
       mCurFrameTimestamp(0),
       mCameraHAL(camera_hal),
       mCurrentFrame(NULL),
       mExposureCompensation(1.0f),
+      mWhiteBalanceScale(NULL),
+      mSupportedWhiteBalanceScale(),
       mState(ECDS_CONSTRUCTED)
 {
 }
 
 EmulatedCameraDevice::~EmulatedCameraDevice()
 {
+    ALOGV("EmulatedCameraDevice destructor");
     if (mCurrentFrame != NULL) {
         delete[] mCurrentFrame;
     }
+    for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
+        if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
+            delete[] mSupportedWhiteBalanceScale.valueAt(i);
+        }
+    }
 }
 
 /****************************************************************************
@@ -76,7 +84,7 @@
 
 status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (!isStarted()) {
         LOGE("%s: Device is not started", __FUNCTION__);
@@ -91,7 +99,7 @@
 
 status_t EmulatedCameraDevice::stopDeliveringFrames()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (!isStarted()) {
         LOGW("%s: Device is not started", __FUNCTION__);
@@ -104,14 +112,47 @@
 }
 
 void EmulatedCameraDevice::setExposureCompensation(const float ev) {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (!isStarted()) {
         LOGW("%s: Fake camera device is not started.", __FUNCTION__);
     }
 
-    mExposureCompensation = std::pow(2.0f, ev);
-    LOGV("New exposure compensation is %f", mExposureCompensation);
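+    /* The compensation scales gamma-encoded luma, so scaling linear-light
+     * exposure by 2^ev corresponds to scaling Y by 2^(ev / gamma). */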
+    mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
+    ALOGV("New exposure compensation is %f", mExposureCompensation);
+}
+
+void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
+                                                       const float r_scale,
+                                                       const float b_scale) {
+    ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
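+    /* The scale factors are stored as {R, G, B}; the G channel is fixed at
+     * 1.0 so that the overall luminance stays roughly constant. */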
+    float* value = new float[3];
+    value[0] = r_scale;
+    value[1] = 1.0f;
+    value[2] = b_scale;
+    mSupportedWhiteBalanceScale.add(String8(mode), value);
+}
+
+void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
+    ALOGV("%s with white balance %s", __FUNCTION__, mode);
+    mWhiteBalanceScale =
+            mSupportedWhiteBalanceScale.valueFor(String8(mode));
+}
+
+/* Computes the pixel value after adjusting it to the current white balance.
+ * The inputs are the pixel's Y, U, and V channels; the adjusted values are
+ * stored in place. The adjustment is done in RGB space.
+ */
+void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
+                                              uint8_t& u,
+                                              uint8_t& v) const {
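+    /* Assumes setWhiteBalanceMode() has been called, so that
+     * mWhiteBalanceScale points at a valid scale triplet. */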
+    float r_scale = mWhiteBalanceScale[0];
+    float b_scale = mWhiteBalanceScale[2];
+    int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
+    int g = YUV2G(y, u, v);
+    int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;
+
+    y = RGB2Y(r, g, b);
+    u = RGB2U(r, g, b);
+    v = RGB2V(r, g, b);
 }
 
 status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
@@ -182,7 +223,7 @@
         LOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
         return ENOMEM;
     }
-    LOGV("%s: Allocated %p %d bytes for %d pixels in %.4s[%dx%d] frame",
+    ALOGV("%s: Allocated %p %d bytes for %d pixels in %.4s[%dx%d] frame",
          __FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
          reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
     return NO_ERROR;
@@ -205,7 +246,7 @@
 
 status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (!isInitialized()) {
         LOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
@@ -219,7 +260,7 @@
 
 status_t EmulatedCameraDevice::stopWorkerThread()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (!isInitialized()) {
         LOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
@@ -244,7 +285,7 @@
 
 status_t EmulatedCameraDevice::WorkerThread::readyToRun()
 {
-    LOGV("Starting emulated camera device worker thread...");
+    ALOGV("Starting emulated camera device worker thread...");
 
     LOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
             "%s: Thread control FDs are opened", __FUNCTION__);
@@ -253,7 +294,7 @@
     if (pipe(thread_fds) == 0) {
         mThreadControl = thread_fds[1];
         mControlFD = thread_fds[0];
-        LOGV("Emulated device's worker thread has been started.");
+        ALOGV("Emulated device's worker thread has been started.");
         return NO_ERROR;
     } else {
         LOGE("%s: Unable to create thread control FDs: %d -> %s",
@@ -264,7 +305,7 @@
 
 status_t EmulatedCameraDevice::WorkerThread::stopThread()
 {
-    LOGV("Stopping emulated camera device's worker thread...");
+    ALOGV("Stopping emulated camera device's worker thread...");
 
     status_t res = EINVAL;
     if (mThreadControl >= 0) {
@@ -285,7 +326,7 @@
                     close(mControlFD);
                     mControlFD = -1;
                 }
-                LOGV("Emulated camera device's worker thread has been stopped.");
+                ALOGV("Emulated camera device's worker thread has been stopped.");
             } else {
                 LOGE("%s: requestExitAndWait failed: %d -> %s",
                      __FUNCTION__, res, strerror(-res));
@@ -339,7 +380,7 @@
         }
         /* THREAD_STOP is the only message expected here. */
         if (msg == THREAD_STOP) {
-            LOGV("%s: THREAD_STOP message is received", __FUNCTION__);
+            ALOGV("%s: THREAD_STOP message is received", __FUNCTION__);
             return EXIT_THREAD;
         } else {
             LOGE("Unknown worker thread message %d", msg);
diff --git a/tools/emulator/system/camera/EmulatedCameraDevice.h b/tools/emulator/system/camera/EmulatedCameraDevice.h
index 357c9e6..b099906 100755
--- a/tools/emulator/system/camera/EmulatedCameraDevice.h
+++ b/tools/emulator/system/camera/EmulatedCameraDevice.h
@@ -27,7 +27,10 @@
  */
 
 #include <utils/threads.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
 #include "EmulatedCameraCommon.h"
+#include "Converters.h"
 
 namespace android {
 
@@ -116,6 +119,21 @@
      */
     virtual status_t Initialize();
 
+    /* Initializes the white balance mode parameters.
+     * The parameters are passed in by each derived camera class, since
+     * different camera manufacturers may prefer different white balance
+     * scale factors. The green channel in the RGB color space is fixed to
+     * keep the luminance reasonably constant.
+     *
+     * Param:
+     * mode - the text describing the current white balance mode.
+     * r_scale - the scale factor for the R channel in RGB space.
+     * b_scale - the scale factor for the B channel in RGB space.
+     */
+    void initializeWhiteBalanceModes(const char* mode,
+                                     const float r_scale,
+                                     const float b_scale);
+
     /* Starts delivering frames captured from the camera device.
      * This method will start the worker thread that would be pulling frames from
      * the camera device, and will deliver the pulled frames back to the emulated
@@ -145,7 +163,11 @@
 
     /* Sets the exposure compensation for the camera device.
      */
-    virtual void setExposureCompensation(const float ev);
+    void setExposureCompensation(const float ev);
+
+    /* Sets the white balance mode for the device.
+     */
+    void setWhiteBalanceMode(const char* mode);
 
     /* Gets current framebuffer, converted into preview frame format.
      * This method must be called on a connected instance of this class with a
@@ -269,6 +291,24 @@
      */
     virtual void commonStopDevice();
 
+    /** Computes a luminance value after taking the exposure compensation
+     * value into account.
+     *
+     * Param:
+     * inputY - The input luminance value.
+     * Return:
+     * The luminance value after adjusting the exposure compensation.
+     */
+    inline uint8_t changeExposure(const uint8_t& inputY) const {
+        return static_cast<uint8_t>(clamp(static_cast<float>(inputY) *
+                                    mExposureCompensation));
+    }
+
+    /** Computes the pixel value in YUV space after adjusting to the current
+     * white balance mode.
+     */
+    void changeWhiteBalance(uint8_t& y, uint8_t& u, uint8_t& v) const;
+
     /****************************************************************************
      * Worker thread management.
     * Typically, when the emulated camera device starts capturing frames from the
@@ -478,6 +518,10 @@
     /* Exposure compensation value */
     float                       mExposureCompensation;
 
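+    /* Scale factors {R, G, B} for the currently selected white balance mode. */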
+    float*                      mWhiteBalanceScale;
+
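+    /* Maps each supported white balance mode to its channel scale factors. */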
+    DefaultKeyedVector<String8, float*>      mSupportedWhiteBalanceScale;
+
     /* Defines possible states of the emulated camera device object.
      */
     enum EmulatedCameraDeviceState {
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.cpp b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
index 5c5c5de..dc84357 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
@@ -86,7 +86,7 @@
         LOGD("Fake camera emulation is disabled.");
     }
 
-    LOGV("%d cameras are being emulated. Fake camera ID is %d",
+    ALOGV("%d cameras are being emulated. Fake camera ID is %d",
          mEmulatedCameraNum, mFakeCameraID);
 
     mConstructedOK = true;
@@ -114,7 +114,7 @@
 
 int EmulatedCameraFactory::cameraDeviceOpen(int camera_id, hw_device_t** device)
 {
-    LOGV("%s: id = %d", __FUNCTION__, camera_id);
+    ALOGV("%s: id = %d", __FUNCTION__, camera_id);
 
     *device = NULL;
 
@@ -134,7 +134,7 @@
 
 int EmulatedCameraFactory::getCameraInfo(int camera_id, struct camera_info* info)
 {
-    LOGV("%s: id = %d", __FUNCTION__, camera_id);
+    ALOGV("%s: id = %d", __FUNCTION__, camera_id);
 
     if (!isConstructedOK()) {
         LOGE("%s: EmulatedCameraFactory has failed to initialize", __FUNCTION__);
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera.cpp b/tools/emulator/system/camera/EmulatedFakeCamera.cpp
index 86b9d08..beb6329 100755
--- a/tools/emulator/system/camera/EmulatedFakeCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera.cpp
@@ -61,6 +61,33 @@
                     gEmulatedCameraFactory.getFakeCameraOrientation());
 
     res = EmulatedCamera::Initialize();
+
+    mParameters.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, "6");
+    mParameters.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, "-6");
+    mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, "0.5");
+    mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, "0");
+    ALOGV("Set camera supported exposure values");
+
+    // Sets the white balance modes and the device-dependent scale factors.
+    mFakeCameraDevice.initializeWhiteBalanceModes(
+            CameraParameters::WHITE_BALANCE_INCANDESCENT, 1.38f, 0.60f);
+    mFakeCameraDevice.initializeWhiteBalanceModes(
+            CameraParameters::WHITE_BALANCE_DAYLIGHT, 1.09f, 0.92f);
+    mFakeCameraDevice.initializeWhiteBalanceModes(
+            CameraParameters::WHITE_BALANCE_TWILIGHT, 0.92f, 1.22f);
+
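+    // Build the comma-separated supported list; the base class has already
+    // registered WHITE_BALANCE_AUTO, so the new modes are appended to it.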
+    char supported_white_balance[1024];
+    snprintf(supported_white_balance, sizeof(supported_white_balance),
+             "%s,%s,%s,%s",
+             mParameters.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE),
+             CameraParameters::WHITE_BALANCE_INCANDESCENT,
+             CameraParameters::WHITE_BALANCE_DAYLIGHT,
+             CameraParameters::WHITE_BALANCE_TWILIGHT);
+    mParameters.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
+                    supported_white_balance);
+
+    ALOGV("Set camera supported white balance modes");
+
     if (res != NO_ERROR) {
         return res;
     }
diff --git a/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp b/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
index 3666827..051c28a 100755
--- a/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
@@ -46,11 +46,11 @@
 {
     // Makes the image with the original exposure compensation darker.
     // So the effects of changing the exposure compensation can be seen.
-    mBlackYUV.Y = mBlackYUV.Y / 4;
-    mWhiteYUV.Y = mWhiteYUV.Y / 4;
-    mRedYUV.Y = mRedYUV.Y / 4;
-    mGreenYUV.Y = mGreenYUV.Y / 4;
-    mBlueYUV.Y = mBlueYUV.Y / 4;
+    mBlackYUV.Y = mBlackYUV.Y / 2;
+    mWhiteYUV.Y = mWhiteYUV.Y / 2;
+    mRedYUV.Y = mRedYUV.Y / 2;
+    mGreenYUV.Y = mGreenYUV.Y / 2;
+    mBlueYUV.Y = mBlueYUV.Y / 2;
 }
 
 EmulatedFakeCameraDevice::~EmulatedFakeCameraDevice()
@@ -63,7 +63,7 @@
 
 status_t EmulatedFakeCameraDevice::connectDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isInitialized()) {
@@ -83,7 +83,7 @@
 
 status_t EmulatedFakeCameraDevice::disconnectDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isConnected()) {
@@ -105,7 +105,7 @@
                                                int height,
                                                uint32_t pix_fmt)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isConnected()) {
@@ -170,7 +170,7 @@
 
 status_t EmulatedFakeCameraDevice::stopDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isStarted()) {
@@ -195,7 +195,7 @@
     WorkerThread::SelectRes res =
         getWorkerThread()->Select(-1, 1000000 / mEmulatedFPS);
     if (res == WorkerThread::EXIT_THREAD) {
-        LOGV("%s: Worker thread has been terminated.", __FUNCTION__);
+        ALOGV("%s: Worker thread has been terminated.", __FUNCTION__);
         return false;
     }
 
@@ -256,6 +256,9 @@
     uint8_t* U = U_pos;
     uint8_t* V = V_pos;
 
+    YUVPixel adjustedWhite = YUVPixel(mWhiteYUV);
+    changeWhiteBalance(adjustedWhite.Y, adjustedWhite.U, adjustedWhite.V);
+
     for(int y = 0; y < mFrameHeight; y++) {
         int countx = checkxremainder;
         bool current = black;
@@ -263,7 +266,7 @@
             if (current) {
                 mBlackYUV.get(Y, U, V);
             } else {
-                mWhiteYUV.get(Y, U, V);
+                adjustedWhite.get(Y, U, V);
             }
             *Y = changeExposure(*Y);
             Y[1] = *Y;
@@ -407,7 +410,7 @@
             }
         } else if (mCurrentFrameType == 0) {
             LOGD("********** Rotated to the CHECKERBOARD frame **********");
-        } else {
+        } else if (mCurrentFrameType == 1) {
             LOGD("********** Rotated to the STRIPED frame **********");
         }
     }
diff --git a/tools/emulator/system/camera/EmulatedFakeCameraDevice.h b/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
index 383118b..f66f076 100755
--- a/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
+++ b/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
@@ -99,6 +99,7 @@
      ***************************************************************************/
 
 private:
+
     /* Draws a black and white checker board in the current frame buffer. */
     void drawCheckerboard();
 
@@ -110,10 +111,6 @@
      */
     void drawSquare(int x, int y, int size, const YUVPixel* color);
 
-    inline uint8_t changeExposure(uint8_t inputY) {
-        return static_cast<uint8_t>(static_cast<float>(inputY) *
-                                    mExposureCompensation);
-    }
 #if EFCD_ROTATE_FRAME
     void drawSolid(YUVPixel* color);
     void drawStripes();
diff --git a/tools/emulator/system/camera/EmulatedQemuCamera.cpp b/tools/emulator/system/camera/EmulatedQemuCamera.cpp
index 611b6b5..d1b2b78 100755
--- a/tools/emulator/system/camera/EmulatedQemuCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedQemuCamera.cpp
@@ -45,7 +45,7 @@
                                         const char* frame_dims,
                                         const char* facing_dir)
 {
-    LOGV("%s:\n   Name=%s\n   Facing '%s'\n   Dimensions=%s",
+    ALOGV("%s:\n   Name=%s\n   Facing '%s'\n   Dimensions=%s",
          __FUNCTION__, device_name, facing_dir, frame_dims);
     /* Save dimensions. */
     mFrameDims = frame_dims;
@@ -105,7 +105,7 @@
     mParameters.setPreviewSize(x, y);
     mParameters.setPictureSize(x, y);
 
-    LOGV("%s: Qemu camera %s is initialized. Current frame is %dx%d",
+    ALOGV("%s: Qemu camera %s is initialized. Current frame is %dx%d",
          __FUNCTION__, device_name, x, y);
 
     return NO_ERROR;
diff --git a/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp b/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
index 57dbc98..d95307d 100755
--- a/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
@@ -58,7 +58,7 @@
     /* Initialize base class. */
     res = EmulatedCameraDevice::Initialize();
     if (res == NO_ERROR) {
-        LOGV("%s: Connected to the emulated camera service '%s'",
+        ALOGV("%s: Connected to the emulated camera service '%s'",
              __FUNCTION__, device_name);
         mDeviceName = device_name;
     } else {
@@ -74,7 +74,7 @@
 
 status_t EmulatedQemuCameraDevice::connectDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isInitialized()) {
@@ -90,7 +90,7 @@
     /* Connect to the camera device via emulator. */
     const status_t res = mQemuClient.queryConnect();
     if (res == NO_ERROR) {
-        LOGV("%s: Connected to device '%s'",
+        ALOGV("%s: Connected to device '%s'",
              __FUNCTION__, (const char*)mDeviceName);
         mState = ECDS_CONNECTED;
     } else {
@@ -103,7 +103,7 @@
 
 status_t EmulatedQemuCameraDevice::disconnectDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isConnected()) {
@@ -120,7 +120,7 @@
     /* Disconnect from the camera device via emulator. */
     const status_t res = mQemuClient.queryDisconnect();
     if (res == NO_ERROR) {
-        LOGV("%s: Disonnected from device '%s'",
+        ALOGV("%s: Disconnected from device '%s'",
              __FUNCTION__, (const char*)mDeviceName);
         mState = ECDS_INITIALIZED;
     } else {
@@ -135,7 +135,7 @@
                                                int height,
                                                uint32_t pix_fmt)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isConnected()) {
@@ -168,7 +168,7 @@
     /* Start the actual camera device. */
     res = mQemuClient.queryStart(mPixelFormat, mFrameWidth, mFrameHeight);
     if (res == NO_ERROR) {
-        LOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+        ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
              __FUNCTION__, (const char*)mDeviceName,
              reinterpret_cast<const char*>(&mPixelFormat),
              mFrameWidth, mFrameHeight);
@@ -184,7 +184,7 @@
 
 status_t EmulatedQemuCameraDevice::stopDevice()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     if (!isStarted()) {
@@ -202,7 +202,7 @@
         }
         EmulatedCameraDevice::commonStopDevice();
         mState = ECDS_CONNECTED;
-        LOGV("%s: Qemu camera device '%s' is stopped",
+        ALOGV("%s: Qemu camera device '%s' is stopped",
              __FUNCTION__, (const char*)mDeviceName);
     } else {
         LOGE("%s: Unable to stop device '%s'",
@@ -237,7 +237,7 @@
     WorkerThread::SelectRes res =
         getWorkerThread()->Select(-1, 1000000 / mEmulatedFPS);
     if (res == WorkerThread::EXIT_THREAD) {
-        LOGV("%s: Worker thread has been terminated.", __FUNCTION__);
+        ALOGV("%s: Worker thread has been terminated.", __FUNCTION__);
         return false;
     }
 
diff --git a/tools/emulator/system/camera/JpegCompressor.cpp b/tools/emulator/system/camera/JpegCompressor.cpp
index 0e538a1..ebcd915 100644
--- a/tools/emulator/system/camera/JpegCompressor.cpp
+++ b/tools/emulator/system/camera/JpegCompressor.cpp
@@ -44,7 +44,7 @@
                                               int height,
                                               int quality)
 {
-    LOGV("%s: %p[%dx%d]", __FUNCTION__, image, width, height);
+    ALOGV("%s: %p[%dx%d]", __FUNCTION__, image, width, height);
     void* pY = const_cast<void*>(image);
     int offsets[2];
     offsets[0] = 0;
@@ -52,7 +52,7 @@
     mStrides[0] = width;
     mStrides[1] = width;
     if (encode(&mStream, pY, width, height, offsets, quality)) {
-        LOGV("%s: Compressed JPEG: %d[%dx%d] -> %d bytes",
+        ALOGV("%s: Compressed JPEG: %d[%dx%d] -> %d bytes",
              __FUNCTION__, (width * height * 12) / 8, width, height, mStream.getOffset());
         return NO_ERROR;
     } else {
diff --git a/tools/emulator/system/camera/PreviewWindow.cpp b/tools/emulator/system/camera/PreviewWindow.cpp
index fb708d5..db48b10 100755
--- a/tools/emulator/system/camera/PreviewWindow.cpp
+++ b/tools/emulator/system/camera/PreviewWindow.cpp
@@ -49,7 +49,7 @@
 status_t PreviewWindow::setPreviewWindow(struct preview_stream_ops* window,
                                          int preview_fps)
 {
-    LOGV("%s: current: %p -> new: %p", __FUNCTION__, mPreviewWindow, window);
+    ALOGV("%s: current: %p -> new: %p", __FUNCTION__, mPreviewWindow, window);
 
     status_t res = NO_ERROR;
     Mutex::Autolock locker(&mObjectLock);
@@ -81,7 +81,7 @@
 
 status_t PreviewWindow::startPreview()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     mPreviewEnabled = true;
@@ -91,7 +91,7 @@
 
 void PreviewWindow::stopPreview()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     Mutex::Autolock locker(&mObjectLock);
     mPreviewEnabled = false;
@@ -117,7 +117,7 @@
         /* Need to set / adjust buffer geometry for the preview window.
          * Note that in the emulator preview window uses only RGB for pixel
          * formats. */
-        LOGV("%s: Adjusting preview windows %p geometry to %dx%d",
+        ALOGV("%s: Adjusting preview window's %p geometry to %dx%d",
              __FUNCTION__, mPreviewWindow, mPreviewFrameWidth,
              mPreviewFrameHeight);
         res = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
diff --git a/tools/emulator/system/camera/QemuClient.cpp b/tools/emulator/system/camera/QemuClient.cpp
index fd49585..fe761b0 100755
--- a/tools/emulator/system/camera/QemuClient.cpp
+++ b/tools/emulator/system/camera/QemuClient.cpp
@@ -212,7 +212,7 @@
 
 status_t QemuClient::connectClient(const char* param)
 {
-    LOGV("%s: '%s'", __FUNCTION__, param ? param : "");
+    ALOGV("%s: '%s'", __FUNCTION__, param ? param : "");
 
     /* Make sure that client is not connected already. */
     if (mPipeFD >= 0) {
@@ -247,7 +247,7 @@
 
 void QemuClient::disconnectClient()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     if (mPipeFD >= 0) {
         close(mPipeFD);
@@ -386,7 +386,7 @@
 
 status_t FactoryQemuClient::listCameras(char** list)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     QemuQuery query(mQueryList);
     if (doQuery(&query) || !query.isQuerySucceeded()) {
@@ -445,7 +445,7 @@
 
 status_t CameraQemuClient::queryConnect()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     QemuQuery query(mQueryConnect);
     doQuery(&query);
@@ -458,7 +458,7 @@
 
 status_t CameraQemuClient::queryDisconnect()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     QemuQuery query(mQueryDisconnect);
     doQuery(&query);
@@ -473,7 +473,7 @@
                                       int width,
                                       int height)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     char query_str[256];
     snprintf(query_str, sizeof(query_str), "%s dim=%dx%d pix=%d",
@@ -489,7 +489,7 @@
 
 status_t CameraQemuClient::queryStop()
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     QemuQuery query(mQueryStop);
     doQuery(&query);
@@ -505,7 +505,7 @@
                                       size_t vframe_size,
                                       size_t pframe_size)
 {
-    LOGV("%s", __FUNCTION__);
+    ALOGV("%s", __FUNCTION__);
 
     char query_str[256];
     snprintf(query_str, sizeof(query_str), "%s video=%d preview=%d",
diff --git a/tools/emulator/system/camera/media_profiles.xml b/tools/emulator/system/camera/media_profiles.xml
index ae1ce88..42ceb8d 100644
--- a/tools/emulator/system/camera/media_profiles.xml
+++ b/tools/emulator/system/camera/media_profiles.xml
@@ -75,6 +75,7 @@
 <!ATTLIST VideoEditorCap maxInputFrameHeight CDATA #REQUIRED>
 <!ATTLIST VideoEditorCap maxOutputFrameWidth CDATA #REQUIRED>
 <!ATTLIST VideoEditorCap maxOutputFrameHeight CDATA #REQUIRED>
+<!ATTLIST VideoEditorCap maxPrefetchYUVFrames CDATA #REQUIRED>
 <!ELEMENT ExportVideoProfile EMPTY>
 <!ATTLIST ExportVideoProfile name (h264|h263|m4v) #REQUIRED>
 <!ATTLIST ExportVideoProfile profile CDATA #REQUIRED>
@@ -374,9 +375,22 @@
     -->
     <VideoDecoderCap name="wmv" enabled="false"/>
     <AudioDecoderCap name="wma" enabled="false"/>
+
+    <!--
+        The VideoEditor Capability configuration:
+        - maxInputFrameWidth: maximum video width of imported video clip.
+        - maxInputFrameHeight: maximum video height of imported video clip.
+        - maxOutputFrameWidth: maximum video width of exported video clip.
+        - maxOutputFrameHeight: maximum video height of exported video clip.
+        - maxPrefetchYUVFrames: maximum number of YUV frames prefetched for
+        the encoder, used to limit the amount of memory used for prefetched
+        YUV frames. For this platform, it allows a maximum of ~1MB of memory
+        (~0.1MB per QVGA frame x 10 frames).
+    -->
+
     <VideoEditorCap  maxInputFrameWidth="320"
         maxInputFrameHeight="240" maxOutputFrameWidth="320"
-        maxOutputFrameHeight="240"/>
+        maxOutputFrameHeight="240" maxPrefetchYUVFrames="10" />
     <!--
         The VideoEditor Export codec profile and level values
         correspond to the values in OMX_Video.h.
diff --git a/tools/emulator/system/sensors/sensors_qemu.c b/tools/emulator/system/sensors/sensors_qemu.c
index 9a776c7..9f5feca 100644
--- a/tools/emulator/system/sensors/sensors_qemu.c
+++ b/tools/emulator/system/sensors/sensors_qemu.c
@@ -206,7 +206,7 @@
 }
 
 static int
-control__close(struct hw_device_t *dev) 
+control__close(struct hw_device_t *dev)
 {
     SensorPoll*  ctl = (void*)dev;
     close(ctl->fd);
@@ -350,7 +350,7 @@
         }
 
         /* "temperature:<celsius>" */
-        if (sscanf(buff, "temperature:%g", params+0) == 2) {
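+        /* Only one conversion is requested, so sscanf() returns 1 on success. */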
+        if (sscanf(buff, "temperature:%g", params+0) == 1) {
             new_sensors |= SENSORS_TEMPERATURE;
             data->sensors[ID_TEMPERATURE].temperature = params[0];
             continue;
@@ -397,7 +397,7 @@
 }
 
 static int
-data__close(struct hw_device_t *dev) 
+data__close(struct hw_device_t *dev)
 {
     SensorPoll* data = (SensorPoll*)dev;
     if (data) {
@@ -545,7 +545,7 @@
 static struct sensor_t  sSensorList[MAX_NUM_SENSORS];
 
 static int sensors__get_sensors_list(struct sensors_module_t* module,
-        struct sensor_t const** list) 
+        struct sensor_t const** list)
 {
     int  fd = qemud_channel_open(SENSORS_SERVICE_NAME);
     char buffer[12];
diff --git a/tools/emulator/test-apps/GpsLocationTest/src/com/android/emulator/gps/test/GpsLocationTest.java b/tools/emulator/test-apps/GpsLocationTest/src/com/android/emulator/gps/test/GpsLocationTest.java
index c0ea0fa..6eb3834 100644
--- a/tools/emulator/test-apps/GpsLocationTest/src/com/android/emulator/gps/test/GpsLocationTest.java
+++ b/tools/emulator/test-apps/GpsLocationTest/src/com/android/emulator/gps/test/GpsLocationTest.java
@@ -17,24 +17,30 @@
 
 import android.content.Context;
 import android.location.Location;
+import android.location.LocationListener;
 import android.location.LocationManager;
+import android.os.Bundle;
+import android.os.HandlerThread;
 import android.test.AndroidTestCase;
 
+import junit.framework.Assert;
+
 /**
  * GPS Location Test
  *
  * Test the GPS API by verifying the previously set location
  */
-public class GpsLocationTest extends AndroidTestCase {
+public class GpsLocationTest extends AndroidTestCase implements LocationListener {
 
     private LocationManager locationManager;
-
+    private Location mLocation;
+
     /**
      * Prior to running this test the GPS location must be set to the following
      * longitude and latitude coordinates via the geo fix command
      */
     private static final double LONGITUDE = -122.08345770835876;
     private static final double LATITUDE = 37.41991859119417;
+    private static final int TIMEOUT = 5000;
 
     @Override
     protected void setUp() throws Exception {
@@ -48,9 +54,37 @@
      * via geo fix command
      */
     public void testCurrentLocationGivenLocation(){
-        Location lastLocation = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
-        assertNotNull(lastLocation);
-        assertEquals(lastLocation.getLongitude(), LONGITUDE);
-        assertEquals(lastLocation.getLatitude(), LATITUDE);
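+        // Register for updates on a background HandlerThread and block until
+        // onLocationChanged() calls notify(), or until the timeout expires.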
+        try {
+            synchronized (this) {
+                HandlerThread handlerThread = new HandlerThread("testLocationUpdates");
+                handlerThread.start();
+                locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this,
+                        handlerThread.getLooper());
+                this.wait(TIMEOUT);
+            }
+        } catch (InterruptedException ie) {
+            ie.printStackTrace();
+            Assert.fail();
+        }
+        assertNotNull(mLocation);
+        assertEquals(new Float(LONGITUDE), new Float(mLocation.getLongitude()));
+        assertEquals(new Float(LATITUDE), new Float(mLocation.getLatitude()));
+        locationManager.removeUpdates(this);
+    }
+
+    public void onLocationChanged(Location location) {
+        synchronized (this) {
+            mLocation = location;
+            this.notify();
+        }
+    }
+
+    public void onProviderDisabled(String arg0) {
+    }
+
+    public void onProviderEnabled(String arg0) {
+    }
+
+    public void onStatusChanged(String arg0, int arg1, Bundle arg2) {
     }
 }