Merge "Add support for timestamps into SurfaceTexture."
diff --git a/api/12.xml b/api/12.xml
index 7835c7f..3785c97 100644
--- a/api/12.xml
+++ b/api/12.xml
@@ -73375,8 +73375,6 @@
 </parameter>
 <parameter name="event" type="android.drm.DrmEvent">
 </parameter>
-<parameter name="attributes" type="java.util.HashMap&lt;java.lang.String, java.lang.Object&gt;">
-</parameter>
 </method>
 </interface>
 <interface name="DrmManagerClient.OnInfoListener"
diff --git a/api/current.xml b/api/current.xml
index b5cb881..4150f16 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -72665,6 +72665,22 @@
 <parameter name="message" type="java.lang.String">
 </parameter>
 </constructor>
+<constructor name="DrmErrorEvent"
+ type="android.drm.DrmErrorEvent"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="uniqueId" type="int">
+</parameter>
+<parameter name="type" type="int">
+</parameter>
+<parameter name="message" type="java.lang.String">
+</parameter>
+<parameter name="attributes" type="java.util.HashMap&lt;java.lang.String, java.lang.Object&gt;">
+</parameter>
+</constructor>
 <field name="TYPE_ACQUIRE_DRM_INFO_FAILED"
  type="int"
  transient="false"
@@ -72775,7 +72791,36 @@
 </parameter>
 <parameter name="message" type="java.lang.String">
 </parameter>
+<parameter name="attributes" type="java.util.HashMap&lt;java.lang.String, java.lang.Object&gt;">
+</parameter>
 </constructor>
+<constructor name="DrmEvent"
+ type="android.drm.DrmEvent"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="protected"
+>
+<parameter name="uniqueId" type="int">
+</parameter>
+<parameter name="type" type="int">
+</parameter>
+<parameter name="message" type="java.lang.String">
+</parameter>
+</constructor>
+<method name="getAttribute"
+ return="java.lang.Object"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="key" type="java.lang.String">
+</parameter>
+</method>
 <method name="getMessage"
  return="java.lang.String"
  abstract="false"
@@ -72809,6 +72854,17 @@
  visibility="public"
 >
 </method>
+<field name="DRM_INFO_OBJECT"
+ type="java.lang.String"
+ transient="false"
+ volatile="false"
+ value="&quot;drm_info_object&quot;"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="DRM_INFO_STATUS_OBJECT"
  type="java.lang.String"
  transient="false"
@@ -72985,6 +73041,22 @@
 <parameter name="message" type="java.lang.String">
 </parameter>
 </constructor>
+<constructor name="DrmInfoEvent"
+ type="android.drm.DrmInfoEvent"
+ static="false"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="uniqueId" type="int">
+</parameter>
+<parameter name="type" type="int">
+</parameter>
+<parameter name="message" type="java.lang.String">
+</parameter>
+<parameter name="attributes" type="java.util.HashMap&lt;java.lang.String, java.lang.Object&gt;">
+</parameter>
+</constructor>
 <field name="TYPE_ACCOUNT_ALREADY_REGISTERED"
  type="int"
  transient="false"
@@ -73767,8 +73839,6 @@
 </parameter>
 <parameter name="event" type="android.drm.DrmEvent">
 </parameter>
-<parameter name="attributes" type="java.util.HashMap&lt;java.lang.String, java.lang.Object&gt;">
-</parameter>
 </method>
 </interface>
 <interface name="DrmManagerClient.OnInfoListener"
@@ -213330,6 +213400,17 @@
  visibility="public"
 >
 </field>
+<field name="KEYCODE_3D_MODE"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="206"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="KEYCODE_4"
  type="int"
  transient="false"
@@ -214485,6 +214566,17 @@
  visibility="public"
 >
 </field>
+<field name="KEYCODE_LANGUAGE_SWITCH"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="204"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="KEYCODE_LEFT_BRACKET"
  type="int"
  transient="false"
@@ -214507,6 +214599,17 @@
  visibility="public"
 >
 </field>
+<field name="KEYCODE_MANNER_MODE"
+ type="int"
+ transient="false"
+ volatile="false"
+ value="205"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
 <field name="KEYCODE_MEDIA_CLOSE"
  type="int"
  transient="false"
@@ -220968,7 +221071,7 @@
  deprecated="not deprecated"
  visibility="public"
 >
-<parameter name="ev" type="android.view.MotionEvent">
+<parameter name="event" type="android.view.MotionEvent">
 </parameter>
 </method>
 <method name="clear"
@@ -243907,7 +244010,7 @@
  synchronized="false"
  static="false"
  final="false"
- deprecated="not deprecated"
+ deprecated="deprecated"
  visibility="public"
 >
 </method>
diff --git a/core/java/android/bluetooth/BluetoothDeviceProfileState.java b/core/java/android/bluetooth/BluetoothDeviceProfileState.java
index 9855709..f4693c2 100644
--- a/core/java/android/bluetooth/BluetoothDeviceProfileState.java
+++ b/core/java/android/bluetooth/BluetoothDeviceProfileState.java
@@ -679,7 +679,6 @@
         @Override
         protected boolean processMessage(Message message) {
             log("IncomingA2dp State->Processing Message: " + message.what);
-            Message deferMsg = new Message();
             switch(message.what) {
                 case CONNECT_HFP_OUTGOING:
                     deferMessage(message);
diff --git a/core/java/android/util/JsonReader.java b/core/java/android/util/JsonReader.java
index 8f44895..563c500 100644
--- a/core/java/android/util/JsonReader.java
+++ b/core/java/android/util/JsonReader.java
@@ -86,7 +86,11 @@
  *
  *   public List<Message> readJsonStream(InputStream in) throws IOException {
  *     JsonReader reader = new JsonReader(new InputStreamReader(in, "UTF-8"));
- *     return readMessagesArray(reader);
+ *     try {
+ *       return readMessagesArray(reader);
+ *     } finally {
+ *       reader.close();
+ *     }
  *   }
  *
  *   public List<Message> readMessagesArray(JsonReader reader) throws IOException {
diff --git a/core/java/android/util/LruCache.java b/core/java/android/util/LruCache.java
index 834dac3..5540000 100644
--- a/core/java/android/util/LruCache.java
+++ b/core/java/android/util/LruCache.java
@@ -304,7 +304,8 @@
     }
 
     /**
-     * Returns the number of times {@link #get} returned a value.
+     * Returns the number of times {@link #get} returned a value that was
+     * already present in the cache.
      */
     public synchronized final int hitCount() {
         return hitCount;
diff --git a/core/java/android/view/KeyEvent.java b/core/java/android/view/KeyEvent.java
index 81d5a6e..8070c6a 100755
--- a/core/java/android/view/KeyEvent.java
+++ b/core/java/android/view/KeyEvent.java
@@ -566,6 +566,19 @@
     public static final int KEYCODE_BUTTON_15       = 202;
     /** Key code constant: Generic Game Pad Button #16.*/
     public static final int KEYCODE_BUTTON_16       = 203;
+    /** Key code constant: Language Switch key.
+     * Toggles the current input language such as switching between English and Japanese on
+     * a QWERTY keyboard.  On some devices, the same function may be performed by
+     * pressing Shift+Spacebar. */
+    public static final int KEYCODE_LANGUAGE_SWITCH = 204;
+    /** Key code constant: Manner Mode key.
+     * Toggles silent or vibrate mode on and off to make the device behave more politely
+     * in certain settings such as on a crowded train.  On some devices, the key may only
+     * operate when long-pressed. */
+    public static final int KEYCODE_MANNER_MODE     = 205;
+    /** Key code constant: 3D Mode key.
+     * Toggles the display between 2D and 3D mode. */
+    public static final int KEYCODE_3D_MODE         = 206;
 
     private static final int LAST_KEYCODE           = KEYCODE_BUTTON_16;
 
@@ -791,6 +804,9 @@
         names.append(KEYCODE_BUTTON_14, "KEYCODE_BUTTON_14");
         names.append(KEYCODE_BUTTON_15, "KEYCODE_BUTTON_15");
         names.append(KEYCODE_BUTTON_16, "KEYCODE_BUTTON_16");
+        names.append(KEYCODE_LANGUAGE_SWITCH, "KEYCODE_LANGUAGE_SWITCH");
+        names.append(KEYCODE_MANNER_MODE, "KEYCODE_MANNER_MODE");
+        names.append(KEYCODE_3D_MODE, "KEYCODE_3D_MODE");
     };
 
     // Symbolic names of all metakeys in bit order from least significant to most significant.
diff --git a/core/java/android/view/ScaleGestureDetector.java b/core/java/android/view/ScaleGestureDetector.java
index 5521e92..d638e70 100644
--- a/core/java/android/view/ScaleGestureDetector.java
+++ b/core/java/android/view/ScaleGestureDetector.java
@@ -156,6 +156,7 @@
     private float mRightSlopEdge;
     private float mBottomSlopEdge;
     private boolean mSloppyGesture;
+    private boolean mInvalidGesture;
 
     // Pointer IDs currently responsible for the two fingers controlling the gesture
     private int mActiveId0;
@@ -177,6 +178,8 @@
             reset(); // Start fresh
         }
 
+        if (mInvalidGesture) return false;
+
         if (!mGestureInProgress) {
             switch (action) {
             case MotionEvent.ACTION_DOWN: {
@@ -518,6 +521,15 @@
         final int currIndex0 = curr.findPointerIndex(mActiveId0);
         final int currIndex1 = curr.findPointerIndex(mActiveId1);
 
+        if (prevIndex0 < 0 || prevIndex1 < 0 || currIndex0 < 0 || currIndex1 < 0) {
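+            // findPointerIndex() returned -1: a tracked pointer id is missing, so abandon the gesture.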
+            mInvalidGesture = true;
+            Log.e(TAG, "Invalid MotionEvent stream detected.", new Throwable());
+            if (mGestureInProgress) {
+                mListener.onScaleEnd(this);
+            }
+            return;
+        }
+
         final float px0 = prev.getX(prevIndex0);
         final float py0 = prev.getY(prevIndex0);
         final float px1 = prev.getX(prevIndex1);
@@ -556,6 +568,7 @@
         mGestureInProgress = false;
         mActiveId0 = -1;
         mActiveId1 = -1;
+        mInvalidGesture = false;
     }
 
     /**
diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java
index 5a96efd..c729ccd 100644
--- a/core/java/android/view/View.java
+++ b/core/java/android/view/View.java
@@ -5241,6 +5241,10 @@
         final int viewFlags = mViewFlags;
 
         if ((viewFlags & ENABLED_MASK) == DISABLED) {
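+            // If the view was disabled while pressed, clear the pressed state on the final up event.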
+            if (event.getAction() == MotionEvent.ACTION_UP && (mPrivateFlags & PRESSED) != 0) {
+                mPrivateFlags &= ~PRESSED;
+                refreshDrawableState();
+            }
             // A disabled view that is clickable still consumes the touch
             // events, it just doesn't respond to them.
             return (((viewFlags & CLICKABLE) == CLICKABLE ||
diff --git a/core/java/android/webkit/HTML5VideoFullScreen.java b/core/java/android/webkit/HTML5VideoFullScreen.java
new file mode 100644
index 0000000..6be988e
--- /dev/null
+++ b/core/java/android/webkit/HTML5VideoFullScreen.java
@@ -0,0 +1,320 @@
+
+package android.webkit;
+
+import android.content.Context;
+import android.media.MediaPlayer;
+import android.media.Metadata;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.MotionEvent;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.webkit.HTML5VideoView;
+import android.webkit.HTML5VideoViewProxy;
+import android.widget.FrameLayout;
+import android.widget.MediaController;
+import android.widget.MediaController.MediaPlayerControl;
+
+
+/**
+ * @hide This is only used by the browser
+ */
+public class HTML5VideoFullScreen extends HTML5VideoView
+    implements MediaPlayerControl, MediaPlayer.OnPreparedListener,
+    View.OnTouchListener {
+
+    // This sub-class handles resizing the video when the screen rotates.
+    private class VideoSurfaceView extends SurfaceView {
+
+        public VideoSurfaceView(Context context) {
+            super(context);
+        }
+
+        @Override
+        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+            int width = getDefaultSize(mVideoWidth, widthMeasureSpec);
+            int height = getDefaultSize(mVideoHeight, heightMeasureSpec);
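+            // Shrink whichever dimension is too large so the measured size preserves the video's aspect ratio.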
+            if (mVideoWidth > 0 && mVideoHeight > 0) {
+                if ( mVideoWidth * height  > width * mVideoHeight ) {
+                    height = width * mVideoHeight / mVideoWidth;
+                } else if ( mVideoWidth * height  < width * mVideoHeight ) {
+                    width = height * mVideoWidth / mVideoHeight;
+                }
+            }
+            setMeasuredDimension(width, height);
+        }
+    }
+
+    // This view will contain the video.
+    private VideoSurfaceView mVideoSurfaceView;
+
+    // We need the full screen state to decide which surface to render to and
+    // when to create the MediaPlayer accordingly.
+    static final int FULLSCREEN_OFF               = 0;
+    static final int FULLSCREEN_SURFACECREATING   = 1;
+    static final int FULLSCREEN_SURFACECREATED    = 2;
+
+    private int mFullScreenMode;
+    // The MediaController is only used in full screen mode.
+    private MediaController mMediaController;
+
+    // SurfaceHolder for full screen
+    private SurfaceHolder mSurfaceHolder = null;
+
+    // Data only for MediaController
+    private boolean mCanSeekBack;
+    private boolean mCanSeekForward;
+    private boolean mCanPause;
+    private int mCurrentBufferPercentage;
+
+    // The progress view.
+    private static View mProgressView;
+    // The container for the progress view and video view
+    private static FrameLayout mLayout;
+
+    // The video size will be ready when prepared. Used to make sure the aspect
+    // ratio is correct.
+    private int mVideoWidth;
+    private int mVideoHeight;
+
+    SurfaceHolder.Callback mSHCallback = new SurfaceHolder.Callback()
+    {
+        public void surfaceChanged(SurfaceHolder holder, int format,
+                                    int w, int h)
+        {
+            if (mPlayer != null && mMediaController != null
+                    && mCurrentState == STATE_PREPARED) {
+                if (mMediaController.isShowing()) {
+                    // ensure the controller will get repositioned later
+                    mMediaController.hide();
+                }
+                mMediaController.show();
+            }
+        }
+
+        public void surfaceCreated(SurfaceHolder holder)
+        {
+            mSurfaceHolder = holder;
+            mFullScreenMode = FULLSCREEN_SURFACECREATED;
+
+            prepareForFullScreen();
+        }
+
+        public void surfaceDestroyed(SurfaceHolder holder)
+        {
+            // after we return from this we can't use the surface any more
+            mSurfaceHolder = null;
+            // The current video view will be destroyed when we play a new video.
+        }
+    };
+
+    private SurfaceView getSurfaceView() {
+        return mVideoSurfaceView;
+    }
+
+    HTML5VideoFullScreen(Context context, int videoLayerId, int position,
+            boolean autoStart) {
+        mVideoSurfaceView = new VideoSurfaceView(context);
+        mFullScreenMode = FULLSCREEN_OFF;
+        mVideoWidth = 0;
+        mVideoHeight = 0;
+        init(videoLayerId, position, autoStart);
+    }
+
+    private void setMediaController(MediaController m) {
+        mMediaController  = m;
+        attachMediaController();
+    }
+
+    private void attachMediaController() {
+        if (mPlayer != null && mMediaController != null) {
+            mMediaController.setMediaPlayer(this);
+            mMediaController.setAnchorView(mVideoSurfaceView);
+            // Will be enabled when prepared.
+            mMediaController.setEnabled(false);
+        }
+    }
+
+    @Override
+    public void decideDisplayMode() {
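+        // Full screen renders through the SurfaceView's SurfaceHolder rather than the shared SurfaceTexture.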
+        mPlayer.setDisplay(mSurfaceHolder);
+    }
+
+    private void prepareForFullScreen() {
+        // In full screen mode, reset the MediaPlayer before preparing it again.
+        mPlayer.reset();
+        setMediaController(new MediaController(mProxy.getContext()));
+
+        prepareDataAndDisplayMode(mProxy);
+    }
+
+
+    private void toggleMediaControlsVisibility() {
+        if (mMediaController.isShowing()) {
+            mMediaController.hide();
+        } else {
+            mMediaController.show();
+        }
+    }
+
+    @Override
+    public void onPrepared(MediaPlayer mp) {
+        super.onPrepared(mp);
+
+        mVideoSurfaceView.setOnTouchListener(this);
+        // Get the capabilities of the player for this stream
+        Metadata data = mp.getMetadata(MediaPlayer.METADATA_ALL,
+                MediaPlayer.BYPASS_METADATA_FILTER);
+        if (data != null) {
+            mCanPause = !data.has(Metadata.PAUSE_AVAILABLE)
+                    || data.getBoolean(Metadata.PAUSE_AVAILABLE);
+            mCanSeekBack = !data.has(Metadata.SEEK_BACKWARD_AVAILABLE)
+                    || data.getBoolean(Metadata.SEEK_BACKWARD_AVAILABLE);
+            mCanSeekForward = !data.has(Metadata.SEEK_FORWARD_AVAILABLE)
+                    || data.getBoolean(Metadata.SEEK_FORWARD_AVAILABLE);
+        } else {
+            mCanPause = mCanSeekBack = mCanSeekForward = true;
+        }
+
+        // mMediaController status depends on the Metadata result, so set it
+        // up only after reading the metadata.
+        if (mMediaController != null) {
+            mMediaController.setEnabled(true);
+            // If paused, the controller should be shown forever.
+            if (getAutostart())
+                mMediaController.show();
+            else
+                mMediaController.show(0);
+        }
+
+        if (mProgressView != null) {
+            mProgressView.setVisibility(View.GONE);
+            mLayout.removeView(mProgressView);
+            mProgressView = null;
+        }
+
+        mVideoWidth = mp.getVideoWidth();
+        mVideoHeight = mp.getVideoHeight();
+        // This will trigger the onMeasure to get the display size right.
+        mVideoSurfaceView.getHolder().setFixedSize(mVideoWidth, mVideoHeight);
+    }
+
+
+    private final WebChromeClient.CustomViewCallback mCallback =
+        new WebChromeClient.CustomViewCallback() {
+            public void onCustomViewHidden() {
+                // SurfaceHolder.Callback.surfaceDestroyed fires when the video
+                // view is detached from its parent view. The WebChromeClient
+                // does that before this method is invoked.
+                mTimer.cancel();
+                mTimer = null;
+
+                pauseAndDispatch(mProxy);
+
+                mLayout.removeView(getSurfaceView());
+
+                if (mProgressView != null) {
+                    mLayout.removeView(mProgressView);
+                    mProgressView = null;
+                }
+                mLayout = null;
+                // Re-enable plugin views.
+                mProxy.getWebView().getViewManager().showAll();
+
+                mProxy = null;
+            }
+        };
+
+    @Override
+    public void enterFullScreenVideoState(int layerId,
+            HTML5VideoViewProxy proxy, WebView webView) {
+        mFullScreenMode = FULLSCREEN_SURFACECREATING;
+        mCurrentBufferPercentage = 0;
+        mPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
+        mProxy = proxy;
+
+        mVideoSurfaceView.getHolder().addCallback(mSHCallback);
+        mVideoSurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
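+        // Mark the surface as a push-buffer surface for video playback.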
+        mVideoSurfaceView.setFocusable(true);
+        mVideoSurfaceView.setFocusableInTouchMode(true);
+        mVideoSurfaceView.requestFocus();
+
+        // Create a FrameLayout that will contain the VideoView and the
+        // progress view (if any).
+        mLayout = new FrameLayout(mProxy.getContext());
+        FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
+                            ViewGroup.LayoutParams.WRAP_CONTENT,
+                            ViewGroup.LayoutParams.WRAP_CONTENT,
+                            Gravity.CENTER);
+
+        mLayout.addView(getSurfaceView(), layoutParams);
+
+        mLayout.setVisibility(View.VISIBLE);
+
+        WebChromeClient client = webView.getWebChromeClient();
+        client.onShowCustomView(mLayout, mCallback);
+        // Plugins like Flash will draw over the video so hide
+        // them while we're playing.
+        mProxy.getWebView().getViewManager().hideAll();
+
+        mProgressView = client.getVideoLoadingProgressView();
+        if (mProgressView != null) {
+            mLayout.addView(mProgressView, layoutParams);
+            mProgressView.setVisibility(View.VISIBLE);
+        }
+
+    }
+
+    /**
+     * @return true when we are in full screen mode, even if the surface is not
+     * yet fully created.
+     */
+    public boolean isFullScreenMode() {
+        return true;
+    }
+
+    // MediaController FUNCTIONS:
+    @Override
+    public boolean canPause() {
+        return mCanPause;
+    }
+
+    @Override
+    public boolean canSeekBackward() {
+        return mCanSeekBack;
+    }
+
+    @Override
+    public boolean canSeekForward() {
+        return mCanSeekForward;
+    }
+
+    @Override
+    public int getBufferPercentage() {
+        if (mPlayer != null) {
+            return mCurrentBufferPercentage;
+        }
+        return 0;
+    }
+
+    // Other listeners functions:
+    private MediaPlayer.OnBufferingUpdateListener mBufferingUpdateListener =
+        new MediaPlayer.OnBufferingUpdateListener() {
+        public void onBufferingUpdate(MediaPlayer mp, int percent) {
+            mCurrentBufferPercentage = percent;
+        }
+    };
+
+    @Override
+    public boolean onTouch(View v, MotionEvent event) {
+        if (mFullScreenMode >= FULLSCREEN_SURFACECREATED
+                && mMediaController != null) {
+            toggleMediaControlsVisibility();
+        }
+        return false;
+    }
+
+}
diff --git a/core/java/android/webkit/HTML5VideoInline.java b/core/java/android/webkit/HTML5VideoInline.java
new file mode 100644
index 0000000..f1d9189
--- /dev/null
+++ b/core/java/android/webkit/HTML5VideoInline.java
@@ -0,0 +1,99 @@
+
+package android.webkit;
+
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.webkit.HTML5VideoView;
+import android.webkit.HTML5VideoViewProxy;
+import android.opengl.GLES20;
+
+/**
+ * @hide This is only used by the browser
+ */
+public class HTML5VideoInline extends HTML5VideoView {
+
+    // Because a SurfaceTexture consumes a lot of memory, we keep it as a
+    // static singleton. mTextureNames is the GL texture bound to this SurfaceTexture.
+    private static SurfaceTexture mSurfaceTexture = null;
+    private static int[] mTextureNames;
+
+    // We only render using the SurfaceTexture once the video is prepared.
+    // This avoids showing obsolete content when switching videos.
+    private static boolean mReadyToUseSurfTex = false;
+
+    // Video control FUNCTIONS:
+    @Override
+    public void start() {
+        super.start();
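+        // Only mark the SurfaceTexture as ready once the player has been prepared and started.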
+        if (mCurrentState == STATE_PREPARED) {
+            mReadyToUseSurfTex = true;
+        }
+    }
+
+    HTML5VideoInline(int videoLayerId, int position,
+            boolean autoStart) {
+        init(videoLayerId, position, autoStart);
+        mReadyToUseSurfTex = false;
+    }
+
+    @Override
+    public void decideDisplayMode() {
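+        // Inline playback renders into the shared SurfaceTexture instead of a SurfaceView.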
+        mPlayer.setTexture(getSurfaceTextureInstance());
+    }
+
+    // Normally called immediately after setVideoURI. But for full screen,
+    // this should be called after the surface holder has been created.
+    @Override
+    public void prepareDataAndDisplayMode(HTML5VideoViewProxy proxy) {
+        super.prepareDataAndDisplayMode(proxy);
+        setFrameAvailableListener(proxy);
+    }
+
+    // Pause playback and update the play/pause button.
+    @Override
+    public void pauseAndDispatch(HTML5VideoViewProxy proxy) {
+        super.pauseAndDispatch(proxy);
+        mReadyToUseSurfTex = false;
+    }
+
+    // Inline Video specific FUNCTIONS:
+
+    @Override
+    public SurfaceTexture getSurfaceTexture() {
+        return mSurfaceTexture;
+    }
+
+    @Override
+    public void deleteSurfaceTexture() {
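+        // Drop the shared SurfaceTexture so a new one is created lazily for the next video.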
+        mSurfaceTexture = null;
+        return;
+    }
+
+    // The SurfaceTexture is a singleton here, too.
+    private SurfaceTexture getSurfaceTextureInstance() {
+        // Create the surface texture.
+        if (mSurfaceTexture == null) {
+            mTextureNames = new int[1];
+            GLES20.glGenTextures(1, mTextureNames, 0);
+            mSurfaceTexture = new SurfaceTexture(mTextureNames[0]);
+        }
+        return mSurfaceTexture;
+    }
+
+    @Override
+    public int getTextureName() {
+        return mTextureNames[0];
+    }
+
+    @Override
+    public boolean getReadyToUseSurfTex() {
+        return mReadyToUseSurfTex;
+    }
+
+    private void setFrameAvailableListener(SurfaceTexture.OnFrameAvailableListener l) {
+        mSurfaceTexture.setOnFrameAvailableListener(l);
+    }
+
+}
diff --git a/core/java/android/webkit/HTML5VideoView.java b/core/java/android/webkit/HTML5VideoView.java
index 2312160..663497c 100644
--- a/core/java/android/webkit/HTML5VideoView.java
+++ b/core/java/android/webkit/HTML5VideoView.java
@@ -4,72 +4,93 @@
 import android.graphics.SurfaceTexture;
 import android.media.MediaPlayer;
 import android.util.Log;
+import android.view.SurfaceView;
 import android.webkit.HTML5VideoViewProxy;
-import android.widget.MediaController;
-import android.opengl.GLES20;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
 
 /**
  * @hide This is only used by the browser
  */
 public class HTML5VideoView implements MediaPlayer.OnPreparedListener{
-    // Due to the fact that SurfaceTexture consume a lot of memory, we make it
-    // as static. m_textureNames is the texture bound with this SurfaceTexture.
-    private static SurfaceTexture mSurfaceTexture = null;
-    private static int[] mTextureNames;
 
-    // Only when the video is prepared, we render using SurfaceTexture.
-    // This in fact is used to avoid showing the obsolete content when
-    // switching videos.
-    private static boolean mReadyToUseSurfTex = false;
+    protected static final String LOGTAG = "HTML5VideoView";
+
+    protected static final String COOKIE = "Cookie";
+    protected static final String HIDE_URL_LOGS = "x-hide-urls-from-log";
 
     // For handling the seekTo before prepared, we need to know whether or not
     // the video is prepared. Therefore, we differentiate the state between
     // prepared and not prepared.
     // When the video is not prepared, we will have to save the seekTo time,
     // and use it when prepared to play.
-    private static final int STATE_NOTPREPARED        = 0;
-    private static final int STATE_PREPARED           = 1;
+    protected static final int STATE_NOTPREPARED        = 0;
+    protected static final int STATE_PREPARED           = 1;
 
-    // We only need state for handling seekTo
-    private int mCurrentState;
+    protected int mCurrentState;
 
-    // Basically for calling back the OnPrepared in the proxy
-    private HTML5VideoViewProxy mProxy;
+    protected HTML5VideoViewProxy mProxy;
 
     // Save the seek time when not prepared. This can happen when switching
     // video besides initial load.
-    private int mSaveSeekTime;
+    protected int mSaveSeekTime;
 
     // This is used to find the VideoLayer on the native side.
-    private int mVideoLayerId;
+    protected int mVideoLayerId;
 
     // Every video will have one MediaPlayer. Given the fact we only have one
     // SurfaceTexture, there is only one MediaPlayer in action. Every time we
     // switch videos, a new instance of MediaPlayer will be created in reset().
-    private MediaPlayer mPlayer;
+    // Switching between inline and full screen will also create a new instance.
+    protected MediaPlayer mPlayer;
 
-    private static HTML5VideoView mInstance = new HTML5VideoView();
+    // Set up every time we create the video view object.
+    // True only when switching into full screen while the video is playing.
+    protected boolean mAutostart;
 
-    // Video control FUNCTIONS:
+    // Save the URI and headers so the data source can be set up again when re-preparing.
+    protected String mUri;
+    protected Map<String, String> mHeaders;
+
+    // The timer for timeupdate events.
+    // See http://www.whatwg.org/specs/web-apps/current-work/#event-media-timeupdate
+    protected static Timer mTimer;
+
+    // The spec says the timer should fire every 250 ms or less.
+    private static final int TIMEUPDATE_PERIOD = 250;  // ms
+
+    // Common video control FUNCTIONS:
     public void start() {
         if (mCurrentState == STATE_PREPARED) {
             mPlayer.start();
-            mReadyToUseSurfTex = true;
         }
     }
 
     public void pause() {
-        mPlayer.pause();
+        if (mCurrentState == STATE_PREPARED && mPlayer.isPlaying()) {
+            mPlayer.pause();
+        }
+        if (mTimer != null) {
+            mTimer.purge();
+        }
     }
 
     public int getDuration() {
-        return mPlayer.getDuration();
+        if (mCurrentState == STATE_PREPARED) {
+            return mPlayer.getDuration();
+        } else {
+            return -1;
+        }
     }
 
     public int getCurrentPosition() {
-        return mPlayer.getCurrentPosition();
+        if (mCurrentState == STATE_PREPARED) {
+            return mPlayer.getCurrentPosition();
+        }
+        return 0;
     }
 
     public void seekTo(int pos) {
@@ -88,53 +109,50 @@
     }
 
     public void stopPlayback() {
-        mPlayer.stop();
+        if (mCurrentState == STATE_PREPARED) {
+            mPlayer.stop();
+        }
     }
 
-    private void reset(int videoLayerId) {
+    public boolean getAutostart() {
+        return mAutostart;
+    }
+
+    // Every time we start a new video, we create a new video view and a MediaPlayer.
+    public void init(int videoLayerId, int position, boolean autoStart) {
         mPlayer = new MediaPlayer();
         mCurrentState = STATE_NOTPREPARED;
         mProxy = null;
         mVideoLayerId = videoLayerId;
-        mReadyToUseSurfTex = false;
+        mSaveSeekTime = position;
+        mAutostart = autoStart;
     }
 
-    public static HTML5VideoView getInstance(int videoLayerId) {
-        // Every time we switch between the videos, a new MediaPlayer will be
-        // created. Make sure we call the m_player.release() when it is done.
-        mInstance.reset(videoLayerId);
-        return mInstance;
+    protected HTML5VideoView() {
     }
 
-    private HTML5VideoView() {
-        // This is a singleton across WebViews (i.e. Tabs).
-        // HTML5VideoViewProxy will reset the internal state every time a new
-        // video start.
-    }
-
-    public void setMediaController(MediaController m) {
-        this.setMediaController(m);
-    }
-
-    public void setVideoURI(String uri, Map<String, String> headers) {
-        // When switching players, surface texture will be reused.
-        mPlayer.setTexture(getSurfaceTextureInstance());
-
-        // When there is exception, we could just bail out silently.
-        // No Video will be played though. Write the stack for debug
-        try {
-            mPlayer.setDataSource(uri, headers);
-            mPlayer.prepareAsync();
-        } catch (IllegalArgumentException e) {
-            e.printStackTrace();
-        } catch (IllegalStateException e) {
-            e.printStackTrace();
-        } catch (IOException e) {
-            e.printStackTrace();
+    protected static Map<String, String> generateHeaders(String url,
+            HTML5VideoViewProxy proxy) {
+        boolean isPrivate = proxy.getWebView().isPrivateBrowsingEnabled();
+        String cookieValue = CookieManager.getInstance().getCookie(url, isPrivate);
+        Map<String, String> headers = new HashMap<String, String>();
+        if (cookieValue != null) {
+            headers.put(COOKIE, cookieValue);
         }
+        if (isPrivate) {
+            headers.put(HIDE_URL_LOGS, "true");
+        }
+
+        return headers;
     }
 
-    // TODO [FULL SCREEN SUPPORT]
+    public void setVideoURI(String uri, HTML5VideoViewProxy proxy) {
+        // When switching players, surface texture will be reused.
+        mUri = uri;
+        mHeaders = generateHeaders(uri, proxy);
+
+        mTimer = new Timer();
+    }
 
     // Listeners setup FUNCTIONS:
     public void setOnCompletionListener(HTML5VideoViewProxy proxy) {
@@ -150,43 +168,47 @@
         mPlayer.setOnPreparedListener(this);
     }
 
-    // Inline Video specific FUNCTIONS:
+    // Normally called immediately after setVideoURI. But for full screen,
+    // this should be called after the surface holder has been created.
+    public void prepareDataAndDisplayMode(HTML5VideoViewProxy proxy) {
+        // SurfaceTexture will be created lazily here for inline mode
+        decideDisplayMode();
 
-    public SurfaceTexture getSurfaceTexture() {
-        return mSurfaceTexture;
-    }
+        setOnCompletionListener(proxy);
+        setOnPreparedListener(proxy);
+        setOnErrorListener(proxy);
 
-    public void deleteSurfaceTexture() {
-        mSurfaceTexture = null;
-        return;
-    }
-
-    // SurfaceTexture is a singleton here , too
-    private SurfaceTexture getSurfaceTextureInstance() {
-        // Create the surface texture.
-        if (mSurfaceTexture == null)
-        {
-            mTextureNames = new int[1];
-            GLES20.glGenTextures(1, mTextureNames, 0);
-            mSurfaceTexture = new SurfaceTexture(mTextureNames[0]);
+        // When there is an exception, we just bail out silently.
+        // No video will be played, though. Print the stack trace for debugging.
+        try {
+            mPlayer.setDataSource(mUri, mHeaders);
+            mPlayer.prepareAsync();
+        } catch (IllegalArgumentException e) {
+            e.printStackTrace();
+        } catch (IllegalStateException e) {
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
         }
-        return mSurfaceTexture;
     }
 
-    public int getTextureName() {
-        return mTextureNames[0];
-    }
 
+    // Common code
     public int getVideoLayerId() {
         return mVideoLayerId;
     }
 
-    public boolean getReadyToUseSurfTex() {
-        return mReadyToUseSurfTex;
-    }
+    private static final class TimeupdateTask extends TimerTask {
+        private HTML5VideoViewProxy mProxy;
 
-    public void setFrameAvailableListener(SurfaceTexture.OnFrameAvailableListener l) {
-        mSurfaceTexture.setOnFrameAvailableListener(l);
+        public TimeupdateTask(HTML5VideoViewProxy proxy) {
+            mProxy = proxy;
+        }
+
+        @Override
+        public void run() {
+            mProxy.onTimeupdate();
+        }
     }
 
     @Override
@@ -195,6 +217,9 @@
         seekTo(mSaveSeekTime);
         if (mProxy != null)
             mProxy.onPrepared(mp);
+
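+        // Start the periodic timeupdate callbacks now that the media is prepared.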
+        mTimer.schedule(new TimeupdateTask(mProxy), TIMEUPDATE_PERIOD, TIMEUPDATE_PERIOD);
+
     }
 
     // Pause the play and update the play/pause button
@@ -205,7 +230,35 @@
                 proxy.dispatchOnPaused();
             }
         }
-        mReadyToUseSurfTex = false;
+    }
+
+    // Below are functions that have different implementations for inline and
+    // full-screen mode. Some are specific to one type, but currently they are
+    // called directly from the proxy.
+    public void enterFullScreenVideoState(int layerId,
+            HTML5VideoViewProxy proxy, WebView webView) {
+    }
+
+    public boolean isFullScreenMode() {
+        return false;
+    }
+
+    public void decideDisplayMode() {
+    }
+
+    public boolean getReadyToUseSurfTex() {
+        return false;
+    }
+
+    public SurfaceTexture getSurfaceTexture() {
+        return null;
+    }
+
+    public void deleteSurfaceTexture() {
+    }
+
+    public int getTextureName() {
+        return 0;
     }
 
 }
diff --git a/core/java/android/webkit/HTML5VideoViewProxy.java b/core/java/android/webkit/HTML5VideoViewProxy.java
index b614d8f..d3fcfa5 100644
--- a/core/java/android/webkit/HTML5VideoViewProxy.java
+++ b/core/java/android/webkit/HTML5VideoViewProxy.java
@@ -21,29 +21,16 @@
 import android.graphics.BitmapFactory;
 import android.graphics.SurfaceTexture;
 import android.media.MediaPlayer;
-import android.media.MediaPlayer.OnPreparedListener;
-import android.media.MediaPlayer.OnCompletionListener;
-import android.media.MediaPlayer.OnErrorListener;
 import android.net.http.EventHandler;
 import android.net.http.Headers;
 import android.net.http.RequestHandle;
 import android.net.http.RequestQueue;
 import android.net.http.SslCertificate;
 import android.net.http.SslError;
-import android.net.Uri;
-import android.os.Bundle;
 import android.os.Handler;
 import android.os.Looper;
 import android.os.Message;
 import android.util.Log;
-import android.view.MotionEvent;
-import android.view.Gravity;
-import android.view.View;
-import android.view.ViewGroup;
-import android.widget.AbsoluteLayout;
-import android.widget.FrameLayout;
-import android.widget.MediaController;
-import android.widget.VideoView;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -51,8 +38,6 @@
 import java.net.URL;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
 
 /**
  * <p>Proxy for HTML5 video views.
@@ -78,9 +63,6 @@
     private static final int POSTER_FETCHED    = 202;
     private static final int PAUSED            = 203;
 
-    private static final String COOKIE = "Cookie";
-    private static final String HIDE_URL_LOGS = "x-hide-urls-from-log";
-
     // Timer thread -> UI thread
     private static final int TIMEUPDATE = 300;
 
@@ -104,38 +86,19 @@
         // The VideoView instance. This is a singleton for now, at least until
         // http://b/issue?id=1973663 is fixed.
         private static HTML5VideoView mHTML5VideoView;
-        // The progress view.
-        private static View mProgressView;
-        // The container for the progress view and video view
-        private static FrameLayout mLayout;
-        // The timer for timeupate events.
-        // See http://www.whatwg.org/specs/web-apps/current-work/#event-media-timeupdate
-        private static Timer mTimer;
-        private static final class TimeupdateTask extends TimerTask {
-            private HTML5VideoViewProxy mProxy;
 
-            public TimeupdateTask(HTML5VideoViewProxy proxy) {
-                mProxy = proxy;
-            }
-
-            public void run() {
-                mProxy.onTimeupdate();
-            }
-        }
-        // The spec says the timer should fire every 250 ms or less.
-        private static final int TIMEUPDATE_PERIOD = 250;  // ms
         private static boolean isVideoSelfEnded = false;
         // By using the baseLayer and the current video Layer ID, we can
         // identify the exact layer on the UI thread to use the SurfaceTexture.
         private static int mBaseLayer = 0;
 
-        // TODO: [FULL SCREEN SUPPORT]
-
         // Every time webView setBaseLayer, this will be called.
         // When we found the Video layer, then we set the Surface Texture to it.
         // Otherwise, we may want to delete the Surface Texture to save memory.
         public static void setBaseLayer(int layer) {
-            if (mHTML5VideoView != null) {
+            // Don't do this for full screen mode.
+            if (mHTML5VideoView != null
+                    && !mHTML5VideoView.isFullScreenMode()) {
                 mBaseLayer = layer;
                 SurfaceTexture surfTexture = mHTML5VideoView.getSurfaceTexture();
                 int textureName = mHTML5VideoView.getTextureName();
@@ -165,16 +128,47 @@
             }
         }
 
+        public static void enterFullScreenVideo(int layerId, String url,
+                HTML5VideoViewProxy proxy, WebView webView) {
+            // Save the inline video state so the full-screen view can inherit it.
+            int savePosition = 0;
+            boolean savedIsPlaying = false;
+            if (mHTML5VideoView != null) {
+                // If we are playing the same video, then it is better to
+                // save the current position.
+                if (layerId == mHTML5VideoView.getVideoLayerId()) {
+                    savePosition = mHTML5VideoView.getCurrentPosition();
+                    savedIsPlaying = mHTML5VideoView.isPlaying();
+                }
+                mHTML5VideoView.pauseAndDispatch(mCurrentProxy);
+                mHTML5VideoView.release();
+            }
+            mHTML5VideoView = new HTML5VideoFullScreen(proxy.getContext(),
+                    layerId, savePosition, savedIsPlaying);
+            mCurrentProxy = proxy;
+
+            mHTML5VideoView.setVideoURI(url, mCurrentProxy);
+
+            mHTML5VideoView.enterFullScreenVideoState(layerId, proxy, webView);
+        }
+
         // This is on the UI thread.
         // When native tell Java to play, we need to check whether or not it is
         // still the same video by using videoLayerId and treat it differently.
         public static void play(String url, int time, HTML5VideoViewProxy proxy,
                 WebChromeClient client, int videoLayerId) {
             int currentVideoLayerId = -1;
-            if (mHTML5VideoView != null)
-                currentVideoLayerId = mHTML5VideoView.getVideoLayerId();
+            boolean backFromFullScreenMode = false;
 
-            if (currentVideoLayerId != videoLayerId
+            if (mHTML5VideoView != null) {
+                currentVideoLayerId = mHTML5VideoView.getVideoLayerId();
+                if (mHTML5VideoView instanceof HTML5VideoFullScreen) {
+                    backFromFullScreenMode = true;
+                }
+            }
+
+            if (backFromFullScreenMode
+                ||  currentVideoLayerId != videoLayerId
                 || mHTML5VideoView.getSurfaceTexture() == null) {
                 // Here, we handle the case when switching to a new video,
                 // either inside a WebView or across WebViews
@@ -186,35 +180,11 @@
                     // release the media player to avoid finalize error
                     mHTML5VideoView.release();
                 }
-                // HTML5VideoView is singleton, however, the internal state will
-                // be reset since we are switching from one video to another.
-                // Then we need to set up all the source/listener etc...
-                mHTML5VideoView = HTML5VideoView.getInstance(videoLayerId);
-
                 mCurrentProxy = proxy;
+                mHTML5VideoView = new HTML5VideoInline(videoLayerId, time, false);
 
-                // TODO: [FULL SCREEN SUPPORT]
-
-                boolean isPrivate = mCurrentProxy.getWebView().isPrivateBrowsingEnabled();
-                String cookieValue = CookieManager.getInstance().getCookie(url, isPrivate);
-                Map<String, String> headers = new HashMap<String, String>();
-                if (cookieValue != null) {
-                    headers.put(COOKIE, cookieValue);
-                }
-                if (isPrivate) {
-                    headers.put(HIDE_URL_LOGS, "true");
-                }
-
-                mHTML5VideoView.setVideoURI(url, headers);
-                mHTML5VideoView.setOnCompletionListener(proxy);
-                mHTML5VideoView.setOnPreparedListener(proxy);
-                mHTML5VideoView.setOnErrorListener(proxy);
-                mHTML5VideoView.setFrameAvailableListener(proxy);
-
-                mHTML5VideoView.seekTo(time);
-
-                mTimer = new Timer();
-
+                mHTML5VideoView.setVideoURI(url, mCurrentProxy);
+                mHTML5VideoView.prepareDataAndDisplayMode(proxy);
             } else if (mCurrentProxy == proxy) {
                 // Here, we handle the case when we keep playing with one video
                 if (!mHTML5VideoView.isPlaying()) {
@@ -222,7 +192,8 @@
                     mHTML5VideoView.start();
                 }
             } else if (mCurrentProxy != null) {
-                // Some other video is already playing. Notify the caller that its playback ended.
+                // Some other video is already playing. Notify the caller that
+                // its playback ended.
                 proxy.dispatchOnEnded();
             }
         }
@@ -249,14 +220,14 @@
         public static void pause(HTML5VideoViewProxy proxy) {
             if (mCurrentProxy == proxy && mHTML5VideoView != null) {
                 mHTML5VideoView.pause();
-                mTimer.purge();
             }
         }
 
         public static void onPrepared() {
-            mHTML5VideoView.start();
-            mTimer.schedule(new TimeupdateTask(mCurrentProxy), TIMEUPDATE_PERIOD, TIMEUPDATE_PERIOD);
-            // TODO: [FULL SCREEN SUPPORT]
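+            // In full screen mode, only auto-start playback if the video was playing when full screen was entered.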
+            if (!mHTML5VideoView.isFullScreenMode() ||
+                    mHTML5VideoView.isFullScreenMode() &&
+                    mHTML5VideoView.getAutostart() )
+                mHTML5VideoView.start();
         }
 
         public static void end() {
@@ -349,8 +320,6 @@
                     VideoPlayer.isVideoSelfEnded = true;
                 VideoPlayer.end();
                 break;
-                // TODO: [FULL SCREEN SUPPORT]
-                // For full screen case, end may need hide the view.
             case ERROR: {
                 WebChromeClient client = mWebView.getWebChromeClient();
                 if (client != null) {
@@ -665,7 +634,7 @@
         mPosterDownloader.start();
     }
 
-    // These two function are called from UI thread only by WebView.
+    // These three functions are called from the UI thread only by WebView.
     public void setBaseLayer(int layer) {
         VideoPlayer.setBaseLayer(layer);
     }
@@ -673,6 +642,11 @@
     public void pauseAndDispatch() {
         VideoPlayer.pauseAndDispatch();
     }
+
+    public void enterFullScreenVideo(int layerId, String url) {
+        VideoPlayer.enterFullScreenVideo(layerId, url, this, mWebView);
+    }
+
     /**
      * The factory for HTML5VideoViewProxy instances.
      * @param webViewCore is the WebViewCore that is requesting the proxy.
diff --git a/core/java/android/webkit/WebView.java b/core/java/android/webkit/WebView.java
index dfdae53..0c1e39e 100644
--- a/core/java/android/webkit/WebView.java
+++ b/core/java/android/webkit/WebView.java
@@ -1356,10 +1356,10 @@
         return mTitleBar != null ? mTitleBar.getHeight() : 0;
     }
 
-    /*
+    /**
      * Return the amount of the titlebarview (if any) that is visible
      *
-     * @hide
+     * @deprecated This method is now obsolete.
      */
     public int getVisibleTitleHeight() {
         // need to restrict mScrollY due to over scroll
@@ -2102,7 +2102,7 @@
     public void clearView() {
         mContentWidth = 0;
         mContentHeight = 0;
-        setBaseLayer(0, null, false);
+        setBaseLayer(0, null, false, false);
         mWebViewCore.sendMessage(EventHub.CLEAR_CONTENT);
     }
 
@@ -4030,10 +4030,12 @@
         }
     }
 
-    void setBaseLayer(int layer, Region invalRegion, boolean showVisualIndicator) {
+    void setBaseLayer(int layer, Region invalRegion, boolean showVisualIndicator,
+            boolean isPictureAfterFirstLayout) {
         if (mNativeClass == 0)
             return;
-        nativeSetBaseLayer(layer, invalRegion, showVisualIndicator);
+        nativeSetBaseLayer(layer, invalRegion, showVisualIndicator,
+                isPictureAfterFirstLayout);
         if (mHTML5VideoViewProxy != null) {
             mHTML5VideoViewProxy.setBaseLayer(layer);
         }
@@ -4119,8 +4121,11 @@
         if (mNativeClass != 0 && nativeEvaluateLayersAnimations()) {
             UIAnimationsRunning = true;
             // If we have unfinished (or unstarted) animations,
-            // we ask for a repaint.
-            invalidate();
+            // we ask for a repaint. We only need to do this in software
+            // rendering (with hardware rendering we already have a different
+            // method of requesting a repaint)
+            if (!canvas.isHardwareAccelerated())
+                invalidate();
         }
 
         // decide which adornments to draw
@@ -7165,14 +7170,15 @@
     private class TouchEventQueue {
         private long mNextTouchSequence = Long.MIN_VALUE + 1;
         private long mLastHandledTouchSequence = Long.MIN_VALUE;
-        private long mIgnoreUntilSequence = Long.MIN_VALUE;
+        private long mIgnoreUntilSequence = Long.MIN_VALUE + 1;
         private QueuedTouch mTouchEventQueue;
         private QueuedTouch mQueuedTouchRecycleBin;
         private int mQueuedTouchRecycleCount;
+        private long mLastEventTime = Long.MAX_VALUE;
         private static final int MAX_RECYCLED_QUEUED_TOUCH = 15;
 
         // milliseconds until we abandon hope of getting all of a previous gesture
-        private static final int QUEUED_GESTURE_TIMEOUT = 2000;
+        private static final int QUEUED_GESTURE_TIMEOUT = 1000;
 
         private QueuedTouch obtainQueuedTouch() {
             if (mQueuedTouchRecycleBin != null) {
@@ -7206,7 +7212,7 @@
         public void reset() {
             mNextTouchSequence = Long.MIN_VALUE + 1;
             mLastHandledTouchSequence = Long.MIN_VALUE;
-            mIgnoreUntilSequence = Long.MIN_VALUE;
+            mIgnoreUntilSequence = Long.MIN_VALUE + 1;
             while (mTouchEventQueue != null) {
                 QueuedTouch recycleMe = mTouchEventQueue;
                 mTouchEventQueue = mTouchEventQueue.mNext;
@@ -7240,7 +7246,9 @@
                 return;
             }
 
-            dropStaleGestures(ted.mMotionEvent, ted.mSequence);
+            if (dropStaleGestures(ted.mMotionEvent, ted.mSequence)) {
+                return;
+            }
 
             if (mLastHandledTouchSequence + 1 == ted.mSequence) {
                 handleQueuedTouchEventData(ted);
@@ -7275,7 +7283,9 @@
         public void enqueueTouchEvent(MotionEvent ev) {
             final long sequence = nextTouchSequence();
 
-            dropStaleGestures(ev, sequence);
+            if (dropStaleGestures(ev, sequence)) {
+                return;
+            }
 
             if (mLastHandledTouchSequence + 1 == sequence) {
                 handleQueuedMotionEvent(ev);
@@ -7298,16 +7308,30 @@
             }
         }
 
-        private void dropStaleGestures(MotionEvent ev, long sequence) {
-            if (mTouchEventQueue == null) return;
+        private boolean dropStaleGestures(MotionEvent ev, long sequence) {
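+            // Returns true when this event's sequence has already been handled or ignored, so the caller should drop it.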
+            if (ev != null && ev.getAction() == MotionEvent.ACTION_MOVE && !mConfirmMove) {
+                // This is to make sure that we don't attempt to process a tap
+                // or long press when webkit takes too long to get back to us.
+                // The movement will be properly confirmed when we process the
+                // enqueued event later.
+                final int dx = Math.round(ev.getX()) - mLastTouchX;
+                final int dy = Math.round(ev.getY()) - mLastTouchY;
+                if (dx * dx + dy * dy > mTouchSlopSquare) {
+                    mPrivateHandler.removeMessages(SWITCH_TO_SHORTPRESS);
+                    mPrivateHandler.removeMessages(SWITCH_TO_LONGPRESS);
+                }
+            }
 
-            MotionEvent nextQueueEvent = mTouchEventQueue.mTed != null ?
-                    mTouchEventQueue.mTed.mMotionEvent : mTouchEventQueue.mEvent;
+            if (mTouchEventQueue == null) {
+                return sequence <= mLastHandledTouchSequence;
+            }
 
-            if (ev != null && ev.getAction() == MotionEvent.ACTION_DOWN && nextQueueEvent != null) {
+            // If we have a new down event and it's been a while since the last event
+            // we saw, just reset and keep going.
+            if (ev != null && ev.getAction() == MotionEvent.ACTION_DOWN) {
                 long eventTime = ev.getEventTime();
-                long nextQueueTime = nextQueueEvent.getEventTime();
-                if (eventTime > nextQueueTime + QUEUED_GESTURE_TIMEOUT) {
+                long lastHandledEventTime = mLastEventTime;
+                if (eventTime > lastHandledEventTime + QUEUED_GESTURE_TIMEOUT) {
                     Log.w(LOGTAG, "Got ACTION_DOWN but still waiting on stale event. " +
                             "Ignoring previous queued events.");
                     QueuedTouch qd = mTouchEventQueue;
@@ -7321,17 +7345,18 @@
                 }
             }
 
-            if (mIgnoreUntilSequence > mLastHandledTouchSequence) {
+            if (mIgnoreUntilSequence - 1 > mLastHandledTouchSequence) {
                 QueuedTouch qd = mTouchEventQueue;
-                while (qd != null && qd.mSequence < mIgnoreUntilSequence &&
-                        qd.mSequence < sequence) {
-                    mLastHandledTouchSequence = qd.mSequence;
+                while (qd != null && qd.mSequence < mIgnoreUntilSequence) {
                     QueuedTouch recycleMe = qd;
                     qd = qd.mNext;
                     recycleQueuedTouch(recycleMe);
                 }
                 mTouchEventQueue = qd;
+                mLastHandledTouchSequence = mIgnoreUntilSequence - 1;
             }
+
+            return sequence <= mLastHandledTouchSequence;
         }
 
         private void handleQueuedTouch(QueuedTouch qt) {
@@ -7344,6 +7369,7 @@
         }
 
         private void handleQueuedMotionEvent(MotionEvent ev) {
+            mLastEventTime = ev.getEventTime();
             int action = ev.getActionMasked();
             if (ev.getPointerCount() > 1) {  // Multi-touch
                 handleMultiTouchInWebView(ev);
@@ -7361,6 +7387,9 @@
         }
 
         private void handleQueuedTouchEventData(TouchEventData ted) {
+            if (ted.mMotionEvent != null) {
+                mLastEventTime = ted.mMotionEvent.getEventTime();
+            }
             if (!ted.mReprocess) {
                 if (ted.mAction == MotionEvent.ACTION_DOWN
                         && mPreventDefault == PREVENT_DEFAULT_MAYBE_YES) {
@@ -7620,11 +7649,12 @@
                 case NEW_PICTURE_MSG_ID: {
                     // called for new content
                     final WebViewCore.DrawData draw = (WebViewCore.DrawData) msg.obj;
-                    setBaseLayer(draw.mBaseLayer, draw.mInvalRegion,
-                            getSettings().getShowVisualIndicator());
-                    final Point viewSize = draw.mViewSize;
                     WebViewCore.ViewState viewState = draw.mViewState;
                     boolean isPictureAfterFirstLayout = viewState != null;
+                    setBaseLayer(draw.mBaseLayer, draw.mInvalRegion,
+                            getSettings().getShowVisualIndicator(),
+                            isPictureAfterFirstLayout);
+                    final Point viewSize = draw.mViewSize;
                     if (isPictureAfterFirstLayout) {
                         // Reset the last sent data here since dealing with new page.
                         mLastWidthSent = 0;
@@ -7842,7 +7872,11 @@
 
                 case ENTER_FULLSCREEN_VIDEO:
                     int layerId = msg.arg1;
-                    Log.v(LOGTAG, "Display the video layer " + layerId + " fullscreen");
+
+                    String url = (String) msg.obj;
+                    if (mHTML5VideoViewProxy != null) {
+                        mHTML5VideoViewProxy.enterFullScreenVideo(layerId, url);
+                    }
                     break;
 
                 case SHOW_FULLSCREEN: {
@@ -8679,7 +8713,7 @@
     private native void     nativeSetFindIsUp(boolean isUp);
     private native void     nativeSetHeightCanMeasure(boolean measure);
     private native void     nativeSetBaseLayer(int layer, Region invalRegion,
-            boolean showVisualIndicator);
+            boolean showVisualIndicator, boolean isPictureAfterFirstLayout);
     private native void     nativeShowCursorTimed();
     private native void     nativeReplaceBaseContent(int content);
     private native void     nativeCopyBaseContentToPicture(Picture pict);
diff --git a/core/java/android/webkit/WebViewCore.java b/core/java/android/webkit/WebViewCore.java
index bed77ef..3b989dc 100644
--- a/core/java/android/webkit/WebViewCore.java
+++ b/core/java/android/webkit/WebViewCore.java
@@ -483,10 +483,12 @@
     /**
      * Notify the webview that we want to display the video layer fullscreen.
      */
-    protected void enterFullscreenForVideoLayer(int layerId) {
+    protected void enterFullscreenForVideoLayer(int layerId, String url) {
         if (mWebView == null) return;
-        Message.obtain(mWebView.mPrivateHandler,
-                       WebView.ENTER_FULLSCREEN_VIDEO, layerId, 0).sendToTarget();
+        Message message = Message.obtain(mWebView.mPrivateHandler,
+                       WebView.ENTER_FULLSCREEN_VIDEO, layerId, 0);
+        message.obj = url;
+        message.sendToTarget();
     }
 
     //-------------------------------------------------------------------------
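
The enterFullscreenForVideoLayer() change above carries the video URL in Message.obj instead of widening the int-argument overload. For reference, the same android.os.Message pattern in isolation, with hypothetical handler and constant names (only layerId and url come from the change itself):

    // Pack an int argument and an Object payload into a single Message.
    Message message = Message.obtain(handler, WHAT_ENTER_FULLSCREEN_VIDEO, layerId, 0);
    message.obj = url;        // the extra payload travels in Message.obj
    message.sendToTarget();   // dispatched to handler.handleMessage(Message)
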
diff --git a/core/res/res/values/attrs.xml b/core/res/res/values/attrs.xml
index 71a8b2a..c81f8c0 100755
--- a/core/res/res/values/attrs.xml
+++ b/core/res/res/values/attrs.xml
@@ -1374,6 +1374,9 @@
         <enum name="KEYCODE_BUTTON_14" value="201" />
         <enum name="KEYCODE_BUTTON_15" value="202" />
         <enum name="KEYCODE_BUTTON_16" value="203" />
+        <enum name="KEYCODE_LANGUAGE_SWITCH" value="204" />
+        <enum name="KEYCODE_MANNER_MODE" value="205" />
+        <enum name="KEYCODE_3D_MODE" value="206" />
     </attr>
 
     <!-- ***************************************************************** -->
diff --git a/drm/common/DrmInfoEvent.cpp b/drm/common/DrmInfoEvent.cpp
index 8d115a8..27a5a2d 100644
--- a/drm/common/DrmInfoEvent.cpp
+++ b/drm/common/DrmInfoEvent.cpp
@@ -19,7 +19,7 @@
 
 using namespace android;
 
-DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8& message)
+DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8 message)
     : mUniqueId(uniqueId),
       mInfoType(infoType),
       mMessage(message) {
@@ -34,7 +34,7 @@
     return mInfoType;
 }
 
-const String8& DrmInfoEvent::getMessage() const {
+const String8 DrmInfoEvent::getMessage() const {
     return mMessage;
 }
 
diff --git a/drm/java/android/drm/DrmErrorEvent.java b/drm/java/android/drm/DrmErrorEvent.java
index 90adb47f..7cc9a87 100644
--- a/drm/java/android/drm/DrmErrorEvent.java
+++ b/drm/java/android/drm/DrmErrorEvent.java
@@ -16,6 +16,8 @@
 
 package android.drm;
 
+import java.util.HashMap;
+
 /**
  * This is an entity class which would be passed to caller in
  * {@link DrmManagerClient.OnErrorListener#onError(DrmManagerClient, DrmErrorEvent)}
@@ -62,11 +64,25 @@
      * constructor to create DrmErrorEvent object with given parameters
      *
      * @param uniqueId Unique session identifier
-     * @param type Type of information
+     * @param type Type of the event. It could be one of the types defined above
      * @param message Message description
      */
     public DrmErrorEvent(int uniqueId, int type, String message) {
         super(uniqueId, type, message);
     }
+
+    /**
+     * constructor to create DrmErrorEvent object with given parameters
+     *
+     * @param uniqueId Unique session identifier
+     * @param type Type of the event. It could be one of the types defined above
+     * @param message Message description
+     * @param attributes Attributes for extensible information. Could be any
+     * information provided by the plugin
+     */
+    public DrmErrorEvent(int uniqueId, int type, String message,
+                            HashMap<String, Object> attributes) {
+        super(uniqueId, type, message, attributes);
+    }
 }
 
diff --git a/drm/java/android/drm/DrmEvent.java b/drm/java/android/drm/DrmEvent.java
index f7bc5cd..eba458b 100644
--- a/drm/java/android/drm/DrmEvent.java
+++ b/drm/java/android/drm/DrmEvent.java
@@ -16,6 +16,8 @@
 
 package android.drm;
 
+import java.util.HashMap;
+
 /**
  * This is the base class which would be used to notify the caller
  * about any event occurred in DRM framework.
@@ -33,11 +35,36 @@
     public static final int TYPE_DRM_INFO_PROCESSED = 1002;
 
     public static final String DRM_INFO_STATUS_OBJECT = "drm_info_status_object";
+    public static final String DRM_INFO_OBJECT = "drm_info_object";
 
     private final int mUniqueId;
     private final int mType;
     private String mMessage = "";
 
+    private HashMap<String, Object> mAttributes = new HashMap<String, Object>();
+
+    /**
+     * constructor for DrmEvent class
+     *
+     * @param uniqueId Unique session identifier
+     * @param type Type of information
+     * @param message Message description
+     * @param attributes Attributes for extensible information
+     */
+    protected DrmEvent(int uniqueId, int type, String message,
+                            HashMap<String, Object> attributes) {
+        mUniqueId = uniqueId;
+        mType = type;
+
+        if (null != message) {
+            mMessage = message;
+        }
+
+        if (null != attributes) {
+            mAttributes = attributes;
+        }
+    }
+
     /**
      * constructor for DrmEvent class
      *
@@ -80,5 +107,15 @@
     public String getMessage() {
         return mMessage;
     }
+
+    /**
+     * Returns the attribute corresponding to the specified key
+     *
+     * @return one of the attributes or null if no mapping for
+     * the key is found
+     */
+    public Object getAttribute(String key) {
+        return mAttributes.get(key);
+    }
 }
 
diff --git a/drm/java/android/drm/DrmInfoEvent.java b/drm/java/android/drm/DrmInfoEvent.java
index 72f37ea..190199a 100644
--- a/drm/java/android/drm/DrmInfoEvent.java
+++ b/drm/java/android/drm/DrmInfoEvent.java
@@ -16,6 +16,8 @@
 
 package android.drm;
 
+import java.util.HashMap;
+
 /**
  * This is an entity class which would be passed to caller in
  * {@link DrmManagerClient.OnInfoListener#onInfo(DrmManagerClient, DrmInfoEvent)}
@@ -54,11 +56,25 @@
      * constructor to create DrmInfoEvent object with given parameters
      *
      * @param uniqueId Unique session identifier
-     * @param type Type of information
+     * @param type Type of the event. It could be one of the types defined above
      * @param message Message description
      */
     public DrmInfoEvent(int uniqueId, int type, String message) {
         super(uniqueId, type, message);
     }
+
+    /**
+     * constructor to create DrmInfoEvent object with given parameters
+     *
+     * @param uniqueId Unique session identifier
+     * @param type Type of the event. It could be one of the types defined above
+     * @param message Message description
+     * @param attributes Attributes for extensible information. Could be any
+     * information provided by the plugin
+     */
+    public DrmInfoEvent(int uniqueId, int type, String message,
+                            HashMap<String, Object> attributes) {
+        super(uniqueId, type, message, attributes);
+    }
 }
 
diff --git a/drm/java/android/drm/DrmManagerClient.java b/drm/java/android/drm/DrmManagerClient.java
index aa56159..f7479b5 100644
--- a/drm/java/android/drm/DrmManagerClient.java
+++ b/drm/java/android/drm/DrmManagerClient.java
@@ -81,10 +81,8 @@
          *
          * @param client DrmManagerClient instance
          * @param event instance which wraps type and message
-         * @param attributes resultant values in key and value pair.
          */
-        public void onEvent(DrmManagerClient client, DrmEvent event,
-                HashMap<String, Object> attributes);
+        public void onEvent(DrmManagerClient client, DrmEvent event);
     }
 
     /**
@@ -128,12 +126,17 @@
             case ACTION_PROCESS_DRM_INFO: {
                 final DrmInfo drmInfo = (DrmInfo) msg.obj;
                 DrmInfoStatus status = _processDrmInfo(mUniqueId, drmInfo);
+
+                attributes.put(DrmEvent.DRM_INFO_STATUS_OBJECT, status);
+                attributes.put(DrmEvent.DRM_INFO_OBJECT, drmInfo);
+
                 if (null != status && DrmInfoStatus.STATUS_OK == status.statusCode) {
-                    attributes.put(DrmEvent.DRM_INFO_STATUS_OBJECT, status);
-                    event = new DrmEvent(mUniqueId, getEventType(status.infoType), null);
+                    event = new DrmEvent(mUniqueId,
+                            getEventType(status.infoType), null, attributes);
                 } else {
                     int infoType = (null != status) ? status.infoType : drmInfo.getInfoType();
-                    error = new DrmErrorEvent(mUniqueId, getErrorType(infoType), null);
+                    error = new DrmErrorEvent(mUniqueId,
+                            getErrorType(infoType), null, attributes);
                 }
                 break;
             }
@@ -151,7 +154,7 @@
                 return;
             }
             if (null != mOnEventListener && null != event) {
-                mOnEventListener.onEvent(DrmManagerClient.this, event, attributes);
+                mOnEventListener.onEvent(DrmManagerClient.this, event);
             }
             if (null != mOnErrorListener && null != error) {
                 mOnErrorListener.onError(DrmManagerClient.this, error);
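
With the attributes map folded into DrmEvent, listeners read extensible data off the event itself rather than from a separate HashMap argument. A minimal sketch of a client-side listener under the new onEvent(client, event) signature; the casts and null checks are illustrative, not mandated by the API:

    DrmManagerClient.OnEventListener listener = new DrmManagerClient.OnEventListener() {
        @Override
        public void onEvent(DrmManagerClient client, DrmEvent event) {
            // Attributes now ride on the event; keys are the DrmEvent constants.
            DrmInfoStatus status =
                    (DrmInfoStatus) event.getAttribute(DrmEvent.DRM_INFO_STATUS_OBJECT);
            DrmInfo info = (DrmInfo) event.getAttribute(DrmEvent.DRM_INFO_OBJECT);
            if (status != null && status.statusCode == DrmInfoStatus.STATUS_OK) {
                // handle the processed DrmInfo / rights here
            }
        }
    };
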
diff --git a/graphics/java/android/renderscript/BaseObj.java b/graphics/java/android/renderscript/BaseObj.java
index 669beac..8ce1d9a 100644
--- a/graphics/java/android/renderscript/BaseObj.java
+++ b/graphics/java/android/renderscript/BaseObj.java
@@ -75,11 +75,17 @@
      * @param name The name to assign to the object.
      */
     public void setName(String name) {
+        if (name == null) {
+            throw new RSIllegalArgumentException(
+                "setName requires a string of non-zero length.");
+        }
         if(name.length() < 1) {
-            throw new RSIllegalArgumentException("setName does not accept a zero length string.");
+            throw new RSIllegalArgumentException(
+                "setName does not accept a zero length string.");
         }
         if(mName != null) {
-            throw new RSIllegalArgumentException("setName object already has a name.");
+            throw new RSIllegalArgumentException(
+                "setName object already has a name.");
         }
 
         try {
@@ -106,9 +112,9 @@
     }
 
     /**
-     * destroy disconnects the object from the native object effectivly
+     * destroy disconnects the object from the native object effectively
      * rendering this java object dead.  The primary use is to force immediate
-     * cleanup of resources when its believed the GC will not respond quickly
+     * cleanup of resources when it is believed the GC will not respond quickly
      * enough.
      */
     synchronized public void destroy() {
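
The added guard makes setName() reject null explicitly instead of failing with a NullPointerException on name.length(). The resulting contract from the caller's side, sketched with an illustrative Allocation (rs is an existing RenderScript instance):

    Allocation lut = Allocation.createSized(rs, Element.U8(rs), 256);
    lut.setName("lut");       // ok: non-empty name on a not-yet-named object
    // lut.setName(null);     // throws RSIllegalArgumentException (new explicit check)
    // lut.setName("");       // throws RSIllegalArgumentException (zero length)
    // lut.setName("lut2");   // throws RSIllegalArgumentException (already named)
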
diff --git a/graphics/java/android/renderscript/RenderScript.java b/graphics/java/android/renderscript/RenderScript.java
index 1b10c5c..b51279a 100644
--- a/graphics/java/android/renderscript/RenderScript.java
+++ b/graphics/java/android/renderscript/RenderScript.java
@@ -83,17 +83,17 @@
                  int alphaMin, int alphaPref,
                  int depthMin, int depthPref,
                  int stencilMin, int stencilPref,
-                 int samplesMin, int samplesPref, float samplesQ);
+                 int samplesMin, int samplesPref, float samplesQ, int dpi);
     synchronized int nContextCreateGL(int dev, int ver,
                  int colorMin, int colorPref,
                  int alphaMin, int alphaPref,
                  int depthMin, int depthPref,
                  int stencilMin, int stencilPref,
-                 int samplesMin, int samplesPref, float samplesQ) {
+                 int samplesMin, int samplesPref, float samplesQ, int dpi) {
         return rsnContextCreateGL(dev, ver, colorMin, colorPref,
                                   alphaMin, alphaPref, depthMin, depthPref,
                                   stencilMin, stencilPref,
-                                  samplesMin, samplesPref, samplesQ);
+                                  samplesMin, samplesPref, samplesQ, dpi);
     }
     native int  rsnContextCreate(int dev, int ver);
     synchronized int nContextCreate(int dev, int ver) {
diff --git a/graphics/java/android/renderscript/RenderScriptGL.java b/graphics/java/android/renderscript/RenderScriptGL.java
index 4359795..d4b5434 100644
--- a/graphics/java/android/renderscript/RenderScriptGL.java
+++ b/graphics/java/android/renderscript/RenderScriptGL.java
@@ -165,13 +165,14 @@
         mWidth = 0;
         mHeight = 0;
         mDev = nDeviceCreate();
+        int dpi = ctx.getResources().getDisplayMetrics().densityDpi;
         mContext = nContextCreateGL(mDev, 0,
                                     mSurfaceConfig.mColorMin, mSurfaceConfig.mColorPref,
                                     mSurfaceConfig.mAlphaMin, mSurfaceConfig.mAlphaPref,
                                     mSurfaceConfig.mDepthMin, mSurfaceConfig.mDepthPref,
                                     mSurfaceConfig.mStencilMin, mSurfaceConfig.mStencilPref,
                                     mSurfaceConfig.mSamplesMin, mSurfaceConfig.mSamplesPref,
-                                    mSurfaceConfig.mSamplesQ);
+                                    mSurfaceConfig.mSamplesQ, dpi);
         if (mContext == 0) {
             throw new RSDriverException("Failed to create RS context.");
         }
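
The GL context is now created with the display density, so native font sizing can use the real DPI instead of the hard-coded 96 (see the rsFont.cpp hunk later in this change). The dpi value is taken straight from the platform DisplayMetrics; a small sketch of that lookup, where ctx is any valid Context:

    // Density of the current display in dots per inch (160 for mdpi, 240 for hdpi, ...).
    DisplayMetrics metrics = ctx.getResources().getDisplayMetrics();
    int dpi = metrics.densityDpi;
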
diff --git a/graphics/jni/android_renderscript_RenderScript.cpp b/graphics/jni/android_renderscript_RenderScript.cpp
index f86343a..2afd74c 100644
--- a/graphics/jni/android_renderscript_RenderScript.cpp
+++ b/graphics/jni/android_renderscript_RenderScript.cpp
@@ -164,7 +164,8 @@
                  int alphaMin, int alphaPref,
                  int depthMin, int depthPref,
                  int stencilMin, int stencilPref,
-                 int samplesMin, int samplesPref, float samplesQ)
+                 int samplesMin, int samplesPref, float samplesQ,
+                 int dpi)
 {
     RsSurfaceConfig sc;
     sc.alphaMin = alphaMin;
@@ -178,7 +179,7 @@
     sc.samplesQ = samplesQ;
 
     LOG_API("nContextCreateGL");
-    return (jint)rsContextCreateGL((RsDevice)dev, ver, sc);
+    return (jint)rsContextCreateGL((RsDevice)dev, ver, sc, dpi);
 }
 
 static void
@@ -1213,7 +1214,7 @@
 
 // All methods below are thread protected in java.
 {"rsnContextCreate",                 "(II)I",                                 (void*)nContextCreate },
-{"rsnContextCreateGL",               "(IIIIIIIIIIIIF)I",                      (void*)nContextCreateGL },
+{"rsnContextCreateGL",               "(IIIIIIIIIIIIFI)I",                     (void*)nContextCreateGL },
 {"rsnContextFinish",                 "(I)V",                                  (void*)nContextFinish },
 {"rsnContextSetPriority",            "(II)V",                                 (void*)nContextSetPriority },
 {"rsnContextSetSurface",             "(IIILandroid/view/Surface;)V",          (void*)nContextSetSurface },
diff --git a/include/drm/DrmInfoEvent.h b/include/drm/DrmInfoEvent.h
index add33d3..dfca228 100644
--- a/include/drm/DrmInfoEvent.h
+++ b/include/drm/DrmInfoEvent.h
@@ -77,7 +77,7 @@
      * @param[in] infoType Type of information
      * @param[in] message Message description
      */
-    DrmInfoEvent(int uniqueId, int infoType, const String8& message);
+    DrmInfoEvent(int uniqueId, int infoType, const String8 message);
 
     /**
      * Destructor for DrmInfoEvent
@@ -104,12 +104,12 @@
      *
      * @return Message description
      */
-    const String8& getMessage() const;
+    const String8 getMessage() const;
 
 private:
     int mUniqueId;
     int mInfoType;
-    const String8& mMessage;
+    const String8 mMessage;
 };
 
 };
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index a969796..f13e9bb 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #ifndef A_CODEC_H_
 
 #define A_CODEC_H_
diff --git a/include/media/stagefright/MediaDebug.h b/include/media/stagefright/MediaDebug.h
index c8a8f00e7..2ca9667 100644
--- a/include/media/stagefright/MediaDebug.h
+++ b/include/media/stagefright/MediaDebug.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #ifndef MEDIA_DEBUG_H_
 
 #define MEDIA_DEBUG_H_
diff --git a/include/media/stagefright/foundation/AHandlerReflector.h b/include/media/stagefright/foundation/AHandlerReflector.h
index 857866a..9d201b5 100644
--- a/include/media/stagefright/foundation/AHandlerReflector.h
+++ b/include/media/stagefright/foundation/AHandlerReflector.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #ifndef A_HANDLER_REFLECTOR_H_
 
 #define A_HANDLER_REFLECTOR_H_
diff --git a/include/media/stagefright/foundation/AHierarchicalStateMachine.h b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
index b5786fb..d2e6b28 100644
--- a/include/media/stagefright/foundation/AHierarchicalStateMachine.h
+++ b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #ifndef A_HIERARCHICAL_STATE_MACHINE_H_
 
 #define A_HIERARCHICAL_STATE_MACHINE_H_
diff --git a/include/private/hwui/DrawGlInfo.h b/include/private/hwui/DrawGlInfo.h
new file mode 100644
index 0000000..1e9912b
--- /dev/null
+++ b/include/private/hwui/DrawGlInfo.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HWUI_DRAW_GL_INFO_H
+#define ANDROID_HWUI_DRAW_GL_INFO_H
+
+namespace android {
+namespace uirenderer {
+
+/**
+ * Structure used by OpenGLRenderer::callDrawGLFunction() to pass and
+ * receive data from OpenGL functors.
+ */
+struct DrawGlInfo {
+    // Input: current clip rect
+    int clipLeft;
+    int clipTop;
+    int clipRight;
+    int clipBottom;
+
+    // Input: is the render target an FBO
+    bool isLayer;
+
+    // Input: current transform matrix, in OpenGL format
+    float transform[16];
+
+    // Output: dirty region to redraw
+    float dirtyLeft;
+    float dirtyTop;
+    float dirtyRight;
+    float dirtyBottom;
+}; // struct DrawGlInfo
+
+}; // namespace uirenderer
+}; // namespace android
+
+#endif // ANDROID_HWUI_DRAW_GL_INFO_H
diff --git a/include/ui/KeycodeLabels.h b/include/ui/KeycodeLabels.h
index b912e9b..8383957 100755
--- a/include/ui/KeycodeLabels.h
+++ b/include/ui/KeycodeLabels.h
@@ -228,6 +228,9 @@
     { "BUTTON_14", 201 },
     { "BUTTON_15", 202 },
     { "BUTTON_16", 203 },
+    { "LANGUAGE_SWITCH", 204 },
+    { "MANNER_MODE", 205 },
+    { "3D_MODE", 206 },
 
     // NOTE: If you add a new keycode here you must also add it to several other files.
     //       Refer to frameworks/base/core/java/android/view/KeyEvent.java for the full list.
diff --git a/include/utils/Functor.h b/include/utils/Functor.h
index 565f4a3..e24ded4 100644
--- a/include/utils/Functor.h
+++ b/include/utils/Functor.h
@@ -25,8 +25,7 @@
 public:
     Functor() {}
     virtual ~Functor() {}
-    virtual status_t operator ()() { return true; }
-    virtual status_t operator ()(float* data, uint32_t len) { return true; }
+    virtual status_t operator ()(int what, void* data) { return NO_ERROR; }
 };
 
 }; // namespace android
diff --git a/include/utils/Vector.h b/include/utils/Vector.h
index ec851bd..6fd307f 100644
--- a/include/utils/Vector.h
+++ b/include/utils/Vector.h
@@ -162,6 +162,9 @@
      inline status_t        sort(compar_t cmp);
      inline status_t        sort(compar_r_t cmp, void* state);
 
+     // for debugging only
+     inline size_t getItemSize() const { return itemSize(); }
+
 protected:
     virtual void    do_construct(void* storage, size_t num) const;
     virtual void    do_destroy(void* storage, size_t num) const;
diff --git a/libs/hwui/OpenGLRenderer.cpp b/libs/hwui/OpenGLRenderer.cpp
index e01e072..d9d7d23 100644
--- a/libs/hwui/OpenGLRenderer.cpp
+++ b/libs/hwui/OpenGLRenderer.cpp
@@ -26,6 +26,8 @@
 #include <utils/Log.h>
 #include <utils/StopWatch.h>
 
+#include <private/hwui/DrawGlInfo.h>
+
 #include <ui/Rect.h>
 
 #include "OpenGLRenderer.h"
@@ -216,21 +218,29 @@
         setScissorFromClip();
     }
 
+    Rect clip(*mSnapshot->clipRect);
+    clip.snapToPixelBoundaries();
+
 #if RENDER_LAYERS_AS_REGIONS
     // Since we don't know what the functor will draw, let's dirty
     // the entire clip region
     if (hasLayer()) {
-        Rect clip(*mSnapshot->clipRect);
-        clip.snapToPixelBoundaries();
         dirtyLayerUnchecked(clip, getRegion());
     }
 #endif
 
-    float bounds[4];
-    status_t result = (*functor)(&bounds[0], 4);
+    DrawGlInfo info;
+    info.clipLeft = clip.left;
+    info.clipTop = clip.top;
+    info.clipRight = clip.right;
+    info.clipBottom = clip.bottom;
+    info.isLayer = hasLayer();
+    getSnapshot()->transform->copyTo(&info.transform[0]);
+
+    status_t result = (*functor)(0, &info);
 
     if (result != 0) {
-        Rect localDirty(bounds[0], bounds[1], bounds[2], bounds[3]);
+        Rect localDirty(info.dirtyLeft, info.dirtyTop, info.dirtyRight, info.dirtyBottom);
         dirty.unionWith(localDirty);
     }
 
diff --git a/libs/rs/RenderScript.h b/libs/rs/RenderScript.h
index bb5e4aa..ffa9a8c 100644
--- a/libs/rs/RenderScript.h
+++ b/libs/rs/RenderScript.h
@@ -76,7 +76,8 @@
 void rsDeviceSetConfig(RsDevice, RsDeviceParam, int32_t value);
 
 RsContext rsContextCreate(RsDevice, uint32_t version);
-RsContext rsContextCreateGL(RsDevice, uint32_t version, RsSurfaceConfig sc);
+RsContext rsContextCreateGL(RsDevice, uint32_t version,
+                            RsSurfaceConfig sc, uint32_t dpi);
 void rsContextDestroy(RsContext);
 
 enum RsMessageToClientType {
diff --git a/libs/rs/rsContext.cpp b/libs/rs/rsContext.cpp
index f9e29f1..a7c0180 100644
--- a/libs/rs/rsContext.cpp
+++ b/libs/rs/rsContext.cpp
@@ -429,6 +429,8 @@
     mStateFont.getFontColor(&oldR, &oldG, &oldB, &oldA);
     uint32_t bufferLen = strlen(buffer);
 
+    ObjectBaseRef<Font> lastFont(getFont());
+    setFont(NULL);
     float shadowCol = 0.1f;
     mStateFont.setFontColor(shadowCol, shadowCol, shadowCol, 1.0f);
     mStateFont.renderText(buffer, bufferLen, 5, getHeight() - 6);
@@ -436,6 +438,7 @@
     mStateFont.setFontColor(1.0f, 0.7f, 0.0f, 1.0f);
     mStateFont.renderText(buffer, bufferLen, 4, getHeight() - 7);
 
+    setFont(lastFont.get());
     mStateFont.setFontColor(oldR, oldG, oldB, oldA);
 }
 
@@ -630,6 +633,7 @@
     mPaused = false;
     mObjHead = NULL;
     mError = RS_ERROR_NONE;
+    mDPI = 96;
 }
 
 Context * Context::createContext(Device *dev, const RsSurfaceConfig *sc) {
@@ -1078,10 +1082,12 @@
     return rsc;
 }
 
-RsContext rsContextCreateGL(RsDevice vdev, uint32_t version, RsSurfaceConfig sc) {
+RsContext rsContextCreateGL(RsDevice vdev, uint32_t version,
+                            RsSurfaceConfig sc, uint32_t dpi) {
     LOGV("rsContextCreateGL %p", vdev);
     Device * dev = static_cast<Device *>(vdev);
     Context *rsc = Context::createContext(dev, &sc);
+    rsc->setDPI(dpi);
     LOGV("rsContextCreateGL ret %p ", rsc);
     return rsc;
 }
diff --git a/libs/rs/rsContext.h b/libs/rs/rsContext.h
index c5e32a6..50f63df 100644
--- a/libs/rs/rsContext.h
+++ b/libs/rs/rsContext.h
@@ -218,6 +218,8 @@
 
     void launchThreads(WorkerCallback_t cbk, void *data);
     uint32_t getWorkerPoolSize() const {return (uint32_t)mWorkers.mCount;}
+    uint32_t getDPI() const {return mDPI;}
+    void setDPI(uint32_t dpi) {mDPI = dpi;}
 
 protected:
     Device *mDev;
@@ -258,6 +260,7 @@
         float EXT_texture_max_aniso;
     } mGL;
 
+    uint32_t mDPI;
     uint32_t mWidth;
     uint32_t mHeight;
     int32_t mThreadPriority;
diff --git a/libs/rs/rsFont.cpp b/libs/rs/rsFont.cpp
index 1c1bc98..01dbab8 100644
--- a/libs/rs/rsFont.cpp
+++ b/libs/rs/rsFont.cpp
@@ -733,7 +733,7 @@
             String8 fullPath(getenv("ANDROID_ROOT"));
             fullPath += fontsDir;
 
-            mDefault.set(Font::create(mRSC, fullPath.string(), 16, 96));
+            mDefault.set(Font::create(mRSC, fullPath.string(), 8, mRSC->getDPI()));
         }
         currentFont = mDefault.get();
     }
diff --git a/libs/ui/Region.cpp b/libs/ui/Region.cpp
index 1994f6a..a060a5f 100644
--- a/libs/ui/Region.cpp
+++ b/libs/ui/Region.cpp
@@ -56,6 +56,9 @@
 Region::Region(const Region& rhs)
     : mBounds(rhs.mBounds), mStorage(rhs.mStorage)
 {
+#if VALIDATE_REGIONS
+    validate(rhs, "rhs copy-ctor");
+#endif
 }
 
 Region::Region(const Rect& rhs)
@@ -76,7 +79,8 @@
 Region& Region::operator = (const Region& rhs)
 {
 #if VALIDATE_REGIONS
-    validate(rhs, "operator=");
+    validate(*this, "this->operator=");
+    validate(rhs, "rhs.operator=");
 #endif
     mBounds = rhs.mBounds;
     mStorage = rhs.mStorage;
@@ -366,6 +370,12 @@
         const Region& lhs,
         const Region& rhs, int dx, int dy)
 {
+#if VALIDATE_REGIONS
+    validate(lhs, "boolean_operation (before): lhs");
+    validate(rhs, "boolean_operation (before): rhs");
+    validate(dst, "boolean_operation (before): dst");
+#endif
+
     size_t lhs_count;
     Rect const * const lhs_rects = lhs.getArray(&lhs_count);
 
diff --git a/media/java/android/media/videoeditor/VideoEditorImpl.java b/media/java/android/media/videoeditor/VideoEditorImpl.java
index 78557ee..2105deb 100755
--- a/media/java/android/media/videoeditor/VideoEditorImpl.java
+++ b/media/java/android/media/videoeditor/VideoEditorImpl.java
@@ -904,6 +904,10 @@
             throw new IllegalArgumentException("Surface could not be retrieved from Surface holder");
         }
 
+        if (surface.isValid() == false) {
+            throw new IllegalStateException("Surface is not valid");
+        }
+
         if (timeMs < 0) {
             throw new IllegalArgumentException("requested time not correct");
         } else if (timeMs > mDurationMs) {
@@ -1627,6 +1631,10 @@
             throw new IllegalArgumentException("Surface could not be retrieved from surface holder");
         }
 
+        if (surface.isValid() == false) {
+            throw new IllegalStateException("Surface is not valid");
+        }
+
         if (listener == null) {
             throw new IllegalArgumentException();
         }
@@ -1863,6 +1871,10 @@
             throw new IllegalArgumentException("Surface could not be retrieved from surface holder");
         }
 
+        if (surface.isValid() == false) {
+            throw new IllegalStateException("Surface is not valid");
+        }
+
         if (mMANativeHelper != null) {
             mMANativeHelper.clearPreviewSurface(surface);
         } else {
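
All three call sites above now validate the Surface before handing it to native code, since a SurfaceHolder can return a surface that has already been destroyed. The shared guard, sketched on its own (holder is the SurfaceHolder supplied by the caller):

    Surface surface = holder.getSurface();
    if (surface == null) {
        throw new IllegalArgumentException("Surface could not be retrieved from Surface holder");
    }
    if (!surface.isValid()) {
        // The surface was torn down (e.g. the view was detached) after the holder was obtained.
        throw new IllegalStateException("Surface is not valid");
    }
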
diff --git a/media/java/android/mtp/MtpServer.java b/media/java/android/mtp/MtpServer.java
index fe734e1..006fa6d 100644
--- a/media/java/android/mtp/MtpServer.java
+++ b/media/java/android/mtp/MtpServer.java
@@ -24,6 +24,9 @@
  */
 public class MtpServer {
 
+    private final Object mLock = new Object();
+    private boolean mStarted;
+
     private static final String TAG = "MtpServer";
 
     static {
@@ -35,11 +38,19 @@
     }
 
     public void start() {
-        native_start();
+        synchronized (mLock) {
+            native_start();
+            mStarted = true;
+        }
     }
 
     public void stop() {
-        native_stop();
+        synchronized (mLock) {
+            if (mStarted) {
+                native_stop();
+                mStarted = false;
+            }
+        }
     }
 
     public void sendObjectAdded(int handle) {
diff --git a/media/jni/mediaeditor/VideoEditorMain.cpp b/media/jni/mediaeditor/VideoEditorMain.cpp
index 8cda14e..11e2a5e 100755
--- a/media/jni/mediaeditor/VideoEditorMain.cpp
+++ b/media/jni/mediaeditor/VideoEditorMain.cpp
@@ -557,6 +557,10 @@
 
     Surface* const p = (Surface*)pEnv->GetIntField(surface, surface_native);
     sp<Surface> previewSurface = sp<Surface>(p);
+    // Validate the mSurface's mNativeSurface field
+    videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+                                                (NULL == previewSurface.get()),
+                                                "mNativeSurface is null");
 
     frameStr.pBuffer = M4OSA_NULL;
     frameStr.timeMs = 0;
@@ -634,6 +638,10 @@
 
     Surface* const p = (Surface*)pEnv->GetIntField(mSurface, surface_native);
     sp<Surface> previewSurface = sp<Surface>(p);
+    // Validate the mSurface's mNativeSurface field
+    videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+                                                (NULL == previewSurface.get()),
+                                                "mNativeSurface is null");
 
     /* Determine the total number of clips, total duration*/
     uiNumberOfClipsInStoryBoard = pContext->pEditSettings->uiClipNumber;
@@ -2058,6 +2066,10 @@
     Surface* const p = (Surface*)pEnv->GetIntField(mSurface, surface_native);
 
     sp<Surface> previewSurface = sp<Surface>(p);
+    // Validate the mSurface's mNativeSurface field
+    videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+                                                (NULL == previewSurface.get()),
+                                                "mNativeSurface is null");
 
     result =  pContext->mPreviewController->setSurface(previewSurface);
     videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index d6a1757..d3aab08 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (c) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "IOMX"
 #include <utils/Log.h>
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index ca5bc38..e6f3a33 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -722,8 +722,10 @@
     profiles->mCamcorderProfiles.add(highTimeLapseProfile);
     profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
 
-    // We only have the back-facing camera support by default.
-    profiles->mCameraIds.add(0);
+    // For the emulator and other legacy devices which do not have a
+    // media_profiles.xml file, we assume that the default camera id
+    // is 0 and that it is the only camera available.
+    profiles->mCameraIds.push(0);
 }
 
 /*static*/ void
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index e277121..c5cbd23 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "StagefrightPlayer"
 #include <utils/Log.h>
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d590ab9..346d0bb 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACodec"
 
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 0de1988..7a00d7a 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -750,8 +750,6 @@
     mFlags |= PLAYING;
     mFlags |= FIRST_FRAME;
 
-    bool deferredAudioSeek = false;
-
     if (mDecryptHandle != NULL) {
         int64_t position;
         getPosition(&position);
@@ -767,10 +765,11 @@
 
                 mTimeSource = mAudioPlayer;
 
-                deferredAudioSeek = true;
-
-                mWatchForAudioSeekComplete = false;
-                mWatchForAudioEOS = true;
+                // If there was a seek request before we ever started,
+                // honor the request now.
+                // Make sure to do this before starting the audio player
+                // to avoid a race condition.
+                seekAudioIfNecessary_l();
             }
         }
 
@@ -808,12 +807,6 @@
         }
     }
 
-    if (deferredAudioSeek) {
-        // If there was a seek request while we were paused
-        // and we're just starting up again, honor the request now.
-        seekAudioIfNecessary_l();
-    }
-
     if (mFlags & AT_EOS) {
         // Legacy behaviour, if a stream finishes playing and then
         // is started again, we play from the start...
@@ -1126,7 +1119,6 @@
 
         mWatchForAudioSeekComplete = true;
         mWatchForAudioEOS = true;
-        mSeekNotificationSent = false;
 
         if (mDecryptHandle != NULL) {
             mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
@@ -1251,11 +1243,11 @@
         // If we're playing video only, report seek complete now,
         // otherwise audio player will notify us later.
         notifyListener_l(MEDIA_SEEK_COMPLETE);
+        mSeekNotificationSent = true;
     }
 
     mFlags |= FIRST_FRAME;
     mSeeking = NO_SEEK;
-    mSeekNotificationSent = false;
 
     if (mDecryptHandle != NULL) {
         mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
diff --git a/media/libstagefright/codecs/avc/common/include/avc_types.h b/media/libstagefright/codecs/avc/common/include/avc_types.h
index 73cad89..ec8b6de 100644
--- a/media/libstagefright/codecs/avc/common/include/avc_types.h
+++ b/media/libstagefright/codecs/avc/common/include/avc_types.h
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 #ifndef AVC_TYPES_H_
 
 #define AVC_TYPES_H_
diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
index 30286d8..3b3f786 100644
--- a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
+++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index efe7496..fe74a42 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 #ifndef MPEG2_TS_EXTRACTOR_H_
 
 #define MPEG2_TS_EXTRACTOR_H_
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaTestUtil.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaTestUtil.java
index 0183b5d..beb2927 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaTestUtil.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaTestUtil.java
@@ -17,9 +17,13 @@
 package com.android.mediaframeworktest;
 
 import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Writer;
 
 import android.os.Debug;
 import android.os.Environment;
+import android.util.Log;
 
 /**
  *
@@ -30,10 +34,13 @@
     private MediaTestUtil(){
     }
 
+    private static String TAG = "MediaTestUtil";
     private static final String STORAGE_PATH =
         Environment.getExternalStorageDirectory().toString();
+    private static int mMediaStartMemory = 0;
+    private static int mDrmStartMemory = 0;
 
-    //Catpure the heapdump for memory leaksage analysis\
+    //Capture the heapdump for memory leakage analysis
     public static void getNativeHeapDump (String name) throws Exception {
         System.gc();
         System.runFinalization();
@@ -42,4 +49,103 @@
         Debug.dumpNativeHeap(o.getFD());
         o.close();
     }
-}
\ No newline at end of file
+
+    public static String captureMemInfo(String type) {
+        String cm = "ps ";
+        cm += type;
+        String memoryUsage = null;
+
+        int ch;
+        try {
+            Process p = Runtime.getRuntime().exec(cm);
+            InputStream in = p.getInputStream();
+            StringBuffer sb = new StringBuffer(512);
+            while ((ch = in.read()) != -1) {
+                sb.append((char) ch);
+            }
+            memoryUsage = sb.toString();
+        } catch (IOException e) {
+            Log.v(TAG, e.toString());
+        }
+        String[] poList = memoryUsage.split("\r|\n|\r\n");
+        String memusage = poList[1].concat("\n");
+        return memusage;
+    }
+
+    public static int getMediaServerVsize() {
+        String memoryUsage = captureMemInfo("mediaserver");
+        String[] poList2 = memoryUsage.split("\t|\\s+");
+        String vsize = poList2[3];
+        int vsizevalue = Integer.parseInt(vsize);
+        Log.v(TAG, "VSIZE = " + vsizevalue);
+        return vsizevalue;
+    }
+
+    public static int getDrmServerVsize() {
+        String memoryUsage = captureMemInfo("drmserver");
+        String[] poList2 = memoryUsage.split("\t|\\s+");
+        String vsize = poList2[3];
+        int vsizevalue = Integer.parseInt(vsize);
+        Log.v(TAG, "VSIZE = " + vsizevalue);
+        return vsizevalue;
+    }
+
+    // Write the ps mediaserver output to the file
+    public static void getMediaServerMemoryLog(Writer output, int writeCount, int totalCount)
+            throws Exception {
+        String memusage = null;
+
+        if (writeCount == 0) {
+            mMediaStartMemory = getMediaServerVsize();
+            output.write("Start memory : " + mMediaStartMemory + "\n");
+        }
+        memusage = captureMemInfo("mediaserver");
+        output.write(memusage);
+    }
+
+    // Write the ps drmserver output to the file
+    public static void getDrmServerMemoryLog(Writer output, int writeCount, int totalCount)
+            throws Exception {
+        String memusage = null;
+
+        if (writeCount == 0) {
+            mDrmStartMemory = getDrmServerVsize();
+            output.write("Start memory : " + mDrmStartMemory + "\n");
+        }
+        memusage = captureMemInfo("drmserver");
+        output.write(memusage);
+    }
+
+    // Write the drm server memory summary to the file
+    public static void getDrmServerMemorySummary(Writer output, String tag) throws Exception {
+
+        getTestMemorySummary(output, tag, "drmMem");
+    }
+
+    // Write the media server memory summary to the file
+    public static void getMediaServerMemorySummary(Writer output, String tag) throws Exception {
+
+        getTestMemorySummary(output, tag, "mediaMem");
+    }
+
+    public static void getTestMemorySummary(Writer output, String tag, String type)
+            throws Exception {
+
+        int endMemory = 0;
+        int memDiff = 0;
+
+        if ("mediaMem".equals(type)) {
+            endMemory = getMediaServerVsize();
+            memDiff = endMemory - mMediaStartMemory;
+        } else if ("drmMem".equals(type)) {
+            endMemory = getDrmServerVsize();
+            memDiff = endMemory - mDrmStartMemory;
+        }
+        output.write("End Memory :" + endMemory + "\n");
+        if (memDiff < 0) {
+            memDiff = 0;
+        }
+        output.write(tag + " total diff = " + memDiff);
+        output.write("\n\n");
+    }
+}
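
The new helpers are meant to be driven from a memory stress test: one ps snapshot per iteration, then a start/end/diff summary at the end. A sketch of the intended call pattern; the output path, tag and iteration count are illustrative:

    Writer output = new FileWriter("/sdcard/mediaMemOutput.txt", true);
    int iterations = 10;
    for (int i = 0; i < iterations; i++) {
        // ... exercise the media framework here ...
        MediaTestUtil.getMediaServerMemoryLog(output, i, iterations);
    }
    MediaTestUtil.getMediaServerMemorySummary(output, "testMediaMemoryStress");
    output.close();
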
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/VideoEditorExportTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/VideoEditorExportTest.java
index 37b1f54..74d4766 100755
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/VideoEditorExportTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/VideoEditorExportTest.java
@@ -44,6 +44,7 @@
 
 import com.android.mediaframeworktest.MediaFrameworkTest;
 import android.test.suitebuilder.annotation.LargeTest;
+import android.test.suitebuilder.annotation.Suppress;
 import com.android.mediaframeworktest.VideoEditorHelper;
 
 public class VideoEditorExportTest extends
@@ -701,6 +702,7 @@
      *
      * @throws Exception
      */
+    @Suppress
     @LargeTest
     public void testExportDuration1Hour() throws Exception {
         final String videoItemFilename1 = INPUT_FILE_PATH +
diff --git a/native/include/android/keycodes.h b/native/include/android/keycodes.h
index c4a7eff..5d49775 100644
--- a/native/include/android/keycodes.h
+++ b/native/include/android/keycodes.h
@@ -247,6 +247,9 @@
     AKEYCODE_BUTTON_14       = 201,
     AKEYCODE_BUTTON_15       = 202,
     AKEYCODE_BUTTON_16       = 203,
+    AKEYCODE_LANGUAGE_SWITCH = 204,
+    AKEYCODE_MANNER_MODE     = 205,
+    AKEYCODE_3D_MODE         = 206,
 
     // NOTE: If you add a new keycode here you must also add it to several other files.
     //       Refer to frameworks/base/core/java/android/view/KeyEvent.java for the full list.
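
The same three codes are surfaced to applications as KeyEvent constants (keycodes.h, KeycodeLabels.h, attrs.xml and KeyEvent.java are kept in sync, per the note above). A minimal sketch of consuming one of them in a View or Activity; switchToNextKeyboardLayout() is a hypothetical app-side handler:

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_LANGUAGE_SWITCH) {
            switchToNextKeyboardLayout();   // app-defined, not part of this change
            return true;                    // consume the key
        }
        return super.onKeyDown(keyCode, event);
    }
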
diff --git a/opengl/libs/EGL/egl.cpp b/opengl/libs/EGL/egl.cpp
index 7ce86b3..75f7078 100644
--- a/opengl/libs/EGL/egl.cpp
+++ b/opengl/libs/EGL/egl.cpp
@@ -340,7 +340,10 @@
     }
     
     if (gEGLDebugLevel > 0)
-        StartDebugServer();
+    {
+        property_get("debug.egl.debug_port", value, "5039");
+        StartDebugServer(atoi(value));
+    }
 }
 
 static void setGLHooksThreadSpecific(gl_hooks_t const *value) {
@@ -350,7 +353,6 @@
     } else if (gEGLDebugLevel > 0 && value != &gHooksNoContext) {
         setGlTraceThreadSpecific(value);
         setGlThreadSpecific(&gHooksDebug);
-        LOGD("\n* setGLHooksThreadSpecific gHooksDebug");
     } else {
         setGlThreadSpecific(value);
     }
@@ -2119,14 +2121,15 @@
     if (!validate_display_context(dpy, ctx))
         return EGL_FALSE;
 
+    EGLBoolean result = EGL_FALSE;
     egl_context_t * const c = get_context(ctx);
-
     if (c->cnx->egl.eglDestroySyncKHR) {
-        return c->cnx->egl.eglDestroySyncKHR(
+        result = c->cnx->egl.eglDestroySyncKHR(
                 dp->disp[c->impl].dpy, syncObject->sync);
+        if (result)
+            _s.terminate();
     }
-
-    return EGL_FALSE;
+    return result;
 }
 
 EGLint eglClientWaitSyncKHR(EGLDisplay dpy, EGLSyncKHR sync, EGLint flags, EGLTimeKHR timeout)
diff --git a/opengl/libs/GLES2_dbg/generate_debugger_message_proto.py b/opengl/libs/GLES2_dbg/generate_debugger_message_proto.py
index b14885b..48a29da 100755
--- a/opengl/libs/GLES2_dbg/generate_debugger_message_proto.py
+++ b/opengl/libs/GLES2_dbg/generate_debugger_message_proto.py
@@ -98,9 +98,6 @@
     output.write("        SETPROP = %d;\n" % (i))
     i += 1
     
-    output.write("        CAPTURE = %d;\n" % (i))
-    i += 1
-
     output.write("""    }
     required Function function = 2 [default = NEG]; // type/function of message
     enum Type
diff --git a/opengl/libs/GLES2_dbg/src/dbgcontext.cpp b/opengl/libs/GLES2_dbg/src/dbgcontext.cpp
index 68514df..5c418258 100644
--- a/opengl/libs/GLES2_dbg/src/dbgcontext.cpp
+++ b/opengl/libs/GLES2_dbg/src/dbgcontext.cpp
@@ -65,8 +65,9 @@
 
 void DbgContext::glUseProgram(GLuint program)
 {
-    assert(GL_NO_ERROR == hooks->gl.glGetError());
-
+    while (GLenum error = hooks->gl.glGetError())
+        LOGD("DbgContext::glUseProgram: before glGetError() = 0x%.4X", error);
+        
     this->program = program;
 
     GLint activeAttributes = 0;
@@ -106,6 +107,9 @@
             maxAttrib = slot;
     }
     delete name;
+    
+    while (GLenum error = hooks->gl.glGetError())
+        LOGD("DbgContext::glUseProgram: after glGetError() = 0x%.4X", error);
 }
 
 static bool HasNonVBOAttribs(const DbgContext * const ctx)
diff --git a/opengl/libs/GLES2_dbg/src/debugger_message.pb.cpp b/opengl/libs/GLES2_dbg/src/debugger_message.pb.cpp
index 4083c44..1f404c2 100644
--- a/opengl/libs/GLES2_dbg/src/debugger_message.pb.cpp
+++ b/opengl/libs/GLES2_dbg/src/debugger_message.pb.cpp
@@ -229,7 +229,6 @@
     case 188:
     case 189:
     case 190:
-    case 191:
       return true;
     default:
       return false;
@@ -428,7 +427,6 @@
 const Message_Function Message::CONTINUE;
 const Message_Function Message::SKIP;
 const Message_Function Message::SETPROP;
-const Message_Function Message::CAPTURE;
 const Message_Function Message::Function_MIN;
 const Message_Function Message::Function_MAX;
 const int Message::Function_ARRAYSIZE;
diff --git a/opengl/libs/GLES2_dbg/src/debugger_message.pb.h b/opengl/libs/GLES2_dbg/src/debugger_message.pb.h
index 3f2b842..59e7bab 100644
--- a/opengl/libs/GLES2_dbg/src/debugger_message.pb.h
+++ b/opengl/libs/GLES2_dbg/src/debugger_message.pb.h
@@ -226,12 +226,11 @@
   Message_Function_NEG = 187,
   Message_Function_CONTINUE = 188,
   Message_Function_SKIP = 189,
-  Message_Function_SETPROP = 190,
-  Message_Function_CAPTURE = 191
+  Message_Function_SETPROP = 190
 };
 bool Message_Function_IsValid(int value);
 const Message_Function Message_Function_Function_MIN = Message_Function_glActiveTexture;
-const Message_Function Message_Function_Function_MAX = Message_Function_CAPTURE;
+const Message_Function Message_Function_Function_MAX = Message_Function_SETPROP;
 const int Message_Function_Function_ARRAYSIZE = Message_Function_Function_MAX + 1;
 
 enum Message_Type {
@@ -488,7 +487,6 @@
   static const Function CONTINUE = Message_Function_CONTINUE;
   static const Function SKIP = Message_Function_SKIP;
   static const Function SETPROP = Message_Function_SETPROP;
-  static const Function CAPTURE = Message_Function_CAPTURE;
   static inline bool Function_IsValid(int value) {
     return Message_Function_IsValid(value);
   }
diff --git a/opengl/libs/GLES2_dbg/src/egl.cpp b/opengl/libs/GLES2_dbg/src/egl.cpp
index b3979a3..27c7f7e 100644
--- a/opengl/libs/GLES2_dbg/src/egl.cpp
+++ b/opengl/libs/GLES2_dbg/src/egl.cpp
@@ -19,7 +19,7 @@
 EGLBoolean Debug_eglSwapBuffers(EGLDisplay dpy, EGLSurface draw)
 {
     glesv2debugger::Message msg;
-    const bool expectResponse = true;
+    const bool expectResponse = false;
     struct : public FunctionCall {
         EGLDisplay dpy;
         EGLSurface draw;
diff --git a/opengl/libs/GLES2_dbg/src/header.h b/opengl/libs/GLES2_dbg/src/header.h
index cbd448a..b79cc0f 100644
--- a/opengl/libs/GLES2_dbg/src/header.h
+++ b/opengl/libs/GLES2_dbg/src/header.h
@@ -113,7 +113,7 @@
     virtual ~FunctionCall() {}
 };
 
-// move these into DbgContext
+// move these into DbgContext as static
 extern bool capture;
 extern int timeMode; // SYSTEM_TIME_
 
@@ -121,8 +121,10 @@
 
 unsigned GetBytesPerPixel(const GLenum format, const GLenum type);
 
+// every Debug_gl* function calls this to send a message to the client and possibly receive commands
 int * MessageLoop(FunctionCall & functionCall, glesv2debugger::Message & msg,
                   const bool expectResponse, const glesv2debugger::Message_Function function);
+
 void Receive(glesv2debugger::Message & cmd);
 float Send(const glesv2debugger::Message & msg, glesv2debugger::Message & cmd);
 void SetProp(const glesv2debugger::Message & cmd);
diff --git a/opengl/libs/GLES2_dbg/src/server.cpp b/opengl/libs/GLES2_dbg/src/server.cpp
index 820e9de..03c3dae 100644
--- a/opengl/libs/GLES2_dbg/src/server.cpp
+++ b/opengl/libs/GLES2_dbg/src/server.cpp
@@ -38,7 +38,7 @@
     exit(1);
 }
 
-void StartDebugServer()
+void StartDebugServer(unsigned short port)
 {
     LOGD("GLESv2_dbg: StartDebugServer");
     if (serverSock >= 0)
@@ -53,8 +53,8 @@
     }
     /* Construct the server sockaddr_in structure */
     server.sin_family = AF_INET;                  /* Internet/IP */
-    server.sin_addr.s_addr = htonl(INADDR_ANY);   /* Incoming addr */
-    server.sin_port = htons(5039);       /* server port */
+    server.sin_addr.s_addr = htonl(INADDR_LOOPBACK);   /* Incoming addr */
+    server.sin_port = htons(port);       /* server port */
 
     /* Bind the server socket */
     socklen_t sizeofSockaddr_in = sizeof(sockaddr_in);
@@ -79,13 +79,6 @@
 
     LOGD("Client connected: %s\n", inet_ntoa(client.sin_addr));
 //    fcntl(clientSock, F_SETFL, O_NONBLOCK);
-
-    glesv2debugger::Message msg, cmd;
-    msg.set_context_id(0);
-    msg.set_function(glesv2debugger::Message_Function_ACK);
-    msg.set_type(glesv2debugger::Message_Type_Response);
-    msg.set_expect_response(false);
-    Send(msg, cmd);
 }
 
 void StopDebugServer()
@@ -130,6 +123,27 @@
     cmd.ParseFromArray(buffer, len);
 }
 
+bool TryReceive(glesv2debugger::Message & cmd)
+{
+    fd_set readSet;
+    FD_ZERO(&readSet);
+    FD_SET(clientSock, &readSet);
+    timeval timeout;
+    timeout.tv_sec = timeout.tv_usec = 0;
+
+    int rc = select(clientSock + 1, &readSet, NULL, NULL, &timeout);
+    if (rc < 0)
+        Die("failed to select clientSock");
+
+    bool received = false;
+    if (FD_ISSET(clientSock, &readSet)) {
+        LOGD("TryReceive: avaiable for read");
+        Receive(cmd);
+        return true;
+    }
+    return false;
+}
+
 float Send(const glesv2debugger::Message & msg, glesv2debugger::Message & cmd)
 {
     static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
@@ -155,12 +169,18 @@
         Die("Failed to send message");
     }
 
+    // try to receive commands even though no response is expected,
+    // since the client can send SETPROP commands at any time
     if (!msg.expect_response()) {
-        pthread_mutex_unlock(&mutex);
-        return t;
-    }
-
-    Receive(cmd);
+        if (TryReceive(cmd)) {
+            LOGD("Send: TryReceived");
+            if (glesv2debugger::Message_Function_SETPROP == cmd.function())
+                LOGD("Send: received SETPROP");
+            else
+                LOGD("Send: received something else");
+        }
+    } else
+        Receive(cmd);
 
     //LOGD("Message sent tid=%lu len=%d", pthread_self(), str.length());
     pthread_mutex_unlock(&mutex);
@@ -193,24 +213,26 @@
     msg.set_type(glesv2debugger::Message_Type_BeforeCall);
     msg.set_expect_response(expectResponse);
     msg.set_function(function);
-    Send(msg, cmd);
     if (!expectResponse)
         cmd.set_function(glesv2debugger::Message_Function_CONTINUE);
+    Send(msg, cmd);
     while (true) {
         msg.Clear();
         nsecs_t c0 = systemTime(timeMode);
         switch (cmd.function()) {
         case glesv2debugger::Message_Function_CONTINUE:
             ret = functionCall(&dbg->hooks->gl, msg);
+            while (GLenum error = dbg->hooks->gl.glGetError())
+                LOGD("Function=%u glGetError() = 0x%.4X", function, error);
             if (!msg.has_time()) // some has output data copy, so time inside call
                 msg.set_time((systemTime(timeMode) - c0) * 1e-6f);
             msg.set_context_id(reinterpret_cast<int>(dbg));
             msg.set_function(function);
             msg.set_type(glesv2debugger::Message_Type_AfterCall);
             msg.set_expect_response(expectResponse);
-            Send(msg, cmd);
             if (!expectResponse)
                 cmd.set_function(glesv2debugger::Message_Function_SKIP);
+            Send(msg, cmd);
             break;
         case glesv2debugger::Message_Function_SKIP:
             return const_cast<int *>(ret);
diff --git a/opengl/libs/GLES2_dbg/src/vertex.cpp b/opengl/libs/GLES2_dbg/src/vertex.cpp
index 52ce907..a73967f 100644
--- a/opengl/libs/GLES2_dbg/src/vertex.cpp
+++ b/opengl/libs/GLES2_dbg/src/vertex.cpp
@@ -41,10 +41,10 @@
     msg.set_arg6(reinterpret_cast<int>(pixels));
     //void * data = NULL;
     //unsigned encodedSize = 0;
-    Send(msg, cmd);
-    float t = 0;
     if (!expectResponse)
         cmd.set_function(glesv2debugger::Message_Function_CONTINUE);
+    Send(msg, cmd);
+    float t = 0;
     while (true) {
         msg.Clear();
         nsecs_t c0 = systemTime(timeMode);
@@ -61,6 +61,8 @@
             //msg.set_data(data, encodedSize);
             //free(data);
             c0 = systemTime(timeMode);
+            if (!expectResponse)
+                cmd.set_function(glesv2debugger::Message_Function_SKIP);
             t = Send(msg, cmd);
             msg.set_time((systemTime(timeMode) - c0) * 1e-6f);
             msg.set_clock(t);
@@ -69,11 +71,13 @@
             msg.set_expect_response(false);
             msg.set_type(glesv2debugger::Message_Type_AfterCall);
             //Send(msg, cmd);
-            if (!expectResponse)
-                cmd.set_function(glesv2debugger::Message_Function_SKIP);
             break;
         case glesv2debugger::Message_Function_SKIP:
             return;
+        case glesv2debugger::Message_Function_SETPROP:
+            SetProp(cmd);
+            Receive(cmd);
+            break;
         default:
             assert(0); //GenerateCall(msg, cmd);
             break;
@@ -104,9 +108,9 @@
     void * pixels = NULL;
     GLint readFormat = 0, readType = 0;
     int viewport[4] = {};
-    Send(msg, cmd);
     if (!expectResponse)
         cmd.set_function(glesv2debugger::Message_Function_CONTINUE);
+    Send(msg, cmd);
     while (true) {
         msg.Clear();
         nsecs_t c0 = systemTime(timeMode);
@@ -118,25 +122,26 @@
             msg.set_function(glesv2debugger::Message_Function_glDrawArrays);
             msg.set_type(glesv2debugger::Message_Type_AfterCall);
             msg.set_expect_response(expectResponse);
-            Send(msg, cmd);
-            if (capture)
-                cmd.set_function(glesv2debugger::Message_Function_CAPTURE);
-            else if (!expectResponse)
+            if (!expectResponse)
                 cmd.set_function(glesv2debugger::Message_Function_SKIP);
+            Send(msg, cmd);
+            if (capture) {
+                dbg->hooks->gl.glGetIntegerv(GL_VIEWPORT, viewport);
+                dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &readFormat);
+                dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, &readType);
+                LOGD("glDrawArrays CAPTURE: x=%d y=%d width=%d height=%d format=0x%.4X type=0x%.4X",
+                     viewport[0], viewport[1], viewport[2], viewport[3], readFormat, readType);
+                pixels = malloc(viewport[2] * viewport[3] * 4);
+                Debug_glReadPixels(viewport[0], viewport[1], viewport[2], viewport[3],
+                                   readFormat, readType, pixels);
+                free(pixels);
+            }
             break;
         case glesv2debugger::Message_Function_SKIP:
             return;
-        case glesv2debugger::Message_Function_CAPTURE:
-            dbg->hooks->gl.glGetIntegerv(GL_VIEWPORT, viewport);
-            dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &readFormat);
-            dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, &readType);
-            LOGD("glDrawArrays CAPTURE: x=%d y=%d width=%d height=%d format=0x%.4X type=0x%.4X",
-                 viewport[0], viewport[1], viewport[2], viewport[3], readFormat, readType);
-            pixels = malloc(viewport[2] * viewport[3] * 4);
-            Debug_glReadPixels(viewport[0], viewport[1], viewport[2], viewport[3],
-                               readFormat, readType, pixels);
-            free(pixels);
-            cmd.set_function(glesv2debugger::Message_Function_SKIP);
+        case glesv2debugger::Message_Function_SETPROP:
+            SetProp(cmd);
+            Receive(cmd);
             break;
         default:
             assert(0); //GenerateCall(msg, cmd);
@@ -189,9 +194,9 @@
     void * pixels = NULL;
     GLint readFormat = 0, readType = 0;
     int viewport[4] = {};
-    Send(msg, cmd);
     if (!expectResponse)
         cmd.set_function(glesv2debugger::Message_Function_CONTINUE);
+    Send(msg, cmd);
     while (true) {
         msg.Clear();
         nsecs_t c0 = systemTime(timeMode);
@@ -203,25 +208,26 @@
             msg.set_function(glesv2debugger::Message_Function_glDrawElements);
             msg.set_type(glesv2debugger::Message_Type_AfterCall);
             msg.set_expect_response(expectResponse);
-            Send(msg, cmd);
-            if (capture)
-                cmd.set_function(glesv2debugger::Message_Function_CAPTURE);
-            else if (!expectResponse)
+            if (!expectResponse)
                 cmd.set_function(glesv2debugger::Message_Function_SKIP);
+            Send(msg, cmd);
+            if (capture) {
+                dbg->hooks->gl.glGetIntegerv(GL_VIEWPORT, viewport);
+                dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &readFormat);
+                dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, &readType);
+                LOGD("glDrawArrays CAPTURE: x=%d y=%d width=%d height=%d format=0x%.4X type=0x%.4X",
+                     viewport[0], viewport[1], viewport[2], viewport[3], readFormat, readType);
+                pixels = malloc(viewport[2] * viewport[3] * 4);
+                Debug_glReadPixels(viewport[0], viewport[1], viewport[2], viewport[3],
+                                   readFormat, readType, pixels);
+                free(pixels);
+            }
             break;
         case glesv2debugger::Message_Function_SKIP:
             return;
-        case glesv2debugger::Message_Function_CAPTURE:
-            dbg->hooks->gl.glGetIntegerv(GL_VIEWPORT, viewport);
-            dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT, &readFormat);
-            dbg->hooks->gl.glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE, &readType);
-            LOGD("glDrawArrays CAPTURE: x=%d y=%d width=%d height=%d format=0x%.4X type=0x%.4X",
-                 viewport[0], viewport[1], viewport[2], viewport[3], readFormat, readType);
-            pixels = malloc(viewport[2] * viewport[3] * 4);
-            Debug_glReadPixels(viewport[0], viewport[1], viewport[2], viewport[3],
-                               readFormat, readType, pixels);
-            free(pixels);
-            cmd.set_function(glesv2debugger::Message_Function_SKIP);
+        case glesv2debugger::Message_Function_SETPROP:
+            SetProp(cmd);
+            Receive(cmd);
             break;
         default:
             assert(0); //GenerateCall(msg, cmd);
diff --git a/opengl/libs/glesv2dbg.h b/opengl/libs/glesv2dbg.h
index b988eb7..8029dce 100644
--- a/opengl/libs/glesv2dbg.h
+++ b/opengl/libs/glesv2dbg.h
@@ -24,7 +24,7 @@
     DbgContext * CreateDbgContext(const unsigned version, const gl_hooks_t * const hooks);
     void DestroyDbgContext(DbgContext * const dbg);
     
-    void StartDebugServer(); // create and bind socket if haven't already
+    void StartDebugServer(unsigned short port); // create and bind socket if haven't already
     void StopDebugServer(); // close socket if open
     
 }; // namespace android
diff --git a/packages/SystemUI/res/values/strings.xml b/packages/SystemUI/res/values/strings.xml
index 6306d2e..446827b 100644
--- a/packages/SystemUI/res/values/strings.xml
+++ b/packages/SystemUI/res/values/strings.xml
@@ -129,7 +129,7 @@
     <string name="usb_accessory_confirm_prompt">Open <xliff:g id="activity">%1$s</xliff:g> when this USB accessory is connected?</string>
 
     <!-- Prompt for the USB accessory URI dialog [CHAR LIMIT=80] -->
-    <string name="usb_accessory_uri_prompt">Additional information for this USB accessory may be found at: <xliff:g id="url">%1$s</xliff:g></string>
+    <string name="usb_accessory_uri_prompt">No installed applications work with this USB accessory. Learn more about this accessory at <xliff:g id="url">%1$s</xliff:g></string>
 
     <!-- Title for USB accessory dialog.  Used when the name of the accessory cannot be determined.  [CHAR LIMIT=50] -->
     <string name="title_usb_accessory">USB accessory</string>
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java b/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
index 6b9551b..70a78df 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
@@ -812,7 +812,9 @@
 
         // the data direction overlay
         if (mLastDataDirectionOverlayIconId != dataDirectionOverlayIconId) {
-            Slog.d(TAG, "changing data overlay icon id to " + dataDirectionOverlayIconId);
+            if (DEBUG) {
+                Slog.d(TAG, "changing data overlay icon id to " + dataDirectionOverlayIconId);
+            }
             mLastDataDirectionOverlayIconId = dataDirectionOverlayIconId;
             N = mDataDirectionOverlayIconViews.size();
             for (int i=0; i<N; i++) {
diff --git a/services/input/EventHub.cpp b/services/input/EventHub.cpp
index e2da740..853dda4 100644
--- a/services/input/EventHub.cpp
+++ b/services/input/EventHub.cpp
@@ -1074,8 +1074,34 @@
     mDevices.removeAt(index);
     device->close();
 
-    device->next = mClosingDevices;
-    mClosingDevices = device;
+    // Unlink from the opening devices list if it is present.
+    Device* pred = NULL;
+    bool found = false;
+    for (Device* entry = mOpeningDevices; entry != NULL; ) {
+        if (entry == device) {
+            found = true;
+            break;
+        }
+        pred = entry;
+        entry = entry->next;
+    }
+    if (found) {
+        // Unlink the device from the opening devices list then delete it.
+        // We don't need to tell the client that the device was closed because
+        // it does not even know it was opened in the first place.
+        LOGI("Device %s was immediately closed after opening.", device->path.string());
+        if (pred) {
+            pred->next = device->next;
+        } else {
+            mOpeningDevices = device->next;
+        }
+        delete device;
+    } else {
+        // Link into closing devices list.
+        // The device will be deleted later after we have informed the client.
+        device->next = mClosingDevices;
+        mClosingDevices = device;
+    }
     return 0;
 }
 
diff --git a/services/java/com/android/server/AppWidgetService.java b/services/java/com/android/server/AppWidgetService.java
index f28e2b1..a4a95a0 100644
--- a/services/java/com/android/server/AppWidgetService.java
+++ b/services/java/com/android/server/AppWidgetService.java
@@ -135,7 +135,7 @@
                 IRemoteViewsAdapterConnection.Stub.asInterface(mConnectionCb);
             try {
                 cb.onServiceConnected(service);
-            } catch (RemoteException e) {
+            } catch (Exception e) {
                 e.printStackTrace();
             }
         }
@@ -147,7 +147,7 @@
                 IRemoteViewsAdapterConnection.Stub.asInterface(mConnectionCb);
             try {
                 cb.onServiceDisconnected();
-            } catch (RemoteException e) {
+            } catch (Exception e) {
                 e.printStackTrace();
             }
         }
@@ -541,7 +541,7 @@
                     IRemoteViewsFactory.Stub.asInterface(service);
                 try {
                     cb.onDestroy(intent);
-                } catch (RemoteException e) {
+                } catch (Exception e) {
                     e.printStackTrace();
                 }
                 mContext.unbindService(this);
diff --git a/services/java/com/android/server/PackageManagerService.java b/services/java/com/android/server/PackageManagerService.java
index 3aebb5c..7553c2b 100644
--- a/services/java/com/android/server/PackageManagerService.java
+++ b/services/java/com/android/server/PackageManagerService.java
@@ -7058,8 +7058,8 @@
                 if (pkgSetting.notLaunched) {
                     if (pkgSetting.installerPackageName != null) {
                         sendPackageBroadcast(Intent.ACTION_PACKAGE_FIRST_LAUNCH,
-                                pkgSetting.installerPackageName, null,
-                                pkgSetting.name, null);
+                                pkgSetting.name, null,
+                                pkgSetting.installerPackageName, null);
                     }
                     pkgSetting.notLaunched = false;
                 }
diff --git a/services/surfaceflinger/Layer.cpp b/services/surfaceflinger/Layer.cpp
index 7a69097..78ee07e 100644
--- a/services/surfaceflinger/Layer.cpp
+++ b/services/surfaceflinger/Layer.cpp
@@ -860,11 +860,13 @@
     Mutex::Autolock _l(mLock);
 
     if (size < mNumBuffers) {
-        // Move the active texture into slot 0
-        BufferData activeBufferData = mBufferData[mActiveBufferIndex];
-        mBufferData[mActiveBufferIndex] = mBufferData[0];
-        mBufferData[0] = activeBufferData;
-        mActiveBufferIndex = 0;
+        // If there is an active texture, move it into slot 0 if needed
+        if (mActiveBufferIndex > 0) {
+            BufferData activeBufferData = mBufferData[mActiveBufferIndex];
+            mBufferData[mActiveBufferIndex] = mBufferData[0];
+            mBufferData[0] = activeBufferData;
+            mActiveBufferIndex = 0;
+        }
 
         // Free the buffers that are no longer needed.
         for (size_t i = size; i < mNumBuffers; i++) {
diff --git a/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeTypedArray.java b/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeTypedArray.java
index 138a455..b9f769f 100644
--- a/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeTypedArray.java
+++ b/tools/layoutlib/bridge/src/com/android/layoutlib/bridge/android/BridgeTypedArray.java
@@ -491,18 +491,11 @@
         if (ResourceHelper.stringToFloat(s, mValue)) {
             float f = mValue.getDimension(mBridgeResources.mMetrics);
 
-            if (f < 0) {
-                // negative values are not allowed in pixel dimensions
-                Bridge.getLog().error(LayoutLog.TAG_BROKEN,
-                        "Negative pixel dimension: " + s,
-                        null, null /*data*/);
-                return defValue;
-            }
-
+            final int res = (int)(f+0.5f);
+            if (res != 0) return res;
             if (f == 0) return 0;
-            if (f < 1) return 1;
-
-            return (int)(f+0.5f);
+            if (f > 0) return 1;
+            return defValue; // this is basically unreachable.
         }
 
         // looks like we were unable to resolve the dimension value