Merge "Fix CTS test for nine patch padding sanity check" into lmp-dev
diff --git a/api/current.txt b/api/current.txt
index 93360c1..5cafd6c 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -13236,10 +13236,10 @@
     method public void registerDisplayListener(android.hardware.display.DisplayManager.DisplayListener, android.os.Handler);
     method public void unregisterDisplayListener(android.hardware.display.DisplayManager.DisplayListener);
     field public static final java.lang.String DISPLAY_CATEGORY_PRESENTATION = "android.hardware.display.category.PRESENTATION";
+    field public static final int VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR = 16; // 0x10
     field public static final int VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY = 8; // 0x8
     field public static final int VIRTUAL_DISPLAY_FLAG_PRESENTATION = 2; // 0x2
     field public static final int VIRTUAL_DISPLAY_FLAG_PUBLIC = 1; // 0x1
-    field public static final int VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE = 16; // 0x10
     field public static final int VIRTUAL_DISPLAY_FLAG_SECURE = 4; // 0x4
   }
 
@@ -16401,7 +16401,7 @@
   public final class MediaProjection {
     method public void addCallback(android.media.projection.MediaProjection.Callback, android.os.Handler);
     method public android.media.AudioRecord createAudioRecord(int, int, int, int);
-    method public android.hardware.display.VirtualDisplay createVirtualDisplay(java.lang.String, int, int, int, boolean, android.view.Surface, android.hardware.display.VirtualDisplay.Callbacks, android.os.Handler);
+    method public android.hardware.display.VirtualDisplay createVirtualDisplay(java.lang.String, int, int, int, int, android.view.Surface, android.hardware.display.VirtualDisplay.Callbacks, android.os.Handler);
     method public void removeCallback(android.media.projection.MediaProjection.Callback);
     method public void stop();
   }
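
For reference, the renamed flag pairs with the updated MediaProjection.createVirtualDisplay() signature above: the fifth argument is now a flags int rather than a boolean, so mirroring is requested explicitly. A minimal sketch of a caller, where "projection", "surface", and "handler" are placeholder names for an already-granted MediaProjection, an output Surface, and a Handler:

```java
// Hedged sketch: mirroring is requested via the flags argument.
VirtualDisplay display = projection.createVirtualDisplay(
        "demo-mirror",                      // debug name
        /*width*/ 1280, /*height*/ 720, /*densityDpi*/ 320,
        DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
        surface,
        /*callbacks*/ null,                 // VirtualDisplay.Callbacks is optional
        handler);
```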
diff --git a/core/java/android/bluetooth/BluetoothGattCallbackWrapper.java b/core/java/android/bluetooth/BluetoothGattCallbackWrapper.java
index 0eb9d21..da992f5 100644
--- a/core/java/android/bluetooth/BluetoothGattCallbackWrapper.java
+++ b/core/java/android/bluetooth/BluetoothGattCallbackWrapper.java
@@ -124,8 +124,7 @@
     }
 
     @Override
-    public void onFoundOrLost(boolean onFound, String address, int rssi, byte[] advData)
-            throws RemoteException {
+    public void onFoundOrLost(boolean onFound, ScanResult scanResult) throws RemoteException {
     }
 
 }
diff --git a/core/java/android/bluetooth/IBluetoothGattCallback.aidl b/core/java/android/bluetooth/IBluetoothGattCallback.aidl
index f14cce0..00b6b1b 100644
--- a/core/java/android/bluetooth/IBluetoothGattCallback.aidl
+++ b/core/java/android/bluetooth/IBluetoothGattCallback.aidl
@@ -69,6 +69,5 @@
                                   in AdvertiseSettings advertiseSettings);
     void onConfigureMTU(in String address, in int mtu, in int status);
     void onConnectionCongested(in String address, in boolean congested);
-    void onFoundOrLost(in boolean onFound, in String address, in int rssi,
-                             in byte[] advData);
+    void onFoundOrLost(in boolean onFound, in ScanResult scanResult);
 }
diff --git a/core/java/android/bluetooth/le/BluetoothLeScanner.java b/core/java/android/bluetooth/le/BluetoothLeScanner.java
index 45e466f..6667cc4 100644
--- a/core/java/android/bluetooth/le/BluetoothLeScanner.java
+++ b/core/java/android/bluetooth/le/BluetoothLeScanner.java
@@ -322,22 +322,27 @@
         }
 
         @Override
-        public void onFoundOrLost(boolean onFound, String address, int rssi,
-                byte[] advData) {
+        public void onFoundOrLost(final boolean onFound, final ScanResult scanResult) {
             if (DBG) {
-                Log.d(TAG, "onFoundOrLost() - Device=" + address);
+                Log.d(TAG, "onFoundOrLost() - onFound = " + onFound +
+                        " " + scanResult.toString());
             }
-            // ToDo: Fix issue with underlying reporting from chipset
-            BluetoothDevice device = BluetoothAdapter.getDefaultAdapter().getRemoteDevice(
-                    address);
-            long scanNanos = SystemClock.elapsedRealtimeNanos();
-            ScanResult result = new ScanResult(device, ScanRecord.parseFromBytes(advData), rssi,
-                    scanNanos);
-            if (onFound) {
-                mScanCallback.onScanResult(ScanSettings.CALLBACK_TYPE_FIRST_MATCH, result);
-            } else {
-                mScanCallback.onScanResult(ScanSettings.CALLBACK_TYPE_MATCH_LOST, result);
+
+            // Bail out if the scan has already been stopped (mClientIf is cleared on stop)
+            synchronized (this) {
+                if (mClientIf <= 0) return;
             }
+            Handler handler = new Handler(Looper.getMainLooper());
+            handler.post(new Runnable() {
+                    @Override
+                public void run() {
+                    if (onFound) {
+                        mScanCallback.onScanResult(ScanSettings.CALLBACK_TYPE_FIRST_MATCH, scanResult);
+                    } else {
+                        mScanCallback.onScanResult(ScanSettings.CALLBACK_TYPE_MATCH_LOST, scanResult);
+                    }
+                }
+            });
         }
     }
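
With this rework the binder callback hands a ready-made ScanResult to the app's ScanCallback, posted on the main looper. A minimal receiving-side sketch, where "scanner", "filters", and "TAG" are placeholder names:

```java
ScanSettings settings = new ScanSettings.Builder()
        .setCallbackType(ScanSettings.CALLBACK_TYPE_FIRST_MATCH
                | ScanSettings.CALLBACK_TYPE_MATCH_LOST)
        .build();

ScanCallback callback = new ScanCallback() {
    @Override
    public void onScanResult(int callbackType, ScanResult result) {
        // Delivered on the main looper, per the Handler.post() above.
        if (callbackType == ScanSettings.CALLBACK_TYPE_FIRST_MATCH) {
            Log.d(TAG, "Found: " + result.getDevice().getAddress());
        } else if (callbackType == ScanSettings.CALLBACK_TYPE_MATCH_LOST) {
            Log.d(TAG, "Lost: " + result.getDevice().getAddress());
        }
    }
};

scanner.startScan(filters, settings, callback);
```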
 
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index c461511..6a9d565 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -130,15 +130,6 @@
     public abstract String getId();
 
     /**
-     * <p>Set up a new output set of Surfaces for the camera device.</p>
-     *
-     * @deprecated Use {@link #createCaptureSession} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract void configureOutputs(List<Surface> outputs) throws CameraAccessException;
-
-    /**
      * <p>Create a new camera capture session by providing the target output set of Surfaces to the
      * camera device.</p>
      *
@@ -276,68 +267,6 @@
             throws CameraAccessException;
 
     /**
-     * <p>Submit a request for an image to be captured by this CameraDevice.</p>
-     *
-     * @deprecated Use {@link CameraCaptureSession#capture} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract int capture(CaptureRequest request, CaptureListener listener, Handler handler)
-            throws CameraAccessException;
-
-    /**
-     * Submit a list of requests to be captured in sequence as a burst.
-     *
-     * @deprecated Use {@link CameraCaptureSession#captureBurst} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract int captureBurst(List<CaptureRequest> requests, CaptureListener listener,
-            Handler handler) throws CameraAccessException;
-
-    /**
-     * Request endlessly repeating capture of images by this CameraDevice.
-     *
-     * @deprecated Use {@link CameraCaptureSession#setRepeatingRequest} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract int setRepeatingRequest(CaptureRequest request, CaptureListener listener,
-            Handler handler) throws CameraAccessException;
-
-    /**
-     * <p>Request endlessly repeating capture of a sequence of images by this
-     * CameraDevice.</p>
-     *
-     * @deprecated Use {@link CameraCaptureSession#setRepeatingBurst} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract int setRepeatingBurst(List<CaptureRequest> requests, CaptureListener listener,
-            Handler handler) throws CameraAccessException;
-
-    /**
-     * <p>Cancel any ongoing repeating capture set by either
-     * {@link #setRepeatingRequest setRepeatingRequest} or
-     * {@link #setRepeatingBurst}.
-     *
-     * @deprecated Use {@link CameraCaptureSession#stopRepeating} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract void stopRepeating() throws CameraAccessException;
-
-    /**
-     * Flush all captures currently pending and in-progress as fast as
-     * possible.
-     *
-     * @deprecated Use {@link CameraCaptureSession#abortCaptures} instead
-     * @hide
-     */
-    @Deprecated
-    public abstract void flush() throws CameraAccessException;
-
-    /**
      * Close the connection to this camera device as quickly as possible.
      *
      * <p>Immediately after this call, all calls to the camera device or active session interface
@@ -356,96 +285,6 @@
     public abstract void close();
 
     /**
-     * <p>A listener for tracking the progress of a {@link CaptureRequest}
-     * submitted to the camera device.</p>
-     *
-     * @deprecated Use {@link CameraCaptureSession.CaptureListener} instead
-     * @hide
-     */
-    @Deprecated
-    public static abstract class CaptureListener {
-
-        /**
-         * This constant is used to indicate that no images were captured for
-         * the request.
-         *
-         * @hide
-         */
-        public static final int NO_FRAMES_CAPTURED = -1;
-
-        /**
-         * This method is called when the camera device has started capturing
-         * the output image for the request, at the beginning of image exposure.
-         *
-         * @see android.media.MediaActionSound
-         */
-        public void onCaptureStarted(CameraDevice camera,
-                CaptureRequest request, long timestamp) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called when some results from an image capture are
-         * available.
-         *
-         * @hide
-         */
-        public void onCapturePartial(CameraDevice camera,
-                CaptureRequest request, CaptureResult result) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called when an image capture makes partial forward progress; some
-         * (but not all) results from an image capture are available.
-         *
-         */
-        public void onCaptureProgressed(CameraDevice camera,
-                CaptureRequest request, CaptureResult partialResult) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called when an image capture has fully completed and all the
-         * result metadata is available.
-         */
-        public void onCaptureCompleted(CameraDevice camera,
-                CaptureRequest request, TotalCaptureResult result) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called instead of {@link #onCaptureCompleted} when the
-         * camera device failed to produce a {@link CaptureResult} for the
-         * request.
-         */
-        public void onCaptureFailed(CameraDevice camera,
-                CaptureRequest request, CaptureFailure failure) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called independently of the others in CaptureListener,
-         * when a capture sequence finishes and all {@link CaptureResult}
-         * or {@link CaptureFailure} for it have been returned via this listener.
-         */
-        public void onCaptureSequenceCompleted(CameraDevice camera,
-                int sequenceId, long frameNumber) {
-            // default empty implementation
-        }
-
-        /**
-         * This method is called independently of the others in CaptureListener,
-         * when a capture sequence aborts before any {@link CaptureResult}
-         * or {@link CaptureFailure} for it have been returned via this listener.
-         */
-        public void onCaptureSequenceAborted(CameraDevice camera,
-                int sequenceId) {
-            // default empty implementation
-        }
-    }
-
-    /**
      * A listener for notifications about the state of a camera
      * device.
      *
@@ -542,40 +381,6 @@
         public abstract void onOpened(CameraDevice camera); // Must implement
 
         /**
-         * The method called when a camera device has no outputs configured.
-         *
-         * @deprecated Use {@link #onOpened} instead.
-         * @hide
-         */
-        @Deprecated
-        public void onUnconfigured(CameraDevice camera) {
-            // Default empty implementation
-        }
-
-        /**
-         * The method called when a camera device begins processing
-         * {@link CaptureRequest capture requests}.
-         *
-         * @deprecated Use {@link CameraCaptureSession.StateListener#onActive} instead.
-         * @hide
-         */
-        @Deprecated
-        public void onActive(CameraDevice camera) {
-            // Default empty implementation
-        }
-
-        /**
-         * The method called when a camera device is busy.
-         *
-         * @deprecated Use {@link CameraCaptureSession.StateListener#onConfigured} instead.
-         * @hide
-         */
-        @Deprecated
-        public void onBusy(CameraDevice camera) {
-            // Default empty implementation
-        }
-
-        /**
          * The method called when a camera device has been closed with
          * {@link CameraDevice#close}.
          *
@@ -591,18 +396,6 @@
         }
 
         /**
-         * The method called when a camera device has finished processing all
-         * submitted capture requests and has reached an idle state.
-         *
-         * @deprecated Use {@link CameraCaptureSession.StateListener#onReady} instead.
-         * @hide
-         */
-        @Deprecated
-        public void onIdle(CameraDevice camera) {
-            // Default empty implementation
-        }
-
-        /**
          * The method called when a camera device is no longer available for
          * use.
          *
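
With the per-device capture entry points removed from the public class, all request submission goes through a capture session. A minimal migration sketch, using the listener names as they stand in this change ("previewSurface", "handler", and "TAG" are placeholders):

```java
// Was: device.setRepeatingRequest(request, listener, handler)
// Now: configure a session first, then submit through it.
device.createCaptureSession(Arrays.asList(previewSurface),
        new CameraCaptureSession.StateListener() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    CaptureRequest.Builder builder =
                            device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                    builder.addTarget(previewSurface);
                    session.setRepeatingRequest(builder.build(), /*listener*/ null, handler);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Failed to start repeating request", e);
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                Log.e(TAG, "Failed to configure session");
            }
        }, handler);
```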
diff --git a/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java b/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
index f829f5e..a15028c 100644
--- a/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
+++ b/core/java/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
@@ -103,7 +103,7 @@
          * Use the same handler as the device's StateListener for all the internal incoming events
          *
          * This ensures total ordering between CameraDevice.StateListener and
-         * CameraDevice.CaptureListener events.
+         * CameraDeviceImpl.CaptureListener events.
          */
         mSequenceDrainer = new TaskDrainer<>(mDeviceHandler, new SequenceDrainListener(),
                 /*name*/"seq");
@@ -141,7 +141,7 @@
         checkNotClosed();
         checkLegalToCapture();
 
-        handler = checkHandler(handler);
+        handler = checkHandler(handler, listener);
 
         if (VERBOSE) {
             Log.v(TAG, "capture - request " + request + ", listener " + listener + " handler" +
@@ -164,7 +164,7 @@
         checkNotClosed();
         checkLegalToCapture();
 
-        handler = checkHandler(handler);
+        handler = checkHandler(handler, listener);
 
         if (VERBOSE) {
             CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
@@ -186,7 +186,7 @@
         checkNotClosed();
         checkLegalToCapture();
 
-        handler = checkHandler(handler);
+        handler = checkHandler(handler, listener);
 
         if (VERBOSE) {
             Log.v(TAG, "setRepeatingRequest - request " + request + ", listener " + listener +
@@ -209,7 +209,7 @@
         checkNotClosed();
         checkLegalToCapture();
 
-        handler = checkHandler(handler);
+        handler = checkHandler(handler, listener);
 
         if (VERBOSE) {
             CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
@@ -261,9 +261,12 @@
      * <p>The semantics are identical to {@link #close}, except that unconfiguring will be skipped.
      * <p>
      *
+     * <p>After this call completes, the session will not call any further methods on the camera
+     * device.</p>
+     *
      * @see CameraCaptureSession#close
      */
-    synchronized void replaceSessionClose(CameraCaptureSession other) {
+    synchronized void replaceSessionClose() {
         /*
          * In order for creating new sessions to be fast, the new session should be created
          * before the old session is closed.
@@ -278,13 +281,17 @@
 
         if (VERBOSE) Log.v(TAG, "replaceSessionClose");
 
-        // #close was already called explicitly, keep going the slow route
-        if (mClosed) {
-            if (VERBOSE) Log.v(TAG, "replaceSessionClose - close was already called");
-            return;
-        }
-
+        // Set up fast shutdown. Possible alternative paths:
+        // - This session is active, so close() below starts the shutdown drain
+        // - This session is mid-shutdown drain, and hasn't yet reached the idle drain listener.
+        // - This session is already closed and has executed the idle drain listener, and
+        //   configureOutputs(null) has already been called.
+        //
+        // Do not call configureOutputs(null) going forward, since it would race with the
+        // configuration for the new session. If it was already called, then we don't care, since it
+        // won't get called again.
         mSkipUnconfigure = true;
+
         close();
     }
 
@@ -347,7 +354,7 @@
 
     /**
      * Forward callbacks from
-     * CameraDevice.CaptureListener to the CameraCaptureSession.CaptureListener.
+     * CameraDeviceImpl.CaptureListener to the CameraCaptureSession.CaptureListener.
      *
      * <p>In particular, all calls are automatically split to go both to our own
      * internal listener, and to the user-specified listener (by transparently posting
@@ -356,9 +363,9 @@
      * <p>When a capture sequence finishes, update the pending checked sequences set.</p>
      */
     @SuppressWarnings("deprecation")
-    private CameraDevice.CaptureListener createCaptureListenerProxy(
+    private CameraDeviceImpl.CaptureListener createCaptureListenerProxy(
             Handler handler, CaptureListener listener) {
-        CameraDevice.CaptureListener localListener = new CameraDevice.CaptureListener() {
+        CameraDeviceImpl.CaptureListener localListener = new CameraDeviceImpl.CaptureListener() {
             @Override
             public void onCaptureSequenceCompleted(CameraDevice camera,
                     int sequenceId, long frameNumber) {
@@ -379,27 +386,30 @@
          * - then forward the call to a handler
          * - then finally invoke the destination method on the session listener object
          */
-        Dispatchable<CaptureListener> userListenerSink;
-        if (listener == null) { // OK: API allows the user to not specify a listener
-            userListenerSink = new NullDispatcher<>();
-        } else {
-            userListenerSink = new InvokeDispatcher<>(listener);
+        if (listener == null) {
+            // OK: API allows the user to not specify a listener, and the handler may
+            // also be null in that case. Collapse whole dispatch chain to only call the local
+            // listener
+            return localListener;
         }
 
-        InvokeDispatcher<CameraDevice.CaptureListener> localSink =
+        InvokeDispatcher<CameraDeviceImpl.CaptureListener> localSink =
                 new InvokeDispatcher<>(localListener);
+
+        InvokeDispatcher<CaptureListener> userListenerSink =
+                new InvokeDispatcher<>(listener);
         HandlerDispatcher<CaptureListener> handlerPassthrough =
                 new HandlerDispatcher<>(userListenerSink, handler);
-        DuckTypingDispatcher<CameraDevice.CaptureListener, CaptureListener> duckToSession
+        DuckTypingDispatcher<CameraDeviceImpl.CaptureListener, CaptureListener> duckToSession
                 = new DuckTypingDispatcher<>(handlerPassthrough, CaptureListener.class);
-        ArgumentReplacingDispatcher<CameraDevice.CaptureListener, CameraCaptureSessionImpl>
-            replaceDeviceWithSession = new ArgumentReplacingDispatcher<>(duckToSession,
-                    /*argumentIndex*/0, this);
+        ArgumentReplacingDispatcher<CameraDeviceImpl.CaptureListener, CameraCaptureSessionImpl>
+                replaceDeviceWithSession = new ArgumentReplacingDispatcher<>(duckToSession,
+                        /*argumentIndex*/0, this);
 
-        BroadcastDispatcher<CameraDevice.CaptureListener> broadcaster =
-                new BroadcastDispatcher<CameraDevice.CaptureListener>(
-                        replaceDeviceWithSession,
-                        localSink);
+        BroadcastDispatcher<CameraDeviceImpl.CaptureListener> broadcaster =
+                new BroadcastDispatcher<CameraDeviceImpl.CaptureListener>(
+                    replaceDeviceWithSession,
+                    localSink);
 
         return new ListenerProxies.DeviceCaptureListenerProxy(broadcaster);
     }
@@ -415,10 +425,10 @@
      * </ul>
      * </p>
      * */
-    CameraDevice.StateListener getDeviceStateListener() {
+    CameraDeviceImpl.StateListenerKK getDeviceStateListener() {
         final CameraCaptureSession session = this;
 
-        return new CameraDevice.StateListener() {
+        return new CameraDeviceImpl.StateListenerKK() {
             private boolean mBusy = false;
             private boolean mActive = false;
 
@@ -596,6 +606,8 @@
                  *
                  * This operation is idempotent; a session will not be closed twice.
                  */
+                if (VERBOSE) Log.v(TAG, "Session drain complete, skip unconfigure: " +
+                        mSkipUnconfigure);
 
                 // Fast path: A new capture session has replaced this one; don't unconfigure.
                 if (mSkipUnconfigure) {
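
The caller-visible effect of the parameterless replaceSessionClose() is that creating a second session implicitly retires the first one without an intervening configureOutputs(null). A rough illustration, with placeholder listener and surface names:

```java
// First session drives the preview output.
device.createCaptureSession(Arrays.asList(previewSurface), previewListener, handler);

// Later, switching configurations needs no explicit close(): CameraDeviceImpl calls
// replaceSessionClose() on the old session before configuring the new outputs, so the
// old session drains, reports onClosed(), and skips the redundant unconfigure step.
device.createCaptureSession(Arrays.asList(previewSurface, recorderSurface),
        recordingListener, handler);
```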
diff --git a/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java b/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
index 18b1202..71eb0e9 100644
--- a/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
+++ b/core/java/android/hardware/camera2/impl/CameraDeviceImpl.java
@@ -21,8 +21,10 @@
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureFailure;
 import android.hardware.camera2.ICameraDeviceCallbacks;
 import android.hardware.camera2.ICameraDeviceUser;
 import android.hardware.camera2.TotalCaptureResult;
@@ -47,7 +49,7 @@
 /**
  * HAL2.1+ implementation of CameraDevice. Use CameraManager#open to instantiate
  */
-public class CameraDeviceImpl extends android.hardware.camera2.CameraDevice {
+public class CameraDeviceImpl extends CameraDevice {
 
     private final String TAG;
     private final boolean DEBUG;
@@ -62,7 +64,7 @@
     private final CameraDeviceCallbacks mCallbacks = new CameraDeviceCallbacks();
 
     private final StateListener mDeviceListener;
-    private volatile StateListener mSessionStateListener;
+    private volatile StateListenerKK mSessionStateListener;
     private final Handler mDeviceHandler;
 
     private volatile boolean mClosing = false;
@@ -103,7 +105,7 @@
     private final Runnable mCallOnOpened = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -119,7 +121,7 @@
     private final Runnable mCallOnUnconfigured = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -128,14 +130,13 @@
             if (sessionListener != null) {
                 sessionListener.onUnconfigured(CameraDeviceImpl.this);
             }
-            mDeviceListener.onUnconfigured(CameraDeviceImpl.this);
         }
     };
 
     private final Runnable mCallOnActive = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -144,14 +145,13 @@
             if (sessionListener != null) {
                 sessionListener.onActive(CameraDeviceImpl.this);
             }
-            mDeviceListener.onActive(CameraDeviceImpl.this);
         }
     };
 
     private final Runnable mCallOnBusy = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -160,7 +160,6 @@
             if (sessionListener != null) {
                 sessionListener.onBusy(CameraDeviceImpl.this);
             }
-            mDeviceListener.onBusy(CameraDeviceImpl.this);
         }
     };
 
@@ -172,7 +171,7 @@
             if (mClosedOnce) {
                 throw new AssertionError("Don't post #onClosed more than once");
             }
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 sessionListener = mSessionStateListener;
             }
@@ -187,7 +186,7 @@
     private final Runnable mCallOnIdle = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -196,14 +195,13 @@
             if (sessionListener != null) {
                 sessionListener.onIdle(CameraDeviceImpl.this);
             }
-            mDeviceListener.onIdle(CameraDeviceImpl.this);
         }
     };
 
     private final Runnable mCallOnDisconnected = new Runnable() {
         @Override
         public void run() {
-            StateListener sessionListener = null;
+            StateListenerKK sessionListener = null;
             synchronized(mInterfaceLock) {
                 if (mRemoteDevice == null) return; // Camera already closed
 
@@ -313,7 +311,6 @@
         return mCameraId;
     }
 
-    @Override
     public void configureOutputs(List<Surface> outputs) throws CameraAccessException {
         // Treat a null input the same an empty list
         if (outputs == null) {
@@ -390,7 +387,11 @@
 
             checkIfCameraClosedOrInError();
 
-            // TODO: we must be in UNCONFIGURED mode to begin with, or using another session
+            // Notify current session that it's going away, before starting camera operations
+            // After this call completes, the session is not allowed to call into CameraDeviceImpl
+            if (mCurrentSession != null) {
+                mCurrentSession.replaceSessionClose();
+            }
 
             // TODO: dont block for this
             boolean configureSuccess = true;
@@ -410,10 +411,6 @@
                     new CameraCaptureSessionImpl(outputs, listener, handler, this, mDeviceHandler,
                             configureSuccess);
 
-            if (mCurrentSession != null) {
-                mCurrentSession.replaceSessionClose(newSession);
-            }
-
             // TODO: wait until current session closes, then create the new session
             mCurrentSession = newSession;
 
@@ -425,6 +422,15 @@
         }
     }
 
+    /**
+     * For use by backwards-compatibility code only.
+     */
+    public void setSessionListener(StateListenerKK sessionListener) {
+        synchronized(mInterfaceLock) {
+            mSessionStateListener = sessionListener;
+        }
+    }
+
     @Override
     public CaptureRequest.Builder createCaptureRequest(int templateType)
             throws CameraAccessException {
@@ -449,7 +455,6 @@
         }
     }
 
-    @Override
     public int capture(CaptureRequest request, CaptureListener listener, Handler handler)
             throws CameraAccessException {
         if (DEBUG) {
@@ -460,7 +465,6 @@
         return submitCaptureRequest(requestList, listener, handler, /*streaming*/false);
     }
 
-    @Override
     public int captureBurst(List<CaptureRequest> requests, CaptureListener listener,
             Handler handler) throws CameraAccessException {
         if (requests == null || requests.isEmpty()) {
@@ -543,9 +547,7 @@
 
         // Need a valid handler, or current thread needs to have a looper, if
         // listener is valid
-        if (listener != null) {
-            handler = checkHandler(handler);
-        }
+        handler = checkHandler(handler, listener);
 
         // Make sure that there all requests have at least 1 surface; all surfaces are non-null
         for (CaptureRequest request : requestList) {
@@ -613,7 +615,6 @@
         }
     }
 
-    @Override
     public int setRepeatingRequest(CaptureRequest request, CaptureListener listener,
             Handler handler) throws CameraAccessException {
         List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
@@ -621,7 +622,6 @@
         return submitCaptureRequest(requestList, listener, handler, /*streaming*/true);
     }
 
-    @Override
     public int setRepeatingBurst(List<CaptureRequest> requests, CaptureListener listener,
             Handler handler) throws CameraAccessException {
         if (requests == null || requests.isEmpty()) {
@@ -630,7 +630,6 @@
         return submitCaptureRequest(requests, listener, handler, /*streaming*/true);
     }
 
-    @Override
     public void stopRepeating() throws CameraAccessException {
 
         synchronized(mInterfaceLock) {
@@ -681,7 +680,6 @@
         }
     }
 
-    @Override
     public void flush() throws CameraAccessException {
         synchronized(mInterfaceLock) {
             checkIfCameraClosedOrInError();
@@ -739,6 +737,133 @@
         }
     }
 
+    /**
+     * <p>A listener for tracking the progress of a {@link CaptureRequest}
+     * submitted to the camera device.</p>
+     *
+     */
+    public static abstract class CaptureListener {
+
+        /**
+         * This constant is used to indicate that no images were captured for
+         * the request.
+         *
+         * @hide
+         */
+        public static final int NO_FRAMES_CAPTURED = -1;
+
+        /**
+         * This method is called when the camera device has started capturing
+         * the output image for the request, at the beginning of image exposure.
+         *
+         * @see android.media.MediaActionSound
+         */
+        public void onCaptureStarted(CameraDevice camera,
+                CaptureRequest request, long timestamp) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called when some results from an image capture are
+         * available.
+         *
+         * @hide
+         */
+        public void onCapturePartial(CameraDevice camera,
+                CaptureRequest request, CaptureResult result) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called when an image capture makes partial forward progress; some
+         * (but not all) results from an image capture are available.
+         *
+         */
+        public void onCaptureProgressed(CameraDevice camera,
+                CaptureRequest request, CaptureResult partialResult) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called when an image capture has fully completed and all the
+         * result metadata is available.
+         */
+        public void onCaptureCompleted(CameraDevice camera,
+                CaptureRequest request, TotalCaptureResult result) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called instead of {@link #onCaptureCompleted} when the
+         * camera device failed to produce a {@link CaptureResult} for the
+         * request.
+         */
+        public void onCaptureFailed(CameraDevice camera,
+                CaptureRequest request, CaptureFailure failure) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called independently of the others in CaptureListener,
+         * when a capture sequence finishes and all {@link CaptureResult}
+         * or {@link CaptureFailure} for it have been returned via this listener.
+         */
+        public void onCaptureSequenceCompleted(CameraDevice camera,
+                int sequenceId, long frameNumber) {
+            // default empty implementation
+        }
+
+        /**
+         * This method is called independently of the others in CaptureListener,
+         * when a capture sequence aborts before any {@link CaptureResult}
+         * or {@link CaptureFailure} for it have been returned via this listener.
+         */
+        public void onCaptureSequenceAborted(CameraDevice camera,
+                int sequenceId) {
+            // default empty implementation
+        }
+    }
+
+    /**
+     * A listener for notifications about the state of a camera device, adding in the callbacks that
+     * were part of the earlier KK API design, but now only used internally.
+     */
+    public static abstract class StateListenerKK extends StateListener {
+        /**
+         * The method called when a camera device has no outputs configured.
+         *
+         */
+        public void onUnconfigured(CameraDevice camera) {
+            // Default empty implementation
+        }
+
+        /**
+         * The method called when a camera device begins processing
+         * {@link CaptureRequest capture requests}.
+         *
+         */
+        public void onActive(CameraDevice camera) {
+            // Default empty implementation
+        }
+
+        /**
+         * The method called when a camera device is busy.
+         *
+         */
+        public void onBusy(CameraDevice camera) {
+            // Default empty implementation
+        }
+
+        /**
+         * The method called when a camera device has finished processing all
+         * submitted capture requests and has reached an idle state.
+         *
+         */
+        public void onIdle(CameraDevice camera) {
+            // Default empty implementation
+        }
+    }
+
     static class CaptureListenerHolder {
 
         private final boolean mRepeating;
@@ -1155,6 +1280,18 @@
         return handler;
     }
 
+    /**
+     * Default handler management, conditional on there being a listener.
+     *
+     * <p>If the listener isn't null, check the handler, otherwise pass it through.</p>
+     */
+    static <T> Handler checkHandler(Handler handler, T listener) {
+        if (listener != null) {
+            return checkHandler(handler);
+        }
+        return handler;
+    }
+
     private void checkIfCameraClosedOrInError() throws CameraAccessException {
         if (mInError) {
             throw new CameraAccessException(CameraAccessException.CAMERA_ERROR,
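
The new two-argument checkHandler() centralizes a rule the capture call sites previously repeated: a usable handler (or a current-thread Looper) is only required when a listener is actually supplied. The call-site effect, sketched with illustrative names:

```java
// With a listener, the handler must resolve to something postable; checkHandler(Handler)
// falls back to the calling thread's Looper and throws if neither is available.
Handler h1 = checkHandler(null, myCaptureListener);

// Without a listener, a null handler is passed through untouched and nothing throws.
Handler h2 = checkHandler(null, /*listener*/ null);   // h2 == null
```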
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index dc71a06..febb015 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -67,6 +67,7 @@
 import java.nio.ByteOrder;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 
 /**
  * Implementation of camera metadata marshal/unmarshal across Binder to
@@ -227,6 +228,7 @@
 
     private static final String CELLID_PROCESS = "CELLID";
     private static final String GPS_PROCESS = "GPS";
+    private static final int FACE_LANDMARK_SIZE = 6;
 
     private static String translateLocationProviderToProcess(final String provider) {
         if (provider == null) {
@@ -347,7 +349,7 @@
         // Check if key has been overridden to use a wrapper class on the java side.
         GetCommand g = sGetCommandMap.get(key);
         if (g != null) {
-            return (T) g.getValue(this, key);
+            return g.getValue(this, key);
         }
         return getBase(key);
     }
@@ -587,9 +589,71 @@
         return availableFormats;
     }
 
-    private Face[] getFaces() {
-        final int FACE_LANDMARK_SIZE = 6;
+    private boolean setFaces(Face[] faces) {
+        if (faces == null) {
+            return false;
+        }
 
+        int numFaces = faces.length;
+
+        // Detect whether the faces are SIMPLE (no ids/landmarks) or FULL; count # of valid faces
+        boolean fullMode = true;
+        for (Face face : faces) {
+            if (face == null) {
+                numFaces--;
+                Log.w(TAG, "setFaces - null face detected, skipping");
+                continue;
+            }
+
+            if (face.getId() == Face.ID_UNSUPPORTED) {
+                fullMode = false;
+            }
+        }
+
+        Rect[] faceRectangles = new Rect[numFaces];
+        byte[] faceScores = new byte[numFaces];
+        int[] faceIds = null;
+        int[] faceLandmarks = null;
+
+        if (fullMode) {
+            faceIds = new int[numFaces];
+            faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
+        }
+
+        int i = 0;
+        for (Face face : faces) {
+            if (face == null) {
+                continue;
+            }
+
+            faceRectangles[i] = face.getBounds();
+            faceScores[i] = (byte)face.getScore();
+
+            if (fullMode) {
+                faceIds[i] = face.getId();
+
+                int j = 0;
+
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
+                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
+            }
+
+            i++;
+        }
+
+        set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
+        set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
+        set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
+        set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);
+
+        return true;
+    }
+
+    private Face[] getFaces() {
         Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
         if (faceDetectMode == null) {
             Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
@@ -653,9 +717,12 @@
                 if (faceScores[i] <= Face.SCORE_MAX &&
                         faceScores[i] >= Face.SCORE_MIN &&
                         faceIds[i] >= 0) {
-                    Point leftEye = new Point(faceLandmarks[i*6], faceLandmarks[i*6+1]);
-                    Point rightEye = new Point(faceLandmarks[i*6+2], faceLandmarks[i*6+3]);
-                    Point mouth = new Point(faceLandmarks[i*6+4], faceLandmarks[i*6+5]);
+                    Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
+                            faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
+                    Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
+                            faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
+                    Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
+                            faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
                     Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                             leftEye, rightEye, mouth);
                     faceList.add(face);
@@ -865,6 +932,13 @@
                 metadata.setFaceRectangles((Rect[]) value);
             }
         });
+        sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
+                new SetCommand() {
+            @Override
+            public <T> void setValue(CameraMetadataNative metadata, T value) {
+                metadata.setFaces((Face[])value);
+            }
+        });
         sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
             @Override
             public <T> void setValue(CameraMetadataNative metadata, T value) {
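
For reference, setFaces() writes landmarks in the same fixed layout getFaces() reads back: FACE_LANDMARK_SIZE (6) ints per face, ordered left eye, right eye, mouth. Decoding one entry, assuming a populated faceLandmarks array and face index i:

```java
// Layout per face i in STATISTICS_FACE_LANDMARKS (6 ints each):
//   [i*6 + 0, i*6 + 1]  left eye (x, y)
//   [i*6 + 2, i*6 + 3]  right eye (x, y)
//   [i*6 + 4, i*6 + 5]  mouth (x, y)
Point leftEye  = new Point(faceLandmarks[i * 6],     faceLandmarks[i * 6 + 1]);
Point rightEye = new Point(faceLandmarks[i * 6 + 2], faceLandmarks[i * 6 + 3]);
Point mouth    = new Point(faceLandmarks[i * 6 + 4], faceLandmarks[i * 6 + 5]);
```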
diff --git a/core/java/android/hardware/camera2/impl/ListenerProxies.java b/core/java/android/hardware/camera2/impl/ListenerProxies.java
index ab9a4d5..f44f9ad 100644
--- a/core/java/android/hardware/camera2/impl/ListenerProxies.java
+++ b/core/java/android/hardware/camera2/impl/ListenerProxies.java
@@ -36,13 +36,13 @@
 
     // TODO: replace with codegen
 
-    public static class DeviceStateListenerProxy extends CameraDevice.StateListener {
-        private final MethodNameInvoker<CameraDevice.StateListener> mProxy;
+    public static class DeviceStateListenerProxy extends CameraDeviceImpl.StateListenerKK {
+        private final MethodNameInvoker<CameraDeviceImpl.StateListenerKK> mProxy;
 
         public DeviceStateListenerProxy(
-                Dispatchable<CameraDevice.StateListener> dispatchTarget) {
+                Dispatchable<CameraDeviceImpl.StateListenerKK> dispatchTarget) {
             dispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
-            mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDevice.StateListener.class);
+            mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDeviceImpl.StateListenerKK.class);
         }
 
         @Override
@@ -87,13 +87,13 @@
     }
 
     @SuppressWarnings("deprecation")
-    public static class DeviceCaptureListenerProxy extends CameraDevice.CaptureListener {
-        private final MethodNameInvoker<CameraDevice.CaptureListener> mProxy;
+    public static class DeviceCaptureListenerProxy extends CameraDeviceImpl.CaptureListener {
+        private final MethodNameInvoker<CameraDeviceImpl.CaptureListener> mProxy;
 
         public DeviceCaptureListenerProxy(
-                Dispatchable<CameraDevice.CaptureListener> dispatchTarget) {
+                Dispatchable<CameraDeviceImpl.CaptureListener> dispatchTarget) {
             dispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
-            mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDevice.CaptureListener.class);
+            mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDeviceImpl.CaptureListener.class);
         }
 
         @Override
diff --git a/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
new file mode 100644
index 0000000..1470b70
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.FaceDetectionListener;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.internal.util.ArrayUtils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static android.hardware.camera2.CaptureRequest.*;
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Map legacy face detect callbacks into face detection results.
+ */
+@SuppressWarnings("deprecation")
+public class LegacyFaceDetectMapper {
+    private static String TAG = "LegacyFaceDetectMapper";
+    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
+
+    private final Camera mCamera;
+    private final boolean mFaceDetectSupported;
+    private boolean mFaceDetectEnabled = false;
+
+    private final Object mLock = new Object();
+    private Camera.Face[] mFaces;
+    private Camera.Face[] mFacesPrev;
+    /**
+     * Instantiate a new face detect mapper.
+     *
+     * @param camera a non-{@code null} camera1 device
+     * @param characteristics a non-{@code null} camera characteristics for that camera1 device
+     *
+     * @throws NullPointerException if any of the args were {@code null}
+     */
+    public LegacyFaceDetectMapper(Camera camera, CameraCharacteristics characteristics) {
+        mCamera = checkNotNull(camera, "camera must not be null");
+        checkNotNull(characteristics, "characteristics must not be null");
+
+        mFaceDetectSupported = ArrayUtils.contains(
+                characteristics.get(
+                        CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
+                STATISTICS_FACE_DETECT_MODE_SIMPLE);
+
+        if (!mFaceDetectSupported) {
+            return;
+        }
+
+       mCamera.setFaceDetectionListener(new FaceDetectionListener() {
+
+        @Override
+        public void onFaceDetection(Camera.Face[] faces, Camera camera) {
+            int lengthFaces = faces == null ? 0 : faces.length;
+            synchronized (mLock) {
+                if (mFaceDetectEnabled) {
+                    mFaces = faces;
+                } else if (lengthFaces > 0) {
+                    // stopFaceDetectMode could race against the requests, print a debug log
+                    Log.d(TAG,
+                            "onFaceDetection - Ignored some incoming faces since" +
+                            "face detection was disabled");
+                }
+            }
+
+            if (VERBOSE) {
+                Log.v(TAG, "onFaceDetection - read " + lengthFaces + " faces");
+            }
+        }
+       });
+    }
+
+    /**
+     * Process the face detect mode from the capture request into an api1 face detect toggle.
+     *
+     * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
+     * with the request.</p>
+     *
+     * <p>Callbacks are processed in the background, and the next call to {@link #mapResultFaces}
+     * will have the latest faces detected as reflected by the camera1 callbacks.</p>
+     *
+     * <p>None of the arguments will be mutated.</p>
+     *
+     * @param captureRequest a non-{@code null} request
+     * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
+     */
+    public void processFaceDetectMode(CaptureRequest captureRequest,
+            Camera.Parameters parameters) {
+        checkNotNull(captureRequest, "captureRequest must not be null");
+
+        /*
+         * statistics.faceDetectMode
+         */
+        int fdMode = ParamsUtils.getOrDefault(captureRequest, STATISTICS_FACE_DETECT_MODE,
+                STATISTICS_FACE_DETECT_MODE_OFF);
+
+        if (fdMode != STATISTICS_FACE_DETECT_MODE_OFF && !mFaceDetectSupported) {
+            Log.w(TAG,
+                    "processFaceDetectMode - Ignoring statistics.faceDetectMode; " +
+                    "face detection is not available");
+            return;
+        }
+
+        // Print some warnings out in case the values were wrong
+        switch (fdMode) {
+            case STATISTICS_FACE_DETECT_MODE_OFF:
+            case STATISTICS_FACE_DETECT_MODE_SIMPLE:
+                break;
+            case STATISTICS_FACE_DETECT_MODE_FULL:
+                Log.w(TAG,
+                        "processFaceDetectMode - statistics.faceDetectMode == FULL unsupported, " +
+                        "downgrading to SIMPLE");
+                break;
+            default:
+                Log.w(TAG, "processFaceDetectMode - ignoring unknown statistics.faceDetectMode = "
+                        + fdMode);
+                return;
+        }
+
+        boolean enableFaceDetect = fdMode != STATISTICS_FACE_DETECT_MODE_OFF;
+        synchronized (mLock) {
+            // Enable/disable face detection if it's changed since last time
+            if (enableFaceDetect != mFaceDetectEnabled) {
+                if (enableFaceDetect) {
+                    mCamera.startFaceDetection();
+
+                    if (VERBOSE) {
+                        Log.v(TAG, "processFaceDetectMode - start face detection");
+                    }
+                } else {
+                    mCamera.stopFaceDetection();
+
+                    if (VERBOSE) {
+                        Log.v(TAG, "processFaceDetectMode - stop face detection");
+                    }
+
+                    mFaces = null;
+                }
+
+                mFaceDetectEnabled = enableFaceDetect;
+            }
+        }
+    }
+
+    /**
+     * Update the {@code result} camera metadata map with the new value for the
+     * {@code statistics.faces} and {@code statistics.faceDetectMode}.
+     *
+     * <p>Face detect callbacks are processed in the background, and each call to
+     * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
+     *
+     * @param result a non-{@code null} result
+     * @param legacyRequest a non-{@code null} request (read-only)
+     */
+    public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
+        checkNotNull(result, "result must not be null");
+        checkNotNull(legacyRequest, "legacyRequest must not be null");
+
+        Camera.Face[] faces, previousFaces;
+        int fdMode;
+        synchronized (mLock) {
+            fdMode = mFaceDetectEnabled ?
+                            STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
+
+            if (mFaceDetectEnabled) {
+                faces = mFaces;
+            } else {
+                faces = null;
+            }
+
+            previousFaces = mFacesPrev;
+            mFacesPrev = faces;
+        }
+
+        CameraCharacteristics characteristics = legacyRequest.characteristics;
+        CaptureRequest request = legacyRequest.captureRequest;
+        Size previewSize = legacyRequest.previewSize;
+        Camera.Parameters params = legacyRequest.parameters;
+
+        Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+        ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
+                request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
+
+        List<Face> convertedFaces = new ArrayList<>();
+        if (faces != null) {
+            for (Camera.Face face : faces) {
+                if (face != null) {
+                    convertedFaces.add(
+                            ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
+                } else {
+                    Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
+                }
+            }
+        }
+
+        if (VERBOSE && previousFaces != faces) { // Log only in verbose mode, and only if the faces changed
+            Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
+        }
+
+        result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
+        result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
+    }
+}
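
A rough sketch of how the legacy request pipeline is expected to drive this mapper; the surrounding wiring is not part of this change, so the variable names below are illustrative:

```java
// Once, when the legacy camera device is opened:
LegacyFaceDetectMapper faceDetectMapper =
        new LegacyFaceDetectMapper(camera, characteristics);

// Per incoming CaptureRequest, after LegacyRequestMapper has applied the parameters:
faceDetectMapper.processFaceDetectMode(captureRequest, camera.getParameters());

// Per produced result, after LegacyResultMapper has built the base metadata:
faceDetectMapper.mapResultFaces(resultMetadata, legacyRequest);
```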
diff --git a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java b/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
index e576b43..d0a3a3f 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
@@ -247,7 +247,8 @@
                 // No action necessary. The callbacks will handle transitions.
                 break;
             default:
-                Log.w(TAG, "mapTriggers - ignoring unknown control.afTrigger = " + afTrigger);
+                Log.w(TAG, "processRequestTriggers - ignoring unknown control.afTrigger = "
+                        + afTrigger);
         }
     }
 
diff --git a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
index 711edf4..b05508b 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
@@ -204,6 +204,11 @@
         mapSensor(m, p);
 
         /*
+         * statistics.*
+         */
+        mapStatistics(m, p);
+
+        /*
          * sync.*
          */
         mapSync(m, p);
@@ -487,6 +492,18 @@
 
     private static void mapControlOther(CameraMetadataNative m, Camera.Parameters p) {
         /*
+         * android.control.availableVideoStabilizationModes
+         */
+        {
+            int stabModes[] = p.isVideoStabilizationSupported() ?
+                    new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+                                CONTROL_VIDEO_STABILIZATION_MODE_ON } :
+                    new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF };
+
+            m.set(CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, stabModes);
+        }
+
+        /*
          * android.control.maxRegions
          */
         final int AE = 0, AWB = 1, AF = 2;
@@ -742,6 +759,31 @@
         m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize);
     }
 
+    private static void mapStatistics(CameraMetadataNative m, Parameters p) {
+        /*
+         * statistics.info.availableFaceDetectModes
+         */
+        int[] fdModes;
+
+        if (p.getMaxNumDetectedFaces() > 0) {
+            fdModes = new int[] {
+                STATISTICS_FACE_DETECT_MODE_OFF,
+                STATISTICS_FACE_DETECT_MODE_SIMPLE
+                // FULL is never-listed, since we have no way to query it statically
+            };
+        } else {
+            fdModes = new int[] {
+                STATISTICS_FACE_DETECT_MODE_OFF
+            };
+        }
+        m.set(STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, fdModes);
+
+        /*
+         * statistics.info.maxFaceCount
+         */
+        m.set(STATISTICS_INFO_MAX_FACE_COUNT, p.getMaxNumDetectedFaces());
+    }
+
     private static void mapSync(CameraMetadataNative m, Parameters p) {
         /*
          * sync.maxLatency
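
With these keys mapped, an app can probe face-detect support on a legacy device the same way as on a HAL3 device. A short client-side sketch ("characteristics" and "requestBuilder" are placeholders):

```java
int[] modes = characteristics.get(
        CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
Integer maxFaces = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);

boolean simpleSupported = false;
if (modes != null) {
    for (int mode : modes) {
        simpleSupported |= (mode == CameraMetadata.STATISTICS_FACE_DETECT_MODE_SIMPLE);
    }
}
if (simpleSupported && maxFaces != null && maxFaces > 0) {
    requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
            CameraMetadata.STATISTICS_FACE_DETECT_MODE_SIMPLE);
}
```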
diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
index a6fe035..20f3fd2 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
@@ -150,10 +150,8 @@
             if (supported) {
                 params.setPreviewFpsRange(legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                         legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
-                params.setRecordingHint(false);
             } else {
                 Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
-                params.setRecordingHint(true);
             }
         }
 
@@ -248,6 +246,18 @@
          // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
         }
 
+        // control.videoStabilizationMode
+        {
+            Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
+                    /*defaultValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+                    params.isVideoStabilizationSupported(),
+                    /*allowedValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+
+            if (stabMode != null) {
+                params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
+            }
+        }
+
         // lens.focusDistance
         {
             boolean infinityFocusSupported =
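Hedged usage sketch, not part of the patch: with the new control.videoStabilizationMode block above, a camera2 client that sets the mode on its request will have it translated into Camera.Parameters#setVideoStabilization when the legacy device supports it. The builder is assumed to come from CameraDevice#createCaptureRequest(TEMPLATE_RECORD).

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;

    public final class StabilizationRequestHelper {
        // Request video stabilization; LegacyRequestMapper maps this onto the API1 parameter.
        public static void enableVideoStabilization(CaptureRequest.Builder builder) {
            builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                    CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }
    }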
diff --git a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
index 9eff943..a2487f4 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
@@ -35,6 +35,9 @@
 import java.util.List;
 
 import static com.android.internal.util.Preconditions.*;
+import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+import static android.hardware.camera2.CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
+import static android.hardware.camera2.CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE;
 import static android.hardware.camera2.CaptureResult.*;
 
 /**
@@ -142,7 +145,6 @@
          */
         mapAwb(result, /*out*/params);
 
-
         /*
          * control.mode
          */
@@ -171,7 +173,6 @@
             }
         }
 
-
         /*
          * control.effectMode
          */
@@ -187,6 +188,15 @@
             }
         }
 
+        // control.videoStabilizationMode
+        {
+            int stabMode =
+                    (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ?
+                        CONTROL_VIDEO_STABILIZATION_MODE_ON :
+                        CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+            result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
+        }
+
         /*
          * flash
          */
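For symmetry, a small sketch (again not part of this patch) of how a client would read back the stabilization state that LegacyResultMapper now reports in every capture result:

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureResult;

    public final class StabilizationResultHelper {
        // True only when the legacy parameters actually report stabilization as enabled.
        public static boolean isStabilizationActive(CaptureResult result) {
            Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
            return mode != null
                    && mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
    }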
diff --git a/core/java/android/hardware/camera2/legacy/ParameterUtils.java b/core/java/android/hardware/camera2/legacy/ParameterUtils.java
index efd12f2..385f844 100644
--- a/core/java/android/hardware/camera2/legacy/ParameterUtils.java
+++ b/core/java/android/hardware/camera2/legacy/ParameterUtils.java
@@ -43,6 +43,7 @@
 /**
  * Various utilities for dealing with camera API1 parameters.
  */
+@SuppressWarnings("deprecation")
 public class ParameterUtils {
     /** Upper/left minimal point of a normalized rectangle */
     public static final int NORMALIZED_RECTANGLE_MIN = -1000;
@@ -164,19 +165,23 @@
          * <p>If the score is out of range of {@value Face#SCORE_MIN}, {@value Face#SCORE_MAX},
          * the score is clipped first and a warning is printed to logcat.</p>
          *
+         * <p>If the id is negative, the id is changed to 0 and a warning is printed to
+         * logcat.</p>
+         *
          * <p>All other parameters are passed-through as-is.</p>
          *
          * @return a new face with the optional features set
          */
         public Face toFace(
                 int id, Point leftEyePosition, Point rightEyePosition, Point mouthPosition) {
+            int idSafe = clipLower(id, /*lo*/0, rect, "id");
             int score = clip(weight,
                     Face.SCORE_MIN,
                     Face.SCORE_MAX,
                     rect,
                     "score");
 
-            return new Face(rect, score, id, leftEyePosition, rightEyePosition, mouthPosition);
+            return new Face(rect, score, idSafe, leftEyePosition, rightEyePosition, mouthPosition);
         }
 
         /**
@@ -861,6 +866,61 @@
                 /*usePreviewCrop*/true);
     }
 
+    /**
+     * Convert an api1 face into an active-array based api2 face.
+     *
+     * <p>Out-of-range scores and ids will be clipped to be within range (with a warning).</p>
+     *
+     * @param face a non-{@code null} api1 face
+     * @param activeArray active array size of the sensor (e.g. max jpeg size)
+     * @param zoomData the calculated zoom data corresponding to this request
+     *
+     * @return a non-{@code null} api2 face
+     *
+     * @throws NullPointerException if the {@code face} was {@code null}
+     */
+    public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray,
+            ZoomData zoomData) {
+        checkNotNull(face, "face must not be null");
+
+        Face api2Face;
+
+        Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1);
+
+        WeightedRectangle faceRect =
+                convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);
+
+        Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
+        if (leftEye != null && rightEye != null && mouth != null) {
+            leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+                    leftEye, /*usePreviewCrop*/true);
+            rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+                    rightEye, /*usePreviewCrop*/true);
+            mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+                    mouth, /*usePreviewCrop*/true);
+
+            api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
+        } else {
+            api2Face = faceRect.toFace();
+        }
+
+        return api2Face;
+    }
+
+    private static Point convertCameraPointToActiveArrayPoint(
+            Rect activeArray, ZoomData zoomData, Point point, boolean usePreviewCrop) {
+        Rect pointedRect = new Rect(point.x, point.y, point.x, point.y);
+        Camera.Area pointedArea = new Area(pointedRect, /*weight*/1);
+
+        WeightedRectangle adjustedRect =
+                convertCameraAreaToActiveArrayRectangle(activeArray,
+                        zoomData, pointedArea, usePreviewCrop);
+
+        Point transformedPoint = new Point(adjustedRect.rect.left, adjustedRect.rect.top);
+
+        return transformedPoint;
+    }
+
     private static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
             Rect activeArray, ZoomData zoomData, Camera.Area area, boolean usePreviewCrop) {
         Rect previewCrop = zoomData.previewCrop;
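The new convertCameraPointToActiveArrayPoint() reuses the existing area-conversion path by wrapping a point in a zero-extent rectangle. A standalone, illustrative-only sketch of that trick:

    import android.graphics.Point;
    import android.graphics.Rect;

    public final class PointAsRect {
        // left == right and top == bottom, so the rectangle covers exactly one point
        // and survives the same normalization math as a real metering area.
        public static Rect toDegenerateRect(Point point) {
            return new Rect(point.x, point.y, point.x, point.y);
        }
    }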
diff --git a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
index c556c32..2533a28 100644
--- a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
+++ b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
@@ -39,7 +39,6 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
 import static com.android.internal.util.Preconditions.*;
@@ -55,18 +54,23 @@
  * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
  * </p>
  */
+@SuppressWarnings("deprecation")
 public class RequestThreadManager {
     private final String TAG;
     private final int mCameraId;
     private final RequestHandlerThread mRequestThread;
 
     private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+    // For slightly more spammy messages that will get repeated every frame
+    private static final boolean VERBOSE =
+            Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.VERBOSE);
     private final Camera mCamera;
     private final CameraCharacteristics mCharacteristics;
 
     private final CameraDeviceState mDeviceState;
     private final CaptureCollector mCaptureCollector;
     private final LegacyFocusStateMapper mFocusStateMapper;
+    private final LegacyFaceDetectMapper mFaceDetectMapper;
 
     private static final int MSG_CONFIGURE_OUTPUTS = 1;
     private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
@@ -219,6 +223,9 @@
             };
 
     private void stopPreview() {
+        if (VERBOSE) {
+            Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
+        }
         if (mPreviewRunning) {
             mCamera.stopPreview();
             mPreviewRunning = false;
@@ -226,14 +233,18 @@
     }
 
     private void startPreview() {
+        if (VERBOSE) {
+            Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
+        }
         if (!mPreviewRunning) {
+            // XX: CameraClient::startPreview is not getting called after a stop
             mCamera.startPreview();
             mPreviewRunning = true;
         }
     }
 
-    private void doJpegCapture(RequestHolder request) throws IOException {
-        if (DEBUG) Log.d(TAG, "doJpegCapture");
+    private void doJpegCapturePrepare(RequestHolder request) throws IOException {
+        if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);
 
         if (!mPreviewRunning) {
             if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");
@@ -242,11 +253,20 @@
             mCamera.setPreviewTexture(mDummyTexture);
             startPreview();
         }
+    }
+
+    private void doJpegCapture(RequestHolder request) {
+        if (DEBUG) Log.d(TAG, "doJpegCapture");
+
         mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
         mPreviewRunning = false;
     }
 
     private void doPreviewCapture(RequestHolder request) throws IOException {
+        if (VERBOSE) {
+            Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
+        }
+
         if (mPreviewRunning) {
             return; // Already running
         }
@@ -264,7 +284,20 @@
     }
 
     private void configureOutputs(Collection<Surface> outputs) throws IOException {
+        if (DEBUG) {
+            String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
+
+            Log.d(TAG, "configureOutputs with " + outputsStr);
+        }
+
         stopPreview();
+        /*
+         * Try to release the previous preview's surface texture earlier if we end up
+         * using a different one; this also reduces the likelihood of getting into a deadlock
+         * when disconnecting from the old preview texture at a later time.
+         */
+        mCamera.setPreviewTexture(/*surfaceTexture*/null);
+
         if (mGLThreadManager != null) {
             mGLThreadManager.waitUntilStarted();
             mGLThreadManager.ignoreNewFrames();
@@ -305,7 +338,6 @@
         }
         mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                 bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
-        mParams.setRecordingHint(true);
 
         if (mPreviewOutputs.size() > 0) {
             List<Size> outputSizes = new ArrayList<>(outputs.size());
@@ -613,10 +645,6 @@
                             }
                         }
 
-                        // Unconditionally process AF triggers, since they're non-idempotent
-                        // - must be done after setting the most-up-to-date AF mode
-                        mFocusStateMapper.processRequestTriggers(request, mParams);
-
                         try {
                             boolean success = mCaptureCollector.queueRequest(holder,
                                     mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);
@@ -624,6 +652,8 @@
                             if (!success) {
                                 Log.e(TAG, "Timed out while queueing capture request.");
                             }
+                            // Starting the preview needs to happen before enabling
+                            // face detection or auto focus
                             if (holder.hasPreviewTargets()) {
                                 doPreviewCapture(holder);
                             }
@@ -635,12 +665,33 @@
                                     Log.e(TAG, "Timed out waiting for prior requests to complete.");
                                 }
                                 mReceivedJpeg.close();
+                                doJpegCapturePrepare(holder);
+                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
+                                    // TODO: report error to CameraDevice
+                                    Log.e(TAG, "Hit timeout for jpeg callback!");
+                                }
+                            }
+
+                            /*
+                             * Do all the actions that require a preview to have been started
+                             */
+
+                            // Toggle face detection on/off
+                            // - do this before AF to give AF a chance to use faces
+                            mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);
+
+                            // Unconditionally process AF triggers, since they're non-idempotent
+                            // - must be done after setting the most-up-to-date AF mode
+                            mFocusStateMapper.processRequestTriggers(request, mParams);
+
+                            if (holder.hasJpegTargets()) {
                                 doJpegCapture(holder);
                                 if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                     // TODO: report error to CameraDevice
                                     Log.e(TAG, "Hit timeout for jpeg callback!");
                                 }
                             }
+
                         } catch (IOException e) {
                             // TODO: report error to CameraDevice
                             throw new IOError(e);
@@ -677,6 +728,8 @@
                                 mLastRequest, timestampMutable.value);
                         // Update AF state
                         mFocusStateMapper.mapResultTriggers(result);
+                        // Update detected faces list
+                        mFaceDetectMapper.mapResultFaces(result, mLastRequest);
 
                         mDeviceState.setCaptureResult(holder, result);
                     }
@@ -731,6 +784,7 @@
         TAG = name;
         mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
         mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
+        mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
         mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
         mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
     }
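The reshuffled request loop above encodes an ordering constraint: the preview must be running before face detection or AF triggers are applied, and the still capture fires only after both. A schematic sketch of that ordering, with placeholder method names (none of these exist in the patch):

    final class LegacyRequestOrdering {
        void process(boolean hasPreviewTargets, boolean hasJpegTargets) {
            if (hasPreviewTargets) {
                startPreview();           // 1. preview (or a dummy texture) must be up first
            }
            configureFaceDetection();     // 2. face detection only sticks with an active preview
            processAfTriggers();          // 3. AF triggers are non-idempotent, so run them last
            if (hasJpegTargets) {
                takePicture();            // 4. still capture after everything else is set up
            }
        }

        // Placeholders standing in for RequestThreadManager internals.
        private void startPreview() {}
        private void configureFaceDetection() {}
        private void processAfTriggers() {}
        private void takePicture() {}
    }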
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
index 1efabb1..2e6b9ae 100644
--- a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -813,6 +813,7 @@
         switch (format) {
             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
             case HAL_PIXEL_FORMAT_BLOB:
+            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                 return format;
             case ImageFormat.JPEG:
                 throw new IllegalArgumentException(
@@ -843,12 +844,6 @@
      * @throws IllegalArgumentException if the format was not user-defined
      */
     static int checkArgumentFormat(int format) {
-        // TODO: remove this hack , CTS shouldn't have been using internal constants
-        if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
-            Log.w(TAG, "RAW_OPAQUE is not yet a published format; allowing it anyway");
-            return format;
-        }
-
         if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
             throw new IllegalArgumentException(String.format(
                     "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
diff --git a/core/java/android/hardware/display/DisplayManager.java b/core/java/android/hardware/display/DisplayManager.java
index d4e6df5..51b7229 100644
--- a/core/java/android/hardware/display/DisplayManager.java
+++ b/core/java/android/hardware/display/DisplayManager.java
@@ -96,11 +96,9 @@
      * windows on the display and the system may mirror the contents of other displays
      * onto it.
      * </p><p>
-     * Creating a public virtual display requires the
-     * {@link android.Manifest.permission#CAPTURE_VIDEO_OUTPUT}
-     * or {@link android.Manifest.permission#CAPTURE_SECURE_VIDEO_OUTPUT} permission.
-     * These permissions are reserved for use by system components and are not available to
-     * third-party applications.
+     * Creating a public virtual display that isn't restricted to own-content only implicitly
+     * creates an auto-mirroring display. See {@link #VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR} for
+     * restrictions on who is allowed to create an auto-mirroring display.
      * </p>
      *
      * <h3>Private virtual displays</h3>
@@ -108,6 +106,8 @@
      * When this flag is not set, the virtual display is private as defined by the
      * {@link Display#FLAG_PRIVATE} display flag.
      * </p>
+     *
+     * <p>
      * A private virtual display belongs to the application that created it.
      * Only the owner of a private virtual display is allowed to place windows upon it.
      * The private virtual display also does not participate in display mirroring: it will
@@ -115,10 +115,11 @@
      * be mirrored elsewhere.  More precisely, the only processes that are allowed to
      * enumerate or interact with the private display are those that have the same UID as the
      * application that originally created the private virtual display.
-      * </p>
+     * </p>
      *
      * @see #createVirtualDisplay
      * @see #VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
+     * @see #VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR
      */
     public static final int VIRTUAL_DISPLAY_FLAG_PUBLIC = 1 << 0;
 
@@ -187,29 +188,51 @@
      * will be blanked instead if it has no windows.
      * </p>
      *
+     * <p>
+     * This flag is mutually exclusive with {@link #VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR}.  If both
+     * flags are specified then the own-content only behavior will be applied.
+     * </p>
+     *
+     * <p>
+     * The behavior of this flag is implied whenever neither {@link #VIRTUAL_DISPLAY_FLAG_PUBLIC}
+     * nor {@link #VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR} have been set.  This flag is only required to
+     * override the default behavior when creating a public display.
+     * </p>
+     *
      * @see #createVirtualDisplay
      */
     public static final int VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY = 1 << 3;
 
 
     /**
-     * Virtual display flag: Indicates that the display is being created for
-     * the purpose of screen sharing.  This implies
-     * VIRTUAL_DISPLAY_FLAG_PRIVATE.  Other flags are not allowed (especially
-     * not VIRTUAL_DISPLAY_FLAG_PUBLIC or PRESENTATION).
+     * Virtual display flag: Allows content to be mirrored on private displays when no content is
+     * being shown.
      *
      * <p>
-     * Requires screen share permission for use.
+     * This flag is mutually exclusive with {@link #VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY}.
+     * If both flags are specified then the own-content only behavior will be applied.
      * </p>
      *
      * <p>
-     * While a display of this type exists, the system will show some sort of
-     * notification to the user indicating that the screen is being shared.
+     * The behavior of this flag is implied whenever {@link #VIRTUAL_DISPLAY_FLAG_PUBLIC} is set
+     * and {@link #VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY} has not been set.  This flag is only
+     * required to override the default behavior when creating a private display.
+     * </p>
+     *
+     * <p>
+     * Creating an auto-mirroring virtual display requires the
+     * {@link android.Manifest.permission#CAPTURE_VIDEO_OUTPUT}
+     * or {@link android.Manifest.permission#CAPTURE_SECURE_VIDEO_OUTPUT} permission.
+     * These permissions are reserved for use by system components and are not available to
+     * third-party applications.
+     *
+     * Alternatively, an appropriate {@link MediaProjection} may be used to create an
+     * auto-mirroring virtual display.
      * </p>
      *
      * @see #createVirtualDisplay
      */
-    public static final int VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE = 1 << 4;
+    public static final int VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR = 1 << 4;
 
     /** @hide */
     public DisplayManager(Context context) {
@@ -489,7 +512,7 @@
      * @param flags A combination of virtual display flags:
      * {@link #VIRTUAL_DISPLAY_FLAG_PUBLIC}, {@link #VIRTUAL_DISPLAY_FLAG_PRESENTATION},
      * {@link #VIRTUAL_DISPLAY_FLAG_SECURE}, {@link #VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY},
-     * or {@link #VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE}.
+     * or {@link #VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR}.
      * @param callbacks Callbacks to call when the state of the {@link VirtualDisplay} changes
      * @param handler The handler on which the listener should be invoked, or null
      * if the listener should be invoked on the calling thread's looper.
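A minimal sketch of requesting an auto-mirroring display directly through DisplayManager with the renamed flag, assuming the caller is a system component holding CAPTURE_VIDEO_OUTPUT; the name, size, and density values are placeholders:

    import android.hardware.display.DisplayManager;
    import android.hardware.display.VirtualDisplay;
    import android.view.Surface;

    public final class MirrorDisplayFactory {
        public static VirtualDisplay createMirror(DisplayManager dm, Surface surface) {
            int flags = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
                    | DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR;
            // Placeholder geometry; real callers derive these from the source display.
            return dm.createVirtualDisplay("mirror", /*width*/ 1280, /*height*/ 720,
                    /*densityDpi*/ 320, surface, flags);
        }
    }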
diff --git a/core/java/android/widget/TimePickerClockDelegate.java b/core/java/android/widget/TimePickerClockDelegate.java
index 59ec6d3..8102c4a 100644
--- a/core/java/android/widget/TimePickerClockDelegate.java
+++ b/core/java/android/widget/TimePickerClockDelegate.java
@@ -18,6 +18,7 @@
 
 import android.content.Context;
 import android.content.res.Configuration;
+import android.content.res.Resources;
 import android.content.res.TypedArray;
 import android.graphics.Color;
 import android.os.Parcel;
@@ -171,7 +172,10 @@
         mMinuteSpinnerInput.setImeOptions(EditorInfo.IME_ACTION_NEXT);
 
             /* Get the localized am/pm strings and use them in the spinner */
-        mAmPmStrings = new DateFormatSymbols().getAmPmStrings();
+        final Resources res = context.getResources();
+        final String amText = res.getString(R.string.time_picker_am_label);
+        final String pmText = res.getString(R.string.time_picker_pm_label);
+        mAmPmStrings = new String[] {amText, pmText};
 
         // am/pm
         View amPmView = mDelegator.findViewById(R.id.amPm);
diff --git a/core/java/android/widget/TimePickerSpinnerDelegate.java b/core/java/android/widget/TimePickerSpinnerDelegate.java
index 4e9a39f..523965c 100644
--- a/core/java/android/widget/TimePickerSpinnerDelegate.java
+++ b/core/java/android/widget/TimePickerSpinnerDelegate.java
@@ -134,6 +134,8 @@
         mSelectHours = res.getString(R.string.select_hours);
         mMinutePickerDescription = res.getString(R.string.minute_picker_description);
         mSelectMinutes = res.getString(R.string.select_minutes);
+        mAmText = res.getString(R.string.time_picker_am_label);
+        mPmText = res.getString(R.string.time_picker_pm_label);
 
         final int layoutResourceId = a.getResourceId(R.styleable.TimePicker_internalLayout,
                 R.layout.time_picker_holo);
@@ -182,10 +184,6 @@
                 R.id.radial_picker);
         mDoneButton = (Button) mainView.findViewById(R.id.done_button);
 
-        String[] amPmTexts = new DateFormatSymbols().getAmPmStrings();
-        mAmText = amPmTexts[0];
-        mPmText = amPmTexts[1];
-
         setupListeners();
 
         mAllowAutoAdvance = true;
diff --git a/core/res/res/values/strings.xml b/core/res/res/values/strings.xml
index 4761f1a..c34560d 100644
--- a/core/res/res/values/strings.xml
+++ b/core/res/res/values/strings.xml
@@ -4134,6 +4134,10 @@
     <string name="time_picker_increment_set_pm_button">Set PM</string>
     <!-- Description of the button to decrease the TimePicker's set AM value. [CHAR LIMIT=NONE] -->
     <string name="time_picker_decrement_set_am_button">Set AM</string>
+    <!-- Label for the TimePicker's PM button. [CHAR LIMIT=2] -->
+    <string name="time_picker_pm_label">PM</string>
+    <!-- Label for the TimePicker's AM button. [CHAR LIMIT=2] -->
+    <string name="time_picker_am_label">AM</string>
 
     <!-- DatePicker - accessibility support -->
     <!-- Description of the button to increase the DatePicker's month value. [CHAR LIMIT=NONE] -->
diff --git a/core/res/res/values/symbols.xml b/core/res/res/values/symbols.xml
index 84bc62c..6030715 100644
--- a/core/res/res/values/symbols.xml
+++ b/core/res/res/values/symbols.xml
@@ -1969,9 +1969,7 @@
   <java-symbol type="array" name="config_cdma_home_system" />
   <java-symbol type="attr" name="headerSelectedTextColor" />
   <java-symbol type="attr" name="amPmSelectedBackgroundColor" />
-
-  <!--From SmsMessage-->
-  <!--Support decoding the user data payload as pack GSM 8-bit (a GSM alphabet
-     string that's stored in 8-bit unpacked format) characters.-->
   <java-symbol type="bool" name="config_sms_decode_gsm_8bit_data" />
+  <java-symbol type="string" name="time_picker_am_label" />
+  <java-symbol type="string" name="time_picker_pm_label" />
 </resources>
diff --git a/graphics/java/android/graphics/drawable/Ripple.java b/graphics/java/android/graphics/drawable/Ripple.java
index 6f21f2e..be2241b 100644
--- a/graphics/java/android/graphics/drawable/Ripple.java
+++ b/graphics/java/android/graphics/drawable/Ripple.java
@@ -276,7 +276,7 @@
     public void getBounds(Rect bounds) {
         final int outerX = (int) mOuterX;
         final int outerY = (int) mOuterY;
-        final int r = (int) mOuterRadius;
+        final int r = (int) mOuterRadius + 1;
         bounds.set(outerX - r, outerY - r, outerX + r, outerY + r);
     }
 
diff --git a/graphics/java/android/graphics/drawable/RippleBackground.java b/graphics/java/android/graphics/drawable/RippleBackground.java
index d404ccd..93df648 100644
--- a/graphics/java/android/graphics/drawable/RippleBackground.java
+++ b/graphics/java/android/graphics/drawable/RippleBackground.java
@@ -264,7 +264,7 @@
     public void getBounds(Rect bounds) {
         final int outerX = (int) mOuterX;
         final int outerY = (int) mOuterY;
-        final int r = (int) mOuterRadius;
+        final int r = (int) mOuterRadius + 1;
         bounds.set(outerX - r, outerY - r, outerX + r, outerY + r);
     }
 
diff --git a/media/java/android/media/projection/IMediaProjection.aidl b/media/java/android/media/projection/IMediaProjection.aidl
index dd84ad32..3d25aa6 100644
--- a/media/java/android/media/projection/IMediaProjection.aidl
+++ b/media/java/android/media/projection/IMediaProjection.aidl
@@ -25,7 +25,7 @@
     boolean canProjectAudio();
     boolean canProjectVideo();
     boolean canProjectSecureVideo();
-    int getVirtualDisplayFlags();
+    int applyVirtualDisplayFlags(int flags);
     void addCallback(IMediaProjectionCallback callback);
     void removeCallback(IMediaProjectionCallback callback);
 }
diff --git a/media/java/android/media/projection/MediaProjection.java b/media/java/android/media/projection/MediaProjection.java
index 7c03171..861039d 100644
--- a/media/java/android/media/projection/MediaProjection.java
+++ b/media/java/android/media/projection/MediaProjection.java
@@ -93,6 +93,19 @@
     }
 
     /**
+     * @hide
+     */
+    public VirtualDisplay createVirtualDisplay(@NonNull String name,
+            int width, int height, int dpi, boolean isSecure, @Nullable Surface surface,
+            @Nullable VirtualDisplay.Callbacks callbacks, @Nullable Handler handler) {
+        DisplayManager dm = (DisplayManager) mContext.getSystemService(Context.DISPLAY_SERVICE);
+        int flags = isSecure ? DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE : 0;
+        return dm.createVirtualDisplay(this, name, width, height, dpi, surface,
+                    flags | DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR |
+                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION, callbacks, handler);
+    }
+
+    /**
      * Creates a {@link android.hardware.display.VirtualDisplay} to capture the
      * contents of the screen.
      *
@@ -105,9 +118,8 @@
      * than 0.
      * @param surface The surface to which the content of the virtual display
      * should be rendered, or null if there is none initially.
-     * @param isSecure Whether the display should be considered a secure
-     * display. This typically requires special permissions not available to
-     * third party applications.
+     * @param flags A combination of virtual display flags. See {@link DisplayManager} for the full
+     * list of flags.
      * @param callbacks Callbacks to call when the virtual display's state
      * changes, or null if none.
      * @param handler The {@link android.os.Handler} on which the callback should be
@@ -118,10 +130,9 @@
      * String, int, int, int, int, Surface, VirtualDisplay.Callbacks, Handler)
      */
     public VirtualDisplay createVirtualDisplay(@NonNull String name,
-            int width, int height, int dpi, boolean isSecure, @Nullable Surface surface,
+            int width, int height, int dpi, int flags, @Nullable Surface surface,
             @Nullable VirtualDisplay.Callbacks callbacks, @Nullable Handler handler) {
         DisplayManager dm = (DisplayManager) mContext.getSystemService(Context.DISPLAY_SERVICE);
-        int flags = isSecure ? DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE : 0;
         return dm.createVirtualDisplay(
                     this, name, width, height, dpi, surface, flags, callbacks, handler);
     }
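Hedged usage sketch for the updated public MediaProjection API in this change: the boolean isSecure parameter is replaced by an int flags parameter, so a caller that previously passed true now passes VIRTUAL_DISPLAY_FLAG_SECURE explicitly. The projection instance is assumed to be valid; callbacks and handler are omitted here.

    import android.hardware.display.DisplayManager;
    import android.hardware.display.VirtualDisplay;
    import android.media.projection.MediaProjection;
    import android.view.Surface;

    public final class ProjectionDisplayFactory {
        public static VirtualDisplay createSecureCapture(MediaProjection projection,
                Surface surface) {
            return projection.createVirtualDisplay("capture", /*width*/ 1920,
                    /*height*/ 1080, /*dpi*/ 320,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE, surface,
                    /*callbacks*/ null, /*handler*/ null);
        }
    }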
diff --git a/packages/SystemUI/res/values/strings.xml b/packages/SystemUI/res/values/strings.xml
index 97efb47..51633dc 100644
--- a/packages/SystemUI/res/values/strings.xml
+++ b/packages/SystemUI/res/values/strings.xml
@@ -768,13 +768,13 @@
     <string name="disconnect_vpn">Disconnect VPN</string>
 
     <!-- Monitoring dialog device owner body text [CHAR LIMIT=300] -->
-    <string name="monitoring_description_device_owned">This device is managed by:\n<xliff:g id="organization">%1$s</xliff:g>\n\nYour administrator can monitor your network activity, including emails, apps and secure websites.\n\nFor more information, contact your administrator.</string>
+    <string name="monitoring_description_device_owned">This device is managed by:\n<xliff:g id="organization">%1$s</xliff:g>\n\nYour administrator can monitor your device and network activity, including emails, apps and secure websites.\n\nFor more information, contact your administrator.</string>
 
     <!-- Monitoring dialog non-legacy VPN text [CHAR LIMIT=300] -->
-    <string name="monitoring_description_vpn">You gave \"<xliff:g id="application">%1$s</xliff:g>\" permission to set up a VPN connection.\n\nThis app can monitor your network activity, including emails, apps and secure websites.</string>
+    <string name="monitoring_description_vpn">You gave \"<xliff:g id="application">%1$s</xliff:g>\" permission to set up a VPN connection.\n\nThis app can monitor your device and network activity, including emails, apps and secure websites.</string>
 
     <!-- Monitoring dialog legacy VPN text [CHAR LIMIT=300] -->
-    <string name="monitoring_description_legacy_vpn">You\'re connected to a VPN (\"<xliff:g id="application">%1$s</xliff:g>\").\n\nYour VPN service provider can monitor your network activity including emails, apps, and secure websites.</string>
+    <string name="monitoring_description_legacy_vpn">You\'re connected to a VPN (\"<xliff:g id="application">%1$s</xliff:g>\").\n\nYour VPN service provider can monitor your device and network activity including emails, apps, and secure websites.</string>
 
     <!-- Monitoring dialog non-legacy VPN with device owner text [CHAR LIMIT=300] -->
     <string name="monitoring_description_vpn_device_owned">This device is managed by:\n<xliff:g id="organization">%1$s</xliff:g>\n\nYour administrator is capable of monitoring your network activity including emails, apps, and secure websites. For more information, contact your administrator.\n\nAlso, you gave \"<xliff:g id="application">%2$s</xliff:g>\" permission to set up a VPN connection. This app can monitor network activity too.</string>
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/phone/KeyguardBouncer.java b/packages/SystemUI/src/com/android/systemui/statusbar/phone/KeyguardBouncer.java
index 8996197..3b14082 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/phone/KeyguardBouncer.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/phone/KeyguardBouncer.java
@@ -82,6 +82,7 @@
 
     public void hide(boolean destroyView) {
         if (mKeyguardView != null) {
+            mKeyguardView.setOnDismissAction(null);
             mKeyguardView.cleanUp();
         }
         if (destroyView) {
diff --git a/policy/src/com/android/internal/policy/impl/PhoneWindow.java b/policy/src/com/android/internal/policy/impl/PhoneWindow.java
index ff3cd9d..964acbd 100644
--- a/policy/src/com/android/internal/policy/impl/PhoneWindow.java
+++ b/policy/src/com/android/internal/policy/impl/PhoneWindow.java
@@ -1314,7 +1314,6 @@
             mBackgroundDrawable = drawable;
             if (mDecor != null) {
                 mDecor.setWindowBackground(drawable);
-                mDecor.setClipToOutline(drawable != null && mClipToOutline);
             }
         }
     }
@@ -3389,10 +3388,6 @@
             }
             mDecor.setWindowBackground(background);
 
-            if (background != null) {
-                mDecor.setClipToOutline(mClipToOutline);
-            }
-
             final Drawable frame;
             if (mFrameResource != 0) {
                 frame = getContext().getDrawable(mFrameResource);
@@ -3402,6 +3397,7 @@
             mDecor.setWindowFrame(frame);
 
             mDecor.setElevation(mElevation);
+            mDecor.setClipToOutline(mClipToOutline);
 
             if (mTitle != null) {
                 setTitle(mTitle);
diff --git a/services/core/java/com/android/server/display/DisplayManagerService.java b/services/core/java/com/android/server/display/DisplayManagerService.java
index 2dd150a..e31f177 100644
--- a/services/core/java/com/android/server/display/DisplayManagerService.java
+++ b/services/core/java/com/android/server/display/DisplayManagerService.java
@@ -1269,28 +1269,26 @@
                         + "greater than 0");
             }
 
+            if ((flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC) != 0) {
+                flags |= DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR;
+            }
+            if ((flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY) != 0) {
+                flags &= ~DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR;
+            }
+
             if (projection != null) {
                 try {
                     if (!getProjectionService().isValidMediaProjection(projection)) {
                         throw new SecurityException("Invalid media projection");
                     }
+                    flags = projection.applyVirtualDisplayFlags(flags);
                 } catch (RemoteException e) {
-                    throw new SecurityException("unable to validate media projection");
-                }
-                flags &= DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE;
-                try {
-                    flags |= projection.getVirtualDisplayFlags();
-                } catch (RemoteException e) {
-                    throw new RuntimeException("unable to retrieve media projection flags");
+                    throw new SecurityException("unable to validate media projection or flags");
                 }
             }
 
-            if ((flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE) != 0) {
-                if ((flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC) != 0 ||
-                        (flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION) != 0) {
-                    throw new IllegalArgumentException("screen sharing virtual displays must not "
-                            + "be public or presentation displays");
-                }
+            if (callingUid != Process.SYSTEM_UID &&
+                    (flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR) != 0) {
                 if (!canProjectVideo(projection)) {
                     throw new SecurityException("Requires CAPTURE_VIDEO_OUTPUT or "
                             + "CAPTURE_SECURE_VIDEO_OUTPUT permission, or an appropriate "
@@ -1298,16 +1296,6 @@
                             + "display.");
                 }
             }
-
-
-            if (callingUid != Process.SYSTEM_UID &&
-                    (flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC) != 0) {
-                if (!canProjectVideo(projection)) {
-                    throw new SecurityException("Requires CAPTURE_VIDEO_OUTPUT or "
-                            + "CAPTURE_SECURE_VIDEO_OUTPUT permission, or an appropriate "
-                            + "MediaProjection token to create a public virtual display.");
-                }
-            }
             if ((flags & DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE) != 0) {
                 if (!canProjectSecureVideo(projection)) {
                     throw new SecurityException("Requires CAPTURE_SECURE_VIDEO_OUTPUT "
diff --git a/services/core/java/com/android/server/display/VirtualDisplayAdapter.java b/services/core/java/com/android/server/display/VirtualDisplayAdapter.java
index 0ebd2de..72ac29a 100644
--- a/services/core/java/com/android/server/display/VirtualDisplayAdapter.java
+++ b/services/core/java/com/android/server/display/VirtualDisplayAdapter.java
@@ -262,14 +262,15 @@
                 mInfo.presentationDeadlineNanos = 1000000000L / (int) mInfo.refreshRate; // 1 frame
                 mInfo.flags = 0;
                 if ((mFlags & DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC) == 0) {
-                    mInfo.flags |= DisplayDeviceInfo.FLAG_PRIVATE;
-                    if ((mFlags & DisplayManager.VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE) == 0) {
-                        mInfo.flags |=  DisplayDeviceInfo.FLAG_OWN_CONTENT_ONLY
-                                | DisplayDeviceInfo.FLAG_NEVER_BLANK;
-                    }
-                } else if ((mFlags & DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY) != 0) {
+                    mInfo.flags |= DisplayDeviceInfo.FLAG_PRIVATE
+                            | DisplayDeviceInfo.FLAG_NEVER_BLANK;
+                }
+                if ((mFlags & DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR) != 0) {
+                    mInfo.flags &= ~DisplayDeviceInfo.FLAG_NEVER_BLANK;
+                } else {
                     mInfo.flags |= DisplayDeviceInfo.FLAG_OWN_CONTENT_ONLY;
                 }
+
                 if ((mFlags & DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE) != 0) {
                     mInfo.flags |= DisplayDeviceInfo.FLAG_SECURE;
                 }
diff --git a/services/core/java/com/android/server/media/projection/MediaProjectionManagerService.java b/services/core/java/com/android/server/media/projection/MediaProjectionManagerService.java
index 7b28699..289b5aa 100644
--- a/services/core/java/com/android/server/media/projection/MediaProjectionManagerService.java
+++ b/services/core/java/com/android/server/media/projection/MediaProjectionManagerService.java
@@ -103,7 +103,7 @@
             try {
                 hasPermission |= checkPermission(packageName,
                         android.Manifest.permission.CAPTURE_VIDEO_OUTPUT)
-                        || mAppOps.checkOpNoThrow(
+                        || mAppOps.noteOpNoThrow(
                                 AppOpsManager.OP_PROJECT_MEDIA, uid, packageName)
                         == AppOpsManager.MODE_ALLOWED;
             } finally {
@@ -196,18 +196,27 @@
         }
 
         @Override // Binder call
-        public int getVirtualDisplayFlags() {
-            switch (mType) {
-                case MediaProjectionManager.TYPE_SCREEN_CAPTURE:
-                    return DisplayManager.VIRTUAL_DISPLAY_FLAG_SCREEN_SHARE;
-                case MediaProjectionManager.TYPE_MIRRORING:
-                    return DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC |
-                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
-                case MediaProjectionManager.TYPE_PRESENTATION:
-                    return DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION |
-                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY;
+        public int applyVirtualDisplayFlags(int flags) {
+            if (mType == MediaProjectionManager.TYPE_SCREEN_CAPTURE) {
+                flags &= ~DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY;
+                flags |= DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR
+                        | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+                return flags;
+            } else if (mType == MediaProjectionManager.TYPE_MIRRORING) {
+                flags &= ~(DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC |
+                        DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR);
+                flags |= DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
+                        DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+                return flags;
+            } else if (mType == MediaProjectionManager.TYPE_PRESENTATION) {
+                flags &= ~DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY;
+                flags |= DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC |
+                        DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION |
+                        DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR;
+                return flags;
+            } else {
+                throw new RuntimeException("Unknown MediaProjection type");
             }
-            throw new RuntimeException("Unknown MediaProjection type");
         }
 
         @Override // Binder call