Merge "Initial Frameserver Implementation" into ub-camera-haleakala
diff --git a/res/drawable-anydpi-v21/ic_camera_blanket.xml b/res/drawable-anydpi-v21/ic_camera_blanket.xml
new file mode 100644
index 0000000..45fc29e
--- /dev/null
+++ b/res/drawable-anydpi-v21/ic_camera_blanket.xml
@@ -0,0 +1,27 @@
+<!--
+    Copyright (C) 2014 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+        android:width="64.0dp"
+        android:height="64.0dp"
+        android:viewportWidth="48.0"
+        android:viewportHeight="48.0">
+    <path
+        android:fillColor="#FF5a5b5e"
+        android:pathData="M24.0,24.0m-6.4,0.0a6.4,6.4 0.0,1.0 1.0,12.8 0.0a6.4,6.4 0.0,1.0 1.0,-12.8 0.0"/>
+    <path
+        android:fillColor="#FF5a5b5e"
+        android:pathData="M18.0,4.0l-3.66,4.0l-6.34,0.0c-2.21,0.0 -4.0,1.79 -4.0,4.0l0.0,24.0c0.0,2.21 1.79,4.0 4.0,4.0l32.0,0.0c2.21,0.0 4.0,-1.79 4.0,-4.0l0.0,-24.0c0.0,-2.21 -1.79,-4.0 -4.0,-4.0l-6.34,0.0l-3.66,-4.0l-12.0,0.0zm6.0,30.0c-5.52,0.0 -10.0,-4.48 -10.0,-10.0s4.48,-10.0 10.0,-10.0 10.0,4.48 10.0,10.0 -4.48,10.0 -10.0,10.0z"/>
+</vector>
diff --git a/res/drawable-hdpi/ic_camera_blanket.png b/res/drawable-hdpi/ic_camera_blanket.png
index 2630701..c4e60e5 100644
--- a/res/drawable-hdpi/ic_camera_blanket.png
+++ b/res/drawable-hdpi/ic_camera_blanket.png
Binary files differ
diff --git a/res/drawable-hdpi/ic_video_blanket.png b/res/drawable-hdpi/ic_video_blanket.png
index 6ec9d12..d6f2e46 100644
--- a/res/drawable-hdpi/ic_video_blanket.png
+++ b/res/drawable-hdpi/ic_video_blanket.png
Binary files differ
diff --git a/res/drawable-mdpi/ic_camera_blanket.png b/res/drawable-mdpi/ic_camera_blanket.png
index 8d2e660..3fc6400 100644
--- a/res/drawable-mdpi/ic_camera_blanket.png
+++ b/res/drawable-mdpi/ic_camera_blanket.png
Binary files differ
diff --git a/res/drawable-mdpi/ic_video_blanket.png b/res/drawable-mdpi/ic_video_blanket.png
index bea42e1..db57c43 100644
--- a/res/drawable-mdpi/ic_video_blanket.png
+++ b/res/drawable-mdpi/ic_video_blanket.png
Binary files differ
diff --git a/res/drawable-xhdpi/ic_camera_blanket.png b/res/drawable-xhdpi/ic_camera_blanket.png
index 26fcae4..2d66653 100644
--- a/res/drawable-xhdpi/ic_camera_blanket.png
+++ b/res/drawable-xhdpi/ic_camera_blanket.png
Binary files differ
diff --git a/res/drawable-xhdpi/ic_video_blanket.png b/res/drawable-xhdpi/ic_video_blanket.png
index da017fd..790313c 100644
--- a/res/drawable-xhdpi/ic_video_blanket.png
+++ b/res/drawable-xhdpi/ic_video_blanket.png
Binary files differ
diff --git a/res/drawable-xxhdpi/ic_camera_blanket.png b/res/drawable-xxhdpi/ic_camera_blanket.png
index 1799adc..b70de30 100644
--- a/res/drawable-xxhdpi/ic_camera_blanket.png
+++ b/res/drawable-xxhdpi/ic_camera_blanket.png
Binary files differ
diff --git a/res/drawable-xxhdpi/ic_video_blanket.png b/res/drawable-xxhdpi/ic_video_blanket.png
index 6050437..d66dbd1 100644
--- a/res/drawable-xxhdpi/ic_video_blanket.png
+++ b/res/drawable-xxhdpi/ic_video_blanket.png
Binary files differ
diff --git a/res/drawable-xxxhdpi/ic_camera_blanket.png b/res/drawable-xxxhdpi/ic_camera_blanket.png
index a896d46..717dca2 100644
--- a/res/drawable-xxxhdpi/ic_camera_blanket.png
+++ b/res/drawable-xxxhdpi/ic_camera_blanket.png
Binary files differ
diff --git a/res/drawable-xxxhdpi/ic_video_blanket.png b/res/drawable-xxxhdpi/ic_video_blanket.png
index 23caf41..758efef 100644
--- a/res/drawable-xxxhdpi/ic_video_blanket.png
+++ b/res/drawable-xxxhdpi/ic_video_blanket.png
Binary files differ
diff --git a/res/values/dimens.xml b/res/values/dimens.xml
index 1255334..2dd796c 100644
--- a/res/values/dimens.xml
+++ b/res/values/dimens.xml
@@ -179,7 +179,7 @@
     <dimen name="camera_controls_padding_start">36dp</dimen>
     <dimen name="camera_controls_padding_end">36dp</dimen>
 
-    <dimen name="mode_transition_view_icon_size">144dp</dimen>
+    <dimen name="mode_transition_view_icon_size">64dp</dimen>
     <dimen name="flash_circle_size_after_shrink">36dp</dimen>
 
     <dimen name="mode_options_height">72dp</dimen>
diff --git a/res_p/drawable-hdpi/ic_pano_blanket.png b/res_p/drawable-hdpi/ic_pano_blanket.png
index eecd5cb..3a9fc7b 100644
--- a/res_p/drawable-hdpi/ic_pano_blanket.png
+++ b/res_p/drawable-hdpi/ic_pano_blanket.png
Binary files differ
diff --git a/res_p/drawable-hdpi/ic_photosphere_blanket.png b/res_p/drawable-hdpi/ic_photosphere_blanket.png
index c667cc6..8912ca0 100644
--- a/res_p/drawable-hdpi/ic_photosphere_blanket.png
+++ b/res_p/drawable-hdpi/ic_photosphere_blanket.png
Binary files differ
diff --git a/res_p/drawable-hdpi/ic_refocus_blanket.png b/res_p/drawable-hdpi/ic_refocus_blanket.png
index b260afe..81a2707 100644
--- a/res_p/drawable-hdpi/ic_refocus_blanket.png
+++ b/res_p/drawable-hdpi/ic_refocus_blanket.png
Binary files differ
diff --git a/res_p/drawable-mdpi/ic_pano_blanket.png b/res_p/drawable-mdpi/ic_pano_blanket.png
index 42c0c37..b48b1a3 100644
--- a/res_p/drawable-mdpi/ic_pano_blanket.png
+++ b/res_p/drawable-mdpi/ic_pano_blanket.png
Binary files differ
diff --git a/res_p/drawable-mdpi/ic_photosphere_blanket.png b/res_p/drawable-mdpi/ic_photosphere_blanket.png
index 1db0e49..19fba8d 100644
--- a/res_p/drawable-mdpi/ic_photosphere_blanket.png
+++ b/res_p/drawable-mdpi/ic_photosphere_blanket.png
Binary files differ
diff --git a/res_p/drawable-mdpi/ic_refocus_blanket.png b/res_p/drawable-mdpi/ic_refocus_blanket.png
index 8cfaa2c..303f081 100644
--- a/res_p/drawable-mdpi/ic_refocus_blanket.png
+++ b/res_p/drawable-mdpi/ic_refocus_blanket.png
Binary files differ
diff --git a/res_p/drawable-xhdpi/ic_pano_blanket.png b/res_p/drawable-xhdpi/ic_pano_blanket.png
index 9f313e2..f69d872 100644
--- a/res_p/drawable-xhdpi/ic_pano_blanket.png
+++ b/res_p/drawable-xhdpi/ic_pano_blanket.png
Binary files differ
diff --git a/res_p/drawable-xhdpi/ic_photosphere_blanket.png b/res_p/drawable-xhdpi/ic_photosphere_blanket.png
index e7306d3..07a9798 100644
--- a/res_p/drawable-xhdpi/ic_photosphere_blanket.png
+++ b/res_p/drawable-xhdpi/ic_photosphere_blanket.png
Binary files differ
diff --git a/res_p/drawable-xhdpi/ic_refocus_blanket.png b/res_p/drawable-xhdpi/ic_refocus_blanket.png
index 156c8f4..db7ffd7 100644
--- a/res_p/drawable-xhdpi/ic_refocus_blanket.png
+++ b/res_p/drawable-xhdpi/ic_refocus_blanket.png
Binary files differ
diff --git a/res_p/drawable-xxhdpi/ic_pano_blanket.png b/res_p/drawable-xxhdpi/ic_pano_blanket.png
index ff33adc..f0f1a26 100644
--- a/res_p/drawable-xxhdpi/ic_pano_blanket.png
+++ b/res_p/drawable-xxhdpi/ic_pano_blanket.png
Binary files differ
diff --git a/res_p/drawable-xxhdpi/ic_photosphere_blanket.png b/res_p/drawable-xxhdpi/ic_photosphere_blanket.png
index 54d0360..4d710f7 100644
--- a/res_p/drawable-xxhdpi/ic_photosphere_blanket.png
+++ b/res_p/drawable-xxhdpi/ic_photosphere_blanket.png
Binary files differ
diff --git a/res_p/drawable-xxhdpi/ic_refocus_blanket.png b/res_p/drawable-xxhdpi/ic_refocus_blanket.png
index b4c4239..5261068 100644
--- a/res_p/drawable-xxhdpi/ic_refocus_blanket.png
+++ b/res_p/drawable-xxhdpi/ic_refocus_blanket.png
Binary files differ
diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java
index 56fc826..a48f6be 100644
--- a/src/com/android/camera/CameraActivity.java
+++ b/src/com/android/camera/CameraActivity.java
@@ -160,8 +160,7 @@
 
 public class CameraActivity extends QuickActivity
         implements AppController, CameraAgent.CameraOpenCallback,
-        ShareActionProvider.OnShareTargetSelectedListener,
-        OrientationManager.OnOrientationChangeListener {
+        ShareActionProvider.OnShareTargetSelectedListener {
 
     private static final Log.Tag TAG = new Log.Tag("CameraActivity");
 
@@ -224,7 +223,6 @@
     private long mStorageSpaceBytes = Storage.LOW_STORAGE_THRESHOLD_BYTES;
     private boolean mAutoRotateScreen;
     private boolean mSecureCamera;
-    private int mLastRawOrientation;
     private OrientationManagerImpl mOrientationManager;
     private LocationManager mLocationManager;
     private ButtonManager mButtonManager;
@@ -1495,9 +1493,7 @@
         }
 
         mLocationManager = new LocationManager(mAppContext);
-
-        mOrientationManager = new OrientationManagerImpl(this);
-        mOrientationManager.addOnOrientationChangeListener(mMainHandler, this);
+        mOrientationManager = new OrientationManagerImpl(this, mMainHandler);
 
         setModuleFromModeIndex(getModeIndex());
         mCameraAppUI.prepareModuleUI();
@@ -2197,7 +2193,6 @@
         mCameraAppUI.resetBottomControls(mCurrentModule, modeIndex);
         mCameraAppUI.addShutterListener(mCurrentModule);
         openModule(mCurrentModule);
-        mCurrentModule.onOrientationChanged(mLastRawOrientation);
         // Store the module index so we can use it the next time the Camera
         // starts up.
         mSettingsManager.set(SettingsManager.SCOPE_GLOBAL,
@@ -2474,25 +2469,6 @@
         }
     }
 
-    @Override
-    public void onOrientationChanged(int orientation) {
-        if (orientation != mLastRawOrientation) {
-            Log.v(TAG, "orientation changed (from:to) " + mLastRawOrientation +
-                    ":" + orientation);
-        }
-
-        // We keep the last known orientation. So if the user first orient
-        // the camera then point the camera to floor or sky, we still have
-        // the correct orientation.
-        if (orientation == OrientationManager.ORIENTATION_UNKNOWN) {
-            return;
-        }
-        mLastRawOrientation = orientation;
-        if (mCurrentModule != null) {
-            mCurrentModule.onOrientationChanged(orientation);
-        }
-    }
-
     /**
      * Enable/disable swipe-to-filmstrip. Will always disable swipe if in
      * capture intent.
diff --git a/src/com/android/camera/CameraModule.java b/src/com/android/camera/CameraModule.java
index 5049cab..7073ee4 100644
--- a/src/com/android/camera/CameraModule.java
+++ b/src/com/android/camera/CameraModule.java
@@ -102,4 +102,9 @@
      * @return An accessibility String to be announced during the peek animation.
      */
     public abstract String getPeekAccessibilityString();
+
+    @Override
+    public void onShutterButtonLongPressed() {
+        // noop
+    }
 }
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index a6b7ec6..33f3551 100644
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -33,7 +33,6 @@
 import android.os.SystemClock;
 import android.provider.MediaStore;
 import android.view.KeyEvent;
-import android.view.OrientationEventListener;
 import android.view.Surface;
 import android.view.TextureView;
 import android.view.View;
@@ -44,9 +43,15 @@
 import com.android.camera.app.CameraAppUI.BottomBarUISpec;
 import com.android.camera.app.LocationManager;
 import com.android.camera.app.MediaSaver;
+import com.android.camera.app.OrientationManager;
+import com.android.camera.burst.BurstFacade;
+import com.android.camera.burst.BurstFacadeFactory;
 import com.android.camera.debug.DebugPropertyHelper;
 import com.android.camera.debug.Log;
 import com.android.camera.debug.Log.Tag;
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+import com.android.camera.gl.FrameDistributorWrapper;
+import com.android.camera.gl.SurfaceTextureConsumer;
 import com.android.camera.hardware.HardwareSpec;
 import com.android.camera.module.ModuleController;
 import com.android.camera.one.OneCamera;
@@ -74,6 +79,8 @@
 import com.android.ex.camera2.portability.CameraAgent.CameraProxy;
 
 import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 
@@ -116,6 +123,7 @@
                 int oldTop, int oldRight, int oldBottom) {
             int width = right - left;
             int height = bottom - top;
+            mPreviewConsumer.setSize(width, height);
             updatePreviewTransform(width, height, false);
         }
     };
@@ -135,6 +143,7 @@
         }
     };
 
+
     private static final Tag TAG = new Tag("CaptureModule");
     private static final String PHOTO_MODULE_STRING_ID = "PhotoModule";
     /** Enable additional debug output. */
@@ -155,10 +164,6 @@
      */
     private final boolean mStickyGcamCamera;
 
-    /**
-     * Lock for race conditions in the SurfaceTextureListener callbacks.
-     */
-    private final Object mSurfaceLock = new Object();
     /** Controller giving us access to other services. */
     private final AppController mAppController;
     /** The applications settings manager. */
@@ -177,9 +182,6 @@
     /** Whether HDR is currently enabled. */
     private boolean mHdrEnabled = false;
 
-    /** The texture used to render the preview in. */
-    private SurfaceTexture mPreviewTexture;
-
     /** State by the module state machine. */
     private static enum ModuleState {
         IDLE,
@@ -189,8 +191,6 @@
 
     /** The current state of the module. */
     private ModuleState mState = ModuleState.IDLE;
-    /** Current orientation of the device. */
-    private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
     /** Current zoom value. */
     private float mZoomValue = 1f;
     /** Current duration of capture timer in seconds. */
@@ -228,7 +228,7 @@
     private int mHeading = -1;
 
     /** Used to fetch and embed the location into captured images. */
-    private LocationManager mLocationManager;
+    private final LocationManager mLocationManager;
     /** Plays sounds for countdown timer. */
     private SoundPlayer mCountdownSoundPlayer;
 
@@ -261,6 +261,15 @@
     /** A directory to store debug information in during development. */
     private final File mDebugDataDir;
 
+    /** Used to distribute camera frames to consumers. */
+    private final FrameDistributorWrapper mFrameDistributor;
+
+    /** The frame consumer that renders frames to the preview. */
+    private final SurfaceTextureConsumer mPreviewConsumer;
+
+    /** The burst manager for controlling the burst. */
+    private final BurstFacade mBurstController;
+
     /** CLEAN UP START */
     // private boolean mFirstLayout;
     // private int[] mTargetFPSRanges;
@@ -283,6 +292,12 @@
         mSettingsManager.addListener(this);
         mDebugDataDir = mContext.getExternalCacheDir();
         mStickyGcamCamera = stickyHdr;
+        mLocationManager = mAppController.getLocationManager();
+
+        mPreviewConsumer = new SurfaceTextureConsumer();
+        mFrameDistributor = new FrameDistributorWrapper();
+        mBurstController = BurstFacadeFactory.create(mAppController, getServices().getMediaSaver(),
+                mLocationManager, mAppController.getOrientationManager(), mDebugDataDir);
     }
 
     @Override
@@ -293,7 +308,6 @@
         thread.start();
         mCameraHandler = new Handler(thread.getLooper());
         mCameraManager = mAppController.getCameraManager();
-        mLocationManager = mAppController.getLocationManager();
         mDisplayRotation = CameraUtil.getDisplayRotation(mContext);
         mCameraFacing = getFacingFromCameraId(mSettingsManager.getInteger(
                 mAppController.getModuleScope(),
@@ -301,7 +315,14 @@
         mUI = new CaptureModuleUI(activity, this, mAppController.getModuleLayoutRoot(),
                 mLayoutListener);
         mAppController.setPreviewStatusListener(mUI);
-        mPreviewTexture = mAppController.getCameraAppUI().getSurfaceTexture();
+
+        mBurstController.setContentResolver(activity.getContentResolver());
+        // Set the preview texture from UI for the SurfaceTextureConsumer.
+        mPreviewConsumer.setSurfaceTexture(
+                mAppController.getCameraAppUI().getSurfaceTexture(),
+                mAppController.getCameraAppUI().getSurfaceWidth(),
+                mAppController.getCameraAppUI().getSurfaceHeight());
+
         mSensorManager = (SensorManager) (mContext.getSystemService(Context.SENSOR_SERVICE));
         mAccelerometerSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
         mMagneticSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
@@ -320,8 +341,16 @@
     }
 
     @Override
+    public void onShutterButtonLongPressed() {
+        mBurstController.startBurst();
+    }
+
+    @Override
     public void onShutterButtonFocus(boolean pressed) {
-        // TODO Auto-generated method stub
+        if (!pressed) {
+            // The shutter button was released; stop any bursts.
+            mBurstController.stopBurst();
+        }
     }
 
     @Override
@@ -363,7 +392,8 @@
         PhotoCaptureParameters params = new PhotoCaptureParameters();
         params.title = title;
         params.callback = this;
-        params.orientation = getOrientation();
+        params.orientation =
+                mAppController.getOrientationManager().getDeviceOrientation().getDegrees();
         params.flashMode = getFlashModeFromSettings();
         params.heading = mHeading;
         params.debugDataFolder = mDebugDataDir;
@@ -470,11 +500,16 @@
         // Force to re-apply transform matrix here as a workaround for
         // b/11168275
         updatePreviewTransform(width, height, true);
-        initSurface(surface);
+        initSurfaceTextureConsumer(surface, width, height);
     }
 
-    public void initSurface(final SurfaceTexture surface) {
-        mPreviewTexture = surface;
+    private void initSurfaceTextureConsumer(SurfaceTexture surface, int width, int height) {
+        if (mPreviewConsumer.getSurfaceTexture() != surface) {
+            mPreviewConsumer.setSurfaceTexture(surface, width, height);
+        } else if (mPreviewConsumer.getWidth() != width
+                || mPreviewConsumer.getHeight() != height) {
+            mPreviewConsumer.setSize(width, height);
+        }
         closeCamera();
         openCameraAndStartPreview();
     }
@@ -482,14 +517,14 @@
     @Override
     public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
         Log.d(TAG, "onSurfaceTextureSizeChanged");
-        resetDefaultBufferSize();
+        updateFrameDistributorBufferSize();
     }
 
     @Override
     public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
         Log.d(TAG, "onSurfaceTextureDestroyed");
-        mPreviewTexture = null;
         closeCamera();
+
         return true;
     }
 
@@ -503,6 +538,19 @@
         }
     }
 
+    private void initializeFrameDistributor() {
+        // Register the FrameDistributor's consumers: the burst controller's
+        // preview frame consumer and the preview-rendering consumer.
+        List<FrameConsumer> frameConsumers = new ArrayList<FrameConsumer>();
+        frameConsumers.add(mBurstController.getPreviewFrameConsumer());
+        frameConsumers.add(mPreviewConsumer);
+        mFrameDistributor.start(frameConsumers);
+    }
+
+    private void updateFrameDistributorBufferSize() {
+        mFrameDistributor.updatePreviewBufferSize(mPreviewBufferWidth, mPreviewBufferHeight);
+    }
+
     @Override
     public String getModuleStringIdentifier() {
         return PHOTO_MODULE_STRING_ID;
@@ -513,7 +561,8 @@
         mPaused = false;
         mAppController.getCameraAppUI().onChangeCamera();
         mAppController.addPreviewAreaSizeChangedListener(this);
-        resetDefaultBufferSize();
+        initializeFrameDistributor();
+        updateFrameDistributorBufferSize();
         getServices().getRemoteShutterListener().onModuleReady(this);
         // TODO: Check if we can really take a photo right now (memory, camera
         // state, ... ).
@@ -535,8 +584,10 @@
         // This means we are resuming with an existing preview texture. This
         // means we will never get the onSurfaceTextureAvailable call. So we
         // have to open the camera and start the preview here.
-        if (mPreviewTexture != null) {
-            initSurface(mPreviewTexture);
+        if (mPreviewConsumer.getSurfaceTexture() != null) {
+            initSurfaceTextureConsumer(mPreviewConsumer.getSurfaceTexture(),
+                    mAppController.getCameraAppUI().getSurfaceWidth(),
+                    mAppController.getCameraAppUI().getSurfaceHeight());
         }
 
         mCountdownSoundPlayer.loadSound(R.raw.timer_final_second);
@@ -547,9 +598,11 @@
     public void pause() {
         mPaused = true;
         getServices().getRemoteShutterListener().onModuleExit();
+        mBurstController.stopBurst();
         cancelCountDown();
         closeCamera();
         resetTextureBufferSize();
+        mFrameDistributor.close();
         mCountdownSoundPlayer.unloadSound(R.raw.timer_final_second);
         mCountdownSoundPlayer.unloadSound(R.raw.timer_increment);
         // Remove delayed resume trigger, if it hasn't been executed yet.
@@ -573,21 +626,7 @@
     @Override
     public void onLayoutOrientationChanged(boolean isLandscape) {
         Log.d(TAG, "onLayoutOrientationChanged");
-    }
-
-    @Override
-    public void onOrientationChanged(int orientation) {
-        // We keep the last known orientation. So if the user first orient
-        // the camera then point the camera to floor or sky, we still have
-        // the correct orientation.
-        if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            return;
-        }
-
-        // TODO: Document orientation compute logic and unify them in OrientationManagerImpl.
-        // b/17443789
-        // Flip to counter-clockwise orientation.
-        mOrientation = (360 - orientation) % 360;
+        mBurstController.stopBurst();
     }
 
     @Override
@@ -836,6 +875,10 @@
 
     @Override
     public void onReadyStateChanged(boolean readyForCapture) {
+        if (mBurstController.isBurstRunning()) {
+            return;
+        }
+
         if (readyForCapture) {
             mAppController.getCameraAppUI().enableModeOptions();
         }
@@ -1171,17 +1214,7 @@
         Size previewBufferSize = mCamera.pickPreviewSize(pictureSize, mContext);
         mPreviewBufferWidth = previewBufferSize.getWidth();
         mPreviewBufferHeight = previewBufferSize.getHeight();
-    }
-
-    /**
-     * Resets the default buffer size to the initially calculated size.
-     */
-    private void resetDefaultBufferSize() {
-        synchronized (mSurfaceLock) {
-            if (mPreviewTexture != null) {
-                mPreviewTexture.setDefaultBufferSize(mPreviewBufferWidth, mPreviewBufferHeight);
-            }
-        }
+        updateFrameDistributorBufferSize();
     }
 
     /**
@@ -1223,6 +1256,7 @@
                     @Override
                     public void onCameraClosed() {
                         mCamera = null;
+                        mBurstController.onCameraDetached();
                         mCameraOpenCloseLock.release();
                     }
 
@@ -1230,17 +1264,18 @@
                     public void onCameraOpened(final OneCamera camera) {
                         Log.d(TAG, "onCameraOpened: " + camera);
                         mCamera = camera;
+                        mBurstController.onCameraAttached(mCamera);
                         updatePreviewBufferDimension();
 
                         // If the surface texture is not destroyed, it may have
                         // the last frame lingering. We need to hold off setting
                         // transform until preview is started.
-                        resetDefaultBufferSize();
+                        updateFrameDistributorBufferSize();
                         mState = ModuleState.WATCH_FOR_NEXT_FRAME_AFTER_PREVIEW_STARTED;
                         Log.d(TAG, "starting preview ...");
 
                         // TODO: Consider rolling these two calls into one.
-                        camera.startPreview(new Surface(mPreviewTexture),
+                        camera.startPreview(new Surface(mFrameDistributor.getInputSurfaceTexture()),
                                 new CaptureReadyCallback() {
                                     @Override
                                     public void onSetupFailed() {
@@ -1294,7 +1329,7 @@
     private void closeCamera() {
         try {
             mCameraOpenCloseLock.acquire();
-        } catch(InterruptedException e) {
+        } catch (InterruptedException e) {
             throw new RuntimeException("Interrupted while waiting to acquire camera-open lock.", e);
         }
         try {
@@ -1308,21 +1343,13 @@
         }
     }
 
-    private int getOrientation() {
-        if (mAppController.isAutoRotateScreen()) {
-            return mDisplayRotation;
-        } else {
-            return mOrientation;
-        }
-    }
-
     /**
      * @return Whether we are resuming from within the lockscreen.
      */
     private static boolean isResumeFromLockscreen(Activity activity) {
         String action = activity.getIntent().getAction();
         return (MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA.equals(action)
-        || MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(action));
+                || MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(action));
     }
 
     /**
@@ -1334,7 +1361,9 @@
         }
         cancelCountDown();
         mAppController.freezeScreenUntilPreviewReady();
-        initSurface(mPreviewTexture);
+        initSurfaceTextureConsumer(mPreviewConsumer.getSurfaceTexture(),
+                mAppController.getCameraAppUI().getSurfaceWidth(),
+                mAppController.getCameraAppUI().getSurfaceHeight());
 
         // TODO: Un-comment once we have focus back.
         // if (mFocusManager != null) {
@@ -1367,9 +1396,6 @@
     }
 
     private void resetTextureBufferSize() {
-        // Reset the default buffer sizes on the shared SurfaceTexture
-        // so they are not scaled for gcam.
-        //
         // According to the documentation for
         // SurfaceTexture.setDefaultBufferSize,
         // photo and video based image producers (presumably only Camera 1 api),
@@ -1377,10 +1403,7 @@
         // SurfaceTexture must have these buffer sizes reset manually. Otherwise
         // the SurfaceTexture cannot be transformed by matrix set on the
         // TextureView.
-        if (mPreviewTexture != null) {
-            mPreviewTexture.setDefaultBufferSize(mAppController.getCameraAppUI().getSurfaceWidth(),
-                    mAppController.getCameraAppUI().getSurfaceHeight());
-        }
+        updateFrameDistributorBufferSize();
     }
 
     /**
diff --git a/src/com/android/camera/MediaSaverImpl.java b/src/com/android/camera/MediaSaverImpl.java
index c5c4c0f..ad28ca5 100644
--- a/src/com/android/camera/MediaSaverImpl.java
+++ b/src/com/android/camera/MediaSaverImpl.java
@@ -25,6 +25,7 @@
 import android.provider.MediaStore.Video;
 
 import com.android.camera.app.MediaSaver;
+import com.android.camera.data.LocalData;
 import com.android.camera.debug.Log;
 import com.android.camera.exif.ExifInterface;
 
@@ -58,13 +59,21 @@
     public void addImage(final byte[] data, String title, long date, Location loc, int width,
             int height, int orientation, ExifInterface exif, OnMediaSavedListener l,
             ContentResolver resolver) {
+        addImage(data, title, date, loc, width, height, orientation, exif, l,
+                resolver, LocalData.MIME_TYPE_JPEG);
+    }
+
+    @Override
+    public void addImage(final byte[] data, String title, long date, Location loc, int width,
+            int height, int orientation, ExifInterface exif, OnMediaSavedListener l,
+            ContentResolver resolver, String mimeType) {
         if (isQueueFull()) {
             Log.e(TAG, "Cannot add image when the queue is full");
             return;
         }
         ImageSaveTask t = new ImageSaveTask(data, title, date,
                 (loc == null) ? null : new Location(loc),
-                width, height, orientation, exif, resolver, l);
+                width, height, orientation, mimeType, exif, resolver, l);
 
         mMemoryUse += data.length;
         if (isQueueFull()) {
@@ -78,14 +87,15 @@
             ExifInterface exif, OnMediaSavedListener l, ContentResolver resolver) {
         // When dimensions are unknown, pass 0 as width and height,
         // and decode image for width and height later in a background thread
-        addImage(data, title, date, loc, 0, 0, orientation, exif, l, resolver);
+        addImage(data, title, date, loc, 0, 0, orientation, exif, l, resolver,
+                LocalData.MIME_TYPE_JPEG);
     }
     @Override
     public void addImage(final byte[] data, String title, Location loc, int width, int height,
             int orientation, ExifInterface exif, OnMediaSavedListener l,
             ContentResolver resolver) {
         addImage(data, title, System.currentTimeMillis(), loc, width, height,
-                orientation, exif, l, resolver);
+                orientation, exif, l, resolver, LocalData.MIME_TYPE_JPEG);
     }
 
     @Override
@@ -124,13 +134,15 @@
         private final Location loc;
         private int width, height;
         private final int orientation;
+        private final String mimeType;
         private final ExifInterface exif;
         private final ContentResolver resolver;
         private final OnMediaSavedListener listener;
 
         public ImageSaveTask(byte[] data, String title, long date, Location loc,
-                             int width, int height, int orientation, ExifInterface exif,
-                             ContentResolver resolver, OnMediaSavedListener listener) {
+                             int width, int height, int orientation, String mimeType,
+                             ExifInterface exif, ContentResolver resolver,
+                             OnMediaSavedListener listener) {
             this.data = data;
             this.title = title;
             this.date = date;
@@ -138,6 +150,7 @@
             this.width = width;
             this.height = height;
             this.orientation = orientation;
+            this.mimeType = mimeType;
             this.exif = exif;
             this.resolver = resolver;
             this.listener = listener;
@@ -159,7 +172,8 @@
                 height = options.outHeight;
             }
             return Storage.addImage(
-                    resolver, title, date, loc, orientation, exif, data, width, height);
+                    resolver, title, date, loc, orientation, exif, data, width, height,
+                    mimeType);
         }
 
         @Override
diff --git a/src/com/android/camera/PhotoModule.java b/src/com/android/camera/PhotoModule.java
index b4b5ee7..b3ffdcc 100644
--- a/src/com/android/camera/PhotoModule.java
+++ b/src/com/android/camera/PhotoModule.java
@@ -52,6 +52,7 @@
 import com.android.camera.app.MemoryManager;
 import com.android.camera.app.MemoryManager.MemoryListener;
 import com.android.camera.app.MotionManager;
+import com.android.camera.app.OrientationManager;
 import com.android.camera.debug.Log;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.exif.ExifTag;
@@ -157,9 +158,6 @@
     private boolean mAwbLockSupported;
     private boolean mContinuousFocusSupported;
 
-    // The degrees of the device rotated clockwise from its natural orientation.
-    private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
-
     private static final String sTempCropFilename = "crop-temp";
 
     private boolean mFaceDetectionStarted = false;
@@ -1298,15 +1296,15 @@
         // Set JPEG orientation. Even if screen UI is locked in portrait, camera orientation should
         // still match device orientation (e.g., users should always get landscape photos while
         // capturing by putting device in landscape.)
-        int orientation = mActivity.isAutoRotateScreen() ? mDisplayRotation : mOrientation;
         Characteristics info = mActivity.getCameraProvider().getCharacteristics(mCameraId);
-        mJpegRotation = info.getJpegOrientation(orientation);
+        int sensorOrientation = info.getSensorOrientation();
+        int deviceOrientation =
+                mAppController.getOrientationManager().getDeviceOrientation().getDegrees();
+        boolean isFrontCamera = info.isFacingFront();
+        mJpegRotation =
+                CameraUtil.getImageRotation(sensorOrientation, deviceOrientation, isFrontCamera);
         mCameraDevice.setJpegOrientation(mJpegRotation);
 
-        Log.v(TAG, "capture orientation (screen:device:used:jpeg) " +
-                mDisplayRotation + ":" + mOrientation + ":" +
-                orientation + ":" + mJpegRotation);
-
         mCameraDevice.takePicture(mHandler,
                 new ShutterCallback(!animateBefore),
                 mRawPictureCallback, mPostViewPictureCallback,
@@ -1345,18 +1343,6 @@
     }
 
     @Override
-    public void onOrientationChanged(int orientation) {
-        if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            return;
-        }
-
-        // TODO: Document orientation compute logic and unify them in OrientationManagerImpl.
-        // b/17443789
-        // Flip to counter-clockwise orientation.
-        mOrientation = (360 - orientation) % 360;
-    }
-
-    @Override
     public void onCameraAvailable(CameraProxy cameraProxy) {
         Log.i(TAG, "onCameraAvailable");
         if (mPaused) {
diff --git a/src/com/android/camera/ShutterButton.java b/src/com/android/camera/ShutterButton.java
index 05a6126..e80bfcb 100755
--- a/src/com/android/camera/ShutterButton.java
+++ b/src/com/android/camera/ShutterButton.java
@@ -17,8 +17,9 @@
 package com.android.camera;
 
 import android.content.Context;
-import android.text.method.Touch;
 import android.util.AttributeSet;
+import android.view.GestureDetector;
+import android.view.GestureDetector.SimpleOnGestureListener;
 import android.view.MotionEvent;
 import android.view.View;
 import android.widget.ImageView;
@@ -26,8 +27,8 @@
 import com.android.camera.debug.Log;
 import com.android.camera.ui.TouchCoordinate;
 
-import java.util.List;
 import java.util.ArrayList;
+import java.util.List;
 
 /**
  * A button designed to be used for the on-screen shutter button.
@@ -40,6 +41,8 @@
     public static final float ALPHA_WHEN_DISABLED = 0.2f;
     private boolean mTouchEnabled = true;
     private TouchCoordinate mTouchCoordinate;
+    private final GestureDetector mGestureDetector;
+
     /**
      * A callback to be invoked when a ShutterButton's pressed state changes.
      */
@@ -52,6 +55,23 @@
         void onShutterButtonFocus(boolean pressed);
         void onShutterCoordinate(TouchCoordinate coord);
         void onShutterButtonClick();
+
+        /**
+         * Called when shutter button is held down for a long press.
+         */
+        void onShutterButtonLongPressed();
+    }
+
+    /**
+     * A gesture listener to detect long presses.
+     */
+    private class LongPressGestureListener extends SimpleOnGestureListener {
+        @Override
+        public void onLongPress(MotionEvent event) {
+            for (OnShutterButtonListener listener : mListeners) {
+                listener.onShutterButtonLongPressed();
+            }
+        }
     }
 
     private List<OnShutterButtonListener> mListeners
@@ -60,6 +80,8 @@
 
     public ShutterButton(Context context, AttributeSet attrs) {
         super(context, attrs);
+        mGestureDetector = new GestureDetector(context, new LongPressGestureListener());
+        mGestureDetector.setIsLongpressEnabled(true);
     }
 
     /**
@@ -83,6 +105,7 @@
     @Override
     public boolean dispatchTouchEvent(MotionEvent m) {
         if (mTouchEnabled) {
+            mGestureDetector.onTouchEvent(m);
             if (m.getActionMasked() == MotionEvent.ACTION_UP) {
                 mTouchCoordinate = new TouchCoordinate(m.getX(), m.getY(), this.getMeasuredWidth(),
                         this.getMeasuredHeight());
diff --git a/src/com/android/camera/Storage.java b/src/com/android/camera/Storage.java
index d4a1790..3616bc1 100644
--- a/src/com/android/camera/Storage.java
+++ b/src/com/android/camera/Storage.java
@@ -45,6 +45,7 @@
             Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).toString();
     public static final String DIRECTORY = DCIM + "/Camera";
     public static final String JPEG_POSTFIX = ".jpg";
+    public static final String GIF_POSTFIX = ".gif";
     // Match the code in MediaProvider.computeBucketValues().
     public static final String BUCKET_ID =
             String.valueOf(DIRECTORY.toLowerCase().hashCode());
@@ -106,11 +107,11 @@
      * @return The URI of the added image, or null if the image could not be
      *         added.
      */
-    private static Uri addImage(ContentResolver resolver, String title, long date,
+    static Uri addImage(ContentResolver resolver, String title, long date,
             Location location, int orientation, ExifInterface exif, byte[] data, int width,
             int height, String mimeType) {
 
-        String path = generateFilepath(title);
+        String path = generateFilepath(title, mimeType);
         long fileLength = writeFile(path, data, exif);
         if (fileLength >= 0) {
             return addImageToMediaStore(resolver, title, date, location, orientation, fileLength,
@@ -238,7 +239,7 @@
     public static Uri updateImage(Uri imageUri, ContentResolver resolver, String title, long date,
            Location location, int orientation, ExifInterface exif,
            byte[] jpeg, int width, int height, String mimeType) {
-        String path = generateFilepath(title);
+        String path = generateFilepath(title, mimeType);
         writeFile(path, jpeg, exif);
         return updateImage(imageUri, resolver, title, date, location, orientation, jpeg.length, path,
                 width, height, mimeType);
@@ -328,8 +329,16 @@
         return resultUri;
     }
 
-    private static String generateFilepath(String title) {
-        return DIRECTORY + '/' + title + ".jpg";
+    private static String generateFilepath(String title, String mimeType) {
+        String extension = null;
+        if (LocalData.MIME_TYPE_JPEG.equals(mimeType)) {
+            extension = JPEG_POSTFIX;
+        } else if (LocalData.MIME_TYPE_GIF.equals(mimeType)) {
+            extension = GIF_POSTFIX;
+        } else {
+            throw new IllegalArgumentException("Invalid mimeType: " + mimeType);
+        }
+        return DIRECTORY + '/' + title + extension;
     }
 
     /**
diff --git a/src/com/android/camera/VideoModule.java b/src/com/android/camera/VideoModule.java
index a99fcb4..9e60d4d 100644
--- a/src/com/android/camera/VideoModule.java
+++ b/src/com/android/camera/VideoModule.java
@@ -45,7 +45,6 @@
 import android.provider.MediaStore.MediaColumns;
 import android.provider.MediaStore.Video;
 import android.view.KeyEvent;
-import android.view.OrientationEventListener;
 import android.view.View;
 import android.widget.Toast;
 
@@ -55,6 +54,7 @@
 import com.android.camera.app.MediaSaver;
 import com.android.camera.app.MemoryManager;
 import com.android.camera.app.MemoryManager.MemoryListener;
+import com.android.camera.app.OrientationManager;
 import com.android.camera.debug.Log;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.hardware.HardwareSpec;
@@ -87,11 +87,9 @@
 import java.util.Set;
 
 public class VideoModule extends CameraModule
-    implements ModuleController,
-    VideoController,
-    MemoryListener,
-    MediaRecorder.OnErrorListener,
-    MediaRecorder.OnInfoListener, FocusOverlayManager.Listener {
+        implements FocusOverlayManager.Listener, MediaRecorder.OnErrorListener,
+        MediaRecorder.OnInfoListener, MemoryListener,
+        OrientationManager.OnOrientationChangeListener, VideoController {
 
     private static final String VIDEO_MODULE_STRING_ID = "VideoModule";
 
@@ -178,9 +176,6 @@
     private VideoUI mUI;
     private CameraProxy mCameraDevice;
 
-    // The degrees of the device rotated clockwise from its natural orientation.
-    private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
-
     private float mZoomValue;  // The current zoom ratio.
 
     private final MediaSaver.OnMediaSavedListener mOnVideoSavedListener =
@@ -488,20 +483,9 @@
     }
 
     @Override
-    public void onOrientationChanged(int orientation) {
-        // We keep the last known orientation. So if the user first orient
-        // the camera then point the camera to floor or sky, we still have
-        // the correct orientation.
-        if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            return;
-        }
-        int newOrientation = CameraUtil.roundOrientation(orientation, mOrientation);
-
-        if (mOrientation != newOrientation) {
-            mOrientation = newOrientation;
-        }
-        mUI.onOrientationChanged(orientation);
-
+    public void onOrientationChanged(OrientationManager orientationManager,
+                                     OrientationManager.DeviceOrientation deviceOrientation) {
+        mUI.onOrientationChanged(orientationManager, deviceOrientation);
     }
 
     private final ButtonManager.ButtonCallback mFlashCallback =
@@ -1161,23 +1145,12 @@
             // on the size restriction.
         }
 
-        // See com.android.camera.cameradevice.CameraSettings.setPhotoRotationDegrees
-        // for documentation.
-        // Note that mOrientation here is the device orientation, which is the opposite of
-        // what activity.getWindowManager().getDefaultDisplay().getRotation() would return,
-        // which is the orientation the graphics need to rotate in order to render correctly.
-        int rotation = 0;
-        if (mOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
-            Characteristics info =
-                    mActivity.getCameraProvider().getCharacteristics(mCameraId);
-            if (isCameraFrontFacing()) {
-                rotation = (info.getSensorOrientation() - mOrientation + 360) % 360;
-            } else if (isCameraBackFacing()) {
-                rotation = (info.getSensorOrientation() + mOrientation) % 360;
-            } else {
-                Log.e(TAG, "Camera is facing unhandled direction");
-            }
-        }
+        int sensorOrientation =
+                mActivity.getCameraProvider().getCharacteristics(mCameraId).getSensorOrientation();
+        int deviceOrientation =
+                mAppController.getOrientationManager().getDeviceOrientation().getDegrees();
+        int rotation = CameraUtil.getImageRotation(
+                sensorOrientation, deviceOrientation, isCameraFrontFacing());
         mMediaRecorder.setOrientationHint(rotation);
 
         try {
@@ -1716,6 +1689,10 @@
         mAppController.setShutterEnabled(false);
         mZoomValue = 1.0f;
 
+        OrientationManager orientationManager = mAppController.getOrientationManager();
+        orientationManager.addOnOrientationChangeListener(this);
+        mUI.onOrientationChanged(orientationManager, orientationManager.getDeviceOrientation());
+
         showVideoSnapshotUI(false);
 
         if (!mPreviewing) {
@@ -1743,6 +1720,8 @@
     public void pause() {
         mPaused = true;
 
+        mAppController.getOrientationManager().removeOnOrientationChangeListener(this);
+
         if (mFocusManager != null) {
             // If camera is not open when resume is called, focus manager will not
             // be initialized yet, in which case it will start listening to
diff --git a/src/com/android/camera/VideoUI.java b/src/com/android/camera/VideoUI.java
index 1276c84..86f27af 100644
--- a/src/com/android/camera/VideoUI.java
+++ b/src/com/android/camera/VideoUI.java
@@ -27,6 +27,7 @@
 import android.widget.LinearLayout;
 import android.widget.TextView;
 
+import com.android.camera.app.OrientationManager;
 import com.android.camera.debug.Log;
 import com.android.camera.ui.FocusOverlay;
 import com.android.camera.ui.PreviewOverlay;
@@ -269,8 +270,9 @@
         return new Point(mRootView.getMeasuredWidth(), mRootView.getMeasuredHeight());
     }
 
-    public void onOrientationChanged(int orientation) {
-        mVideoHints.onOrientationChanged(orientation);
+    public void onOrientationChanged(OrientationManager orientationManager,
+                                     OrientationManager.DeviceOrientation deviceOrientation) {
+        mVideoHints.onOrientationChanged(orientationManager, deviceOrientation);
     }
 
     private class ZoomChangeListener implements PreviewOverlay.OnZoomChangedListener {
diff --git a/src/com/android/camera/app/CameraAppUI.java b/src/com/android/camera/app/CameraAppUI.java
index 2362e69..673134b 100644
--- a/src/com/android/camera/app/CameraAppUI.java
+++ b/src/com/android/camera/app/CameraAppUI.java
@@ -1372,6 +1372,11 @@
         // noop
     }
 
+    @Override
+    public void onShutterButtonLongPressed() {
+        // noop
+    }
+
     /**
      * Set the mode options toggle clickable.
      */
diff --git a/src/com/android/camera/app/MediaSaver.java b/src/com/android/camera/app/MediaSaver.java
index 4387c98..459a4e9 100644
--- a/src/com/android/camera/app/MediaSaver.java
+++ b/src/com/android/camera/app/MediaSaver.java
@@ -64,6 +64,11 @@
     /**
      * Adds an image into {@link android.content.ContentResolver} and also
      * saves the file to the storage in the background.
+     * <p/>
+     * Equivalent to calling
+     * {@link #addImage(byte[], String, long, Location, int, int, int,
+     * ExifInterface, OnMediaSavedListener, ContentResolver, String)}
+     * with <code>image/jpeg</code> as <code>mimeType</code>.
      *
      * @param data The JPEG image data.
      * @param title The title of the image.
@@ -86,6 +91,31 @@
 
     /**
      * Adds an image into {@link android.content.ContentResolver} and also
+     * saves the file to the storage in the background.
+     *
+     * @param data The image data.
+     * @param title The title of the image.
+     * @param date The date when the image is created.
+     * @param loc The location where the image is created. Can be {@code null}.
+     * @param width The width of the image data before the orientation is
+     *              applied.
+     * @param height The height of the image data before the orientation is
+     *               applied.
+     * @param orientation The orientation of the image. The value is the
+     *                    clockwise rotation in degrees. Valid values are
+     *                    0, 90, 180 and 270.
+     * @param exif The EXIF data of this image.
+     * @param l A callback object used when the saving is done.
+     * @param resolver The {@link android.content.ContentResolver} to be
+     *                 updated.
+     * @param mimeType The mimeType of the image.
+     */
+    void addImage(byte[] data, String title, long date, Location loc, int width, int height,
+            int orientation, ExifInterface exif, OnMediaSavedListener l, ContentResolver resolver,
+            String mimeType);
+
+    /**
+     * Adds an image into {@link android.content.ContentResolver} and also
      * saves the file to the storage in the background. The width and height
      * will be obtained directly from the image data.
      *
diff --git a/src/com/android/camera/app/OrientationManager.java b/src/com/android/camera/app/OrientationManager.java
index 0acab44..8af07b4 100644
--- a/src/com/android/camera/app/OrientationManager.java
+++ b/src/com/android/camera/app/OrientationManager.java
@@ -1,35 +1,68 @@
 package com.android.camera.app;
 
-import android.os.Handler;
-import android.view.OrientationEventListener;
-
 /**
  * An interface which defines the orientation manager.
  */
 public interface OrientationManager {
-    public final static int ORIENTATION_UNKNOWN = OrientationEventListener.ORIENTATION_UNKNOWN;
+    public static enum DeviceOrientation {
+        UNKNOWN(-1),
+        CLOCKWISE_0(0),
+        CLOCKWISE_90(90),
+        CLOCKWISE_180(180),
+        CLOCKWISE_270(270);
+
+        private final int mDegrees;
+
+        private DeviceOrientation(int degrees) {
+            mDegrees = degrees;
+        }
+
+        /**
+         * Returns the clockwise rotation in degrees.
+         */
+        public int getDegrees() {
+            return mDegrees;
+        }
+    }
 
     public interface OnOrientationChangeListener {
         /**
          * Called when the orientation changes.
          *
-         * @param orientation The current orientation.
+         * @param orientationManager The orientation manager that detected the change.
+         * @param orientation The new rounded orientation.
          */
-        public void onOrientationChanged(int orientation);
+        public void onOrientationChanged(OrientationManager orientationManager,
+                                         DeviceOrientation orientation);
     }
 
     /**
      * Adds the
      * {@link com.android.camera.app.OrientationManager.OnOrientationChangeListener}.
      */
-    public void addOnOrientationChangeListener(
-            Handler handler, OnOrientationChangeListener listener);
+    public void addOnOrientationChangeListener(OnOrientationChangeListener listener);
 
     /**
      * Removes the listener.
      */
-    public void removeOnOrientationChangeListener(
-            Handler handler, OnOrientationChangeListener listener);
+    public void removeOnOrientationChangeListener(OnOrientationChangeListener listener);
+
+    /**
+     * Returns the current rounded device orientation.
+     */
+    public DeviceOrientation getDeviceOrientation();
+
+    /**
+     * Returns whether the device is in landscape, based on its natural
+     * orientation and the current rotation from that natural orientation.
+     */
+    public boolean isInLandscape();
+
+    /**
+     * Returns whether the device is in portrait, based on its natural
+     * orientation and the current rotation from that natural orientation.
+     */
+    public boolean isInPortrait();
 
     /**
      * Lock the framework orientation to the current device orientation
@@ -43,14 +76,8 @@
      */
     void unlockOrientation();
 
-    /** @return Whether the orientation is locked by the app or the system. */
-    boolean isOrientationLocked();
-
     /**
-     * Returns the display rotation degrees relative to the natural orientation
-     * in clockwise.
-     *
-     * @return 0, 90, 180, or 270.
+     * Return whether the orientation is locked by the app or the system.
      */
-    int getDisplayRotation();
+    boolean isOrientationLocked();
 }
diff --git a/src/com/android/camera/app/OrientationManagerImpl.java b/src/com/android/camera/app/OrientationManagerImpl.java
index 0549407..6078470 100644
--- a/src/com/android/camera/app/OrientationManagerImpl.java
+++ b/src/com/android/camera/app/OrientationManagerImpl.java
@@ -21,10 +21,13 @@
 import android.content.Context;
 import android.content.pm.ActivityInfo;
 import android.content.res.Configuration;
+import android.graphics.Point;
 import android.os.Handler;
 import android.provider.Settings;
+import android.view.Display;
 import android.view.OrientationEventListener;
 import android.view.Surface;
+import android.view.WindowManager;
 
 import com.android.camera.debug.Log;
 import com.android.camera.util.ApiHelper;
@@ -35,16 +38,25 @@
 /**
  * The implementation of {@link com.android.camera.app.OrientationManager}
  * by {@link android.view.OrientationEventListener}.
- * TODO: make this class package-private
  */
 public class OrientationManagerImpl implements OrientationManager {
     private static final Log.Tag TAG = new Log.Tag("OrientMgrImpl");
 
-    // Orientation hysteresis amount used in rounding, in degrees
+    // Orientation hysteresis amount used in rounding, in degrees
     private static final int ORIENTATION_HYSTERESIS = 5;
 
     private final Activity mActivity;
+
+    // The handler used to invoke listener callback.
+    private final Handler mHandler;
+
     private final MyOrientationEventListener mOrientationListener;
+
+    // We keep the last known orientation. So if the user first orients
+    // the camera and then points it at the floor or the sky, we still
+    // have the correct orientation.
+    private DeviceOrientation mLastDeviceOrientation = DeviceOrientation.UNKNOWN;
+
     // If the framework orientation is locked.
     private boolean mOrientationLocked = false;
 
@@ -52,43 +64,22 @@
     // don't allow the orientation to be unlocked if the value is true.
     private boolean mRotationLockedSetting = false;
 
-    private final List<OrientationChangeCallback> mListeners =
-            new ArrayList<OrientationChangeCallback>();
+    private final List<OnOrientationChangeListener> mListeners =
+            new ArrayList<OnOrientationChangeListener>();
 
-    private static class OrientationChangeCallback {
-        private final Handler mHandler;
-        private final OnOrientationChangeListener mListener;
+    private final boolean mIsDefaultToPortrait;
 
-        OrientationChangeCallback(Handler handler, OnOrientationChangeListener listener) {
-            mHandler = handler;
-            mListener = listener;
-        }
-
-        public void postOrientationChangeCallback(final int orientation) {
-            mHandler.post(new Runnable() {
-                @Override
-                public void run() {
-                    mListener.onOrientationChanged(orientation);
-                }
-            });
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (o != null && o instanceof OrientationChangeCallback) {
-                OrientationChangeCallback c = (OrientationChangeCallback) o;
-                if (mHandler == c.mHandler && mListener == c.mListener) {
-                    return true;
-                }
-                return false;
-            }
-            return false;
-        }
-    }
-
-    public OrientationManagerImpl(Activity activity) {
+    /**
+     * Instantiates a new orientation manager.
+     *
+     * @param activity The main activity object.
+     * @param handler The handler used to invoke listener callback.
+     */
+    public OrientationManagerImpl(Activity activity, Handler handler) {
         mActivity = activity;
         mOrientationListener = new MyOrientationEventListener(activity);
+        mHandler = handler;
+        mIsDefaultToPortrait = isDefaultToPortrait(activity);
     }
 
     public void resume() {
@@ -102,35 +93,47 @@
         mOrientationListener.disable();
     }
 
-    ////////////////////////////////////////////////////////////////////////////
-    //  Orientation handling
-    //
-    //  We can choose to lock the framework orientation or not. If we lock the
-    //  framework orientation, we calculate a a compensation value according to
-    //  current device orientation and send it to listeners. If we don't lock
-    //  the framework orientation, we always set the compensation value to 0.
-    ////////////////////////////////////////////////////////////////////////////
-
     @Override
-    public void addOnOrientationChangeListener(Handler handler,
-            OnOrientationChangeListener listener) {
-        OrientationChangeCallback callback = new OrientationChangeCallback(handler, listener);
-        if (mListeners.contains(callback)) {
-            return;
-        }
-        mListeners.add(callback);
+    public DeviceOrientation getDeviceOrientation() {
+        return mLastDeviceOrientation;
     }
 
     @Override
-    public void removeOnOrientationChangeListener(Handler handler,
-            OnOrientationChangeListener listener) {
-        OrientationChangeCallback callback = new OrientationChangeCallback(handler, listener);
-        if (!mListeners.remove(callback)) {
+    public void addOnOrientationChangeListener(OnOrientationChangeListener listener) {
+        if (mListeners.contains(listener)) {
+            return;
+        }
+        mListeners.add(listener);
+    }
+
+    @Override
+    public void removeOnOrientationChangeListener(OnOrientationChangeListener listener) {
+        if (!mListeners.remove(listener)) {
             Log.v(TAG, "Removing non-existing listener.");
         }
     }
 
     @Override
+    public boolean isInLandscape() {
+        int roundedOrientationDegrees = mLastDeviceOrientation.getDegrees();
+        if (mIsDefaultToPortrait) {
+            if (roundedOrientationDegrees % 180 == 90) {
+                return true;
+            }
+        } else {
+            if (roundedOrientationDegrees % 180 == 0) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public boolean isInPortrait() {
+        return !isInLandscape();
+    }
+
+    @Override
     public void lockOrientation() {
         if (mOrientationLocked || mRotationLockedSetting) {
             return;
@@ -159,7 +162,7 @@
     }
 
     private int calculateCurrentScreenOrientation() {
-        int displayRotation = getDisplayRotation();
+        int displayRotation = getDisplayRotation(mActivity);
         // Display rotation >= 180 means we need to use the REVERSE landscape/portrait
         boolean standard = displayRotation < 180;
         if (mActivity.getResources().getConfiguration().orientation
@@ -189,40 +192,54 @@
 
         @Override
         public void onOrientationChanged(int orientation) {
-            // We keep the last known orientation. So if the user first orient
-            // the camera then point the camera to floor or sky, we still have
-            // the correct orientation.
             if (orientation == ORIENTATION_UNKNOWN) {
                 return;
             }
-            // TODO: We have two copies of the rounding method: one is CameraUtil.roundOrientation
-            // and the other is OrientationManagerImpl.roundOrientation. The same computation is
-            // done twice when orientation is changed. We should remove the duplicate. b/17440795
-            final int roundedOrientation = roundOrientation(orientation, 0);
-            for (OrientationChangeCallback l : mListeners) {
-                l.postOrientationChangeCallback(roundedOrientation);
+
+            final DeviceOrientation roundedDeviceOrientation =
+                    roundOrientation(mLastDeviceOrientation, orientation);
+            if (roundedDeviceOrientation == mLastDeviceOrientation) {
+                return;
+            }
+            Log.v(TAG, "orientation changed (from:to) " + mLastDeviceOrientation +
+                    ":" + roundedDeviceOrientation);
+            mLastDeviceOrientation = roundedDeviceOrientation;
+
+            for (final OnOrientationChangeListener listener : mListeners) {
+                mHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        listener.onOrientationChanged(OrientationManagerImpl.this, roundedDeviceOrientation);
+                    }
+                });
             }
         }
     }
 
-    @Override
-    public int getDisplayRotation() {
-        return getDisplayRotation(mActivity);
-    }
-
-    private static int roundOrientation(int orientation, int orientationHistory) {
-        boolean changeOrientation = false;
-        if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            changeOrientation = true;
+    private static DeviceOrientation roundOrientation(DeviceOrientation oldDeviceOrientation,
+                                                      int newRawOrientation) {
+        boolean isOrientationChanged = false;
+        if (oldDeviceOrientation == DeviceOrientation.UNKNOWN) {
+            isOrientationChanged = true;
         } else {
-            int dist = Math.abs(orientation - orientationHistory);
+            int dist = Math.abs(newRawOrientation - oldDeviceOrientation.getDegrees());
             dist = Math.min(dist, 360 - dist);
-            changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS);
+            isOrientationChanged = (dist >= 45 + ORIENTATION_HYSTERESIS);
         }
-        if (changeOrientation) {
-            return ((orientation + 45) / 90 * 90) % 360;
+        if (isOrientationChanged) {
+            int newRoundedOrientation = ((newRawOrientation + 45) / 90 * 90) % 360;
+            switch (newRoundedOrientation) {
+                case 0:
+                    return DeviceOrientation.CLOCKWISE_0;
+                case 90:
+                    return DeviceOrientation.CLOCKWISE_90;
+                case 180:
+                    return DeviceOrientation.CLOCKWISE_180;
+                case 270:
+                    return DeviceOrientation.CLOCKWISE_270;
+            }
         }
-        return orientationHistory;
+        return oldDeviceOrientation;
     }
 
     private static int getDisplayRotation(Activity activity) {
@@ -236,4 +253,28 @@
         }
         return 0;
     }
+
+    /**
+     * Calculate the default orientation of the device based on the width and
+     * height of the display when rotation = 0 (i.e. natural width and height)
+     *
+     * @param context current context
+     * @return whether the default orientation of the device is portrait
+     */
+    private static boolean isDefaultToPortrait(Context context) {
+        Display currentDisplay = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE))
+                .getDefaultDisplay();
+        Point displaySize = new Point();
+        currentDisplay.getSize(displaySize);
+        int orientation = currentDisplay.getRotation();
+        int naturalWidth, naturalHeight;
+        if (orientation == Surface.ROTATION_0 || orientation == Surface.ROTATION_180) {
+            naturalWidth = displaySize.x;
+            naturalHeight = displaySize.y;
+        } else {
+            naturalWidth = displaySize.y;
+            naturalHeight = displaySize.x;
+        }
+        return naturalWidth < naturalHeight;
+    }
 }
diff --git a/src/com/android/camera/burst/BurstController.java b/src/com/android/camera/burst/BurstController.java
index fdf040b..c501906 100644
--- a/src/com/android/camera/burst/BurstController.java
+++ b/src/com/android/camera/burst/BurstController.java
@@ -14,6 +14,8 @@
 
 package com.android.camera.burst;
 
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+
 /**
  * Controls the interactions with burst.
  * <p/>
@@ -42,7 +44,7 @@
  * Once post processing is complete, the burst module returns the final results
  * by calling {@link BurstResultsListener#onBurstCompleted(BurstResult)} method.
  */
-public interface BurstController {
+interface BurstController {
 
     /**
      * Starts the burst.
@@ -85,7 +87,6 @@
      * analyzing preview frames. Preview frames should have exact timestamps as
      * the high-res images held in the internal image buffer.
      */
-   // TODO: Change the return value to FrameDistributor.FrameConsumer is
-   // checked in.
-   public Object getPreviewFrameConsumer();
+    public FrameConsumer getPreviewFrameConsumer();
+
 }
diff --git a/src/com/android/camera/burst/BurstFacade.java b/src/com/android/camera/burst/BurstFacade.java
new file mode 100644
index 0000000..27c0c00
--- /dev/null
+++ b/src/com/android/camera/burst/BurstFacade.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.burst;
+
+import android.content.ContentResolver;
+
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+import com.android.camera.one.OneCamera;
+
+/**
+ * Facade for {@link BurstController} provides a simpler interface.
+ */
+public interface BurstFacade {
+    /**
+     * Set the content resolver to be updated when saving burst results.
+     *
+     * @param contentResolver to be updated when burst results are saved.
+     */
+    public void setContentResolver(ContentResolver contentResolver);
+
+    /**
+     * Called when camera is available.
+     *
+     * @param camera an instance of {@link OneCamera} that is used to start or
+     *            stop the burst.
+     */
+    public void onCameraAttached(OneCamera camera);
+
+    /**
+     * Called when camera becomes unavailable.
+     * <p>
+     * After this call the facade has no camera to start or stop a burst on
+     * until {@link #onCameraAttached(OneCamera)} is called again.
+     */
+    public void onCameraDetached();
+
+    /**
+     * Returns the frame consumer to use for preview frames.
+     *
+     * @return the consumer that should receive preview-frame callbacks;
+     *         implementations are expected to return a non-null consumer.
+     */
+    public FrameConsumer getPreviewFrameConsumer();
+
+    /**
+     * Starts the burst.
+     */
+    public void startBurst();
+
+    /**
+     * Returns true if burst is running.
+     */
+    public boolean isBurstRunning();
+
+    /**
+     * Stops the burst.
+     */
+    public void stopBurst();
+}
diff --git a/src/com/android/camera/burst/BurstFacadeFactory.java b/src/com/android/camera/burst/BurstFacadeFactory.java
new file mode 100644
index 0000000..1129d1e
--- /dev/null
+++ b/src/com/android/camera/burst/BurstFacadeFactory.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.burst;
+
+import android.content.ContentResolver;
+
+import com.android.camera.app.AppController;
+import com.android.camera.app.LocationManager;
+import com.android.camera.app.MediaSaver;
+import com.android.camera.app.OrientationManager;
+import com.android.camera.gl.FrameDistributor;
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+import com.android.camera.one.OneCamera;
+
+import java.io.File;
+
+/**
+ * Factory for creating burst manager objects.
+ */
+public class BurstFacadeFactory {
+    private BurstFacadeFactory() {/* cannot be instantiated */}
+
+    /**
+     * An empty burst manager that is instantiated when burst is not supported.
+     * <p/>
+     * Every operation is a no-op, which lets callers use {@link BurstFacade}
+     * unconditionally without null checks.
+     */
+    private static class BurstFacadeStub implements BurstFacade {
+        /**
+         * A stateless, do-nothing frame consumer. A single shared instance
+         * suffices, avoiding a fresh anonymous-class allocation on every
+         * {@link #getPreviewFrameConsumer()} call.
+         */
+        private static final FrameConsumer NO_OP_FRAME_CONSUMER = new FrameConsumer() {
+            @Override
+            public void onStop() {}
+
+            @Override
+            public void onStart() {}
+
+            @Override
+            public void onNewFrameAvailable(FrameDistributor frameDistributor,
+                    long timestampNs) {}
+        };
+
+        @Override
+        public void setContentResolver(ContentResolver contentResolver) {}
+
+        @Override
+        public void onCameraAttached(OneCamera camera) {}
+
+        @Override
+        public void onCameraDetached() {}
+
+        @Override
+        public FrameConsumer getPreviewFrameConsumer() {
+            return NO_OP_FRAME_CONSUMER;
+        }
+
+        @Override
+        public void startBurst() {}
+
+        @Override
+        public boolean isBurstRunning() {
+            return false;
+        }
+
+        @Override
+        public void stopBurst() {}
+    }
+
+    /**
+     * Creates and returns an instance of {@link BurstFacade}.
+     *
+     * @param appController the app level controller for controlling the shutter
+     *            button.
+     * @param mediaSaver the {@link MediaSaver} instance for saving results of
+     *            burst.
+     * @param locationManager for querying location of burst.
+     * @param orientationManager for querying orientation of burst.
+     * @param debugDataDir the debug directory to use for burst.
+     * @return a real implementation when burst mode is supported on this
+     *         device, otherwise a no-op stub.
+     */
+    public static BurstFacade create(AppController appController,
+            MediaSaver mediaSaver,
+            LocationManager locationManager,
+            OrientationManager orientationManager,
+            File debugDataDir) {
+        if (BurstControllerImpl.isBurstModeSupported()) {
+            return new BurstFacadeImpl(appController, mediaSaver,
+                    locationManager, orientationManager,
+                    debugDataDir);
+        } else {
+            // Burst is not supported, return a stub instance.
+            return new BurstFacadeStub();
+        }
+    }
+}
diff --git a/src/com/android/camera/burst/BurstFacadeImpl.java b/src/com/android/camera/burst/BurstFacadeImpl.java
new file mode 100644
index 0000000..f3c0527
--- /dev/null
+++ b/src/com/android/camera/burst/BurstFacadeImpl.java
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.android.camera.burst;
+
+import android.content.ContentResolver;
+import android.location.Location;
+import android.net.Uri;
+import android.os.AsyncTask;
+import android.text.TextUtils;
+
+import com.android.camera.app.AppController;
+import com.android.camera.app.LocationManager;
+import com.android.camera.app.MediaSaver;
+import com.android.camera.app.OrientationManager;
+import com.android.camera.data.LocalData;
+import com.android.camera.debug.Log;
+import com.android.camera.debug.Log.Tag;
+import com.android.camera.exif.ExifInterface;
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+import com.android.camera.one.OneCamera;
+import com.android.camera.one.OneCamera.BurstParameters;
+import com.android.camera.one.OneCamera.BurstResultsCallback;
+import com.android.camera.session.CaptureSession;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Helper to manage burst, listen to burst results and saves media items.
+ * <p/>
+ * The UI feedback is rudimentary in form of a toast that is displayed on start of the
+ * burst and when artifacts are saved.
+ *
+ * TODO: Move functionality of saving burst items to a
+ * {@link com.android.camera.processing.ProcessingTask} and change API to use
+ * {@link com.android.camera.processing.ProcessingService}.
+ *
+ * TODO: Hook UI to the listener.
+ */
+class BurstFacadeImpl implements BurstFacade {
+    /**
+     * The state of the burst module.
+     */
+    private static enum BurstModuleState {
+        IDLE,
+        RUNNING,
+        STOPPING
+    }
+
+    private static final Tag TAG = new Tag("BurstFacadeImpl");
+
+    /**
+     * The format string of burst media item file name (without extension).
+     * <p/>
+     * An media item file name has the following format: "Burst_" + artifact
+     * type + "_" + index of artifact + "_" + timestamp
+     */
+    private static final String MEDIA_ITEM_FILENAME_FORMAT_STRING = "Burst_%s_%d_%d";
+    /**
+     * The title of Capture session for Burst.
+     * <p/>
+     * Title is of format: Burst_timestamp
+     */
+    private static final String BURST_TITLE_FORMAT_STRING = "Burst_%d";
+
+    private final AtomicReference<BurstModuleState> mBurstModuleState =
+            new AtomicReference<BurstModuleState>(BurstModuleState.IDLE);
+
+    /** Lock to protect starting and stopping of the burst. */
+    private final Object mStartStopBurstLock = new Object();
+
+    private final BurstController mBurstController;
+    private final AppController mAppController;
+    private final File mDebugDataDir;
+
+    private final MediaSaver.OnMediaSavedListener mOnMediaSavedListener =
+            new MediaSaver.OnMediaSavedListener() {
+                @Override
+                public void onMediaSaved(Uri uri) {
+                    if (uri != null) {
+                        mAppController.notifyNewMedia(uri);
+                    }
+                }
+            };
+
+    /**
+     * Results callback that is invoked by camera when results are available.
+     */
+    private final BurstResultsCallback
+            mBurstExtractsResultsCallback = new BurstResultsCallback() {
+                @Override
+                public void onBurstComplete(ResultsAccessor resultAccessor) {
+                    // Pass the results accessor to the controller.
+                    mBurstController.stopBurst(resultAccessor);
+                }
+            };
+
+    /**
+     * Listener for burst controller. Saves the results and interacts with the
+     * UI.
+     */
+    private final BurstResultsListener mBurstResultsListener =
+            new BurstResultsListener() {
+                @Override
+                public void onBurstStarted() {
+                }
+
+                @Override
+                public void onBurstError(Exception error) {
+                    Log.e(TAG, "Exception while running the burst" + error);
+                    mBurstModuleState.set(BurstModuleState.IDLE);
+                    // Re-enable the shutter button.
+                    mAppController.setShutterEnabled(true);
+                }
+
+                @Override
+                public void onBurstCompleted(BurstResult burstResult) {
+                    saveBurstResultAndEnableShutterButton(burstResult);
+                }
+
+                @Override
+                public void onArtifactCountAvailable(
+                        final Map<String, Integer> artifactTypeCount) {
+                    logArtifactCount(artifactTypeCount);
+                }
+            };
+
+    /** Camera instance for starting/stopping the burst. */
+    private OneCamera mCamera;
+
+    private final MediaSaver mMediaSaver;
+    private final LocationManager mLocationManager;
+    private final OrientationManager mOrientationManager;
+    private volatile ContentResolver mContentResolver;
+
+    /**
+     * Create a new BurstManagerImpl instance.
+     *
+     * @param appController the app level controller for controlling the shutter
+     *            button.
+     * @param mediaSaver the {@link MediaSaver} instance for saving results of
+     *            burst.
+     * @param locationManager for querying location of burst.
+     * @param orientationManager for querying orientation of burst.
+     * @param debugDataDir the debug directory to use for burst.
+     */
+    public BurstFacadeImpl(AppController appController,
+            MediaSaver mediaSaver,
+            LocationManager locationManager,
+            OrientationManager orientationManager,
+            File debugDataDir) {
+        mAppController = appController;
+        mMediaSaver = mediaSaver;
+        mLocationManager = locationManager;
+        mDebugDataDir = debugDataDir;
+        mOrientationManager = orientationManager;
+        mBurstController = new BurstControllerImpl(
+                mAppController.getAndroidContext(),
+                mBurstResultsListener);
+    }
+
+    /**
+     * Set the content resolver to be updated when saving burst results.
+     *
+     * @param contentResolver to be updated when burst results are saved.
+     */
+    @Override
+    public void setContentResolver(ContentResolver contentResolver) {
+        mContentResolver = contentResolver;
+    }
+
+    @Override
+    public void onCameraAttached(OneCamera camera) {
+        synchronized (mStartStopBurstLock) {
+            mCamera = camera;
+        }
+    }
+
+    @Override
+    public void onCameraDetached() {
+        synchronized (mStartStopBurstLock) {
+            mCamera = null;
+        }
+    }
+
+    @Override
+    public FrameConsumer getPreviewFrameConsumer() {
+        return mBurstController.getPreviewFrameConsumer();
+    }
+
+    @Override
+    public void startBurst() {
+        startBurstImpl();
+    }
+
+    @Override
+    public boolean isBurstRunning() {
+        return (mBurstModuleState.get() == BurstModuleState.RUNNING
+                || mBurstModuleState.get() == BurstModuleState.STOPPING);
+    }
+
+    private void startBurstImpl() {
+        synchronized (mStartStopBurstLock) {
+            if (mCamera != null &&
+                    mBurstModuleState.compareAndSet(BurstModuleState.IDLE,
+                            BurstModuleState.RUNNING)) {
+                // TODO: Use localized strings everywhere.
+                Log.d(TAG, "Starting burst.");
+                Location location = mLocationManager.getCurrentLocation();
+
+                // Set up the capture session.
+                long sessionTime = System.currentTimeMillis();
+                String title = String.format(BURST_TITLE_FORMAT_STRING, sessionTime);
+
+                // TODO: Fix the capture session and use it for saving
+                // intermediate results.
+                CaptureSession session = null;
+
+                BurstConfiguration burstConfig = mBurstController.startBurst();
+                BurstParameters params = new BurstParameters();
+                params.callback = mBurstExtractsResultsCallback;
+                params.burstConfiguration = burstConfig;
+                params.title = title;
+                params.orientation = mOrientationManager.getDeviceOrientation().getDegrees();
+                params.debugDataFolder = mDebugDataDir;
+                params.location = location;
+
+                // Disable the shutter button.
+                mAppController.setShutterEnabled(false);
+
+                // start burst.
+                mCamera.startBurst(params, session);
+            }
+        }
+    }
+
+    @Override
+    public void stopBurst() {
+        synchronized (mStartStopBurstLock) {
+            if (mBurstModuleState.compareAndSet(BurstModuleState.RUNNING,
+                    BurstModuleState.STOPPING)) {
+                if (mCamera != null) {
+                    mCamera.stopBurst();
+                }
+            }
+        }
+    }
+
+    /**
+     * Saves the burst result and on completion re-enables the shutter button.
+     *
+     * @param burstResult the result of the burst.
+     */
+    private void saveBurstResultAndEnableShutterButton(final BurstResult burstResult) {
+        Log.i(TAG, "Saving results of of the burst.");
+
+        AsyncTask<Void, String, Void> saveTask =
+                new AsyncTask<Void, String, Void>() {
+                    @Override
+                    protected Void doInBackground(Void... arg0) {
+                        for (String artifactType : burstResult.getTypes()) {
+                            publishProgress(artifactType);
+                            saveArtifacts(burstResult, artifactType);
+                        }
+                        return null;
+                    }
+
+                    @Override
+                    protected void onPostExecute(Void result) {
+                        mBurstModuleState.set(BurstModuleState.IDLE);
+                        // Re-enable the shutter button.
+                        mAppController.setShutterEnabled(true);
+                    }
+
+                    @Override
+                    protected void onProgressUpdate(String... artifactTypes) {
+                        logProgressUpdate(artifactTypes, burstResult);
+                    }
+                };
+        saveTask.execute(null, null, null);
+    }
+
+    /**
+     * Save individual artifacts for bursts.
+     */
+    private void saveArtifacts(final BurstResult burstResult,
+            final String artifactType) {
+        int index = 0;
+        for (BurstArtifact artifact : burstResult.getArtifactsByType(artifactType)) {
+            for (BurstMediaItem mediaItem : artifact.getMediaItems()) {
+                saveBurstMediaItem(mediaItem,
+                        artifactType, ++index);
+            }
+        }
+    }
+
+    private void saveBurstMediaItem(BurstMediaItem mediaItem,
+            String artifactType,
+            int index) {
+        long timestamp = System.currentTimeMillis();
+        final String mimeType = mediaItem.getMimeType();
+        final String title = String.format(MEDIA_ITEM_FILENAME_FORMAT_STRING,
+                artifactType, index, timestamp);
+        byte[] data = mediaItem.getData();
+        ExifInterface exif = null;
+        if (LocalData.MIME_TYPE_JPEG.equals(mimeType)) {
+            exif = new ExifInterface();
+            exif.addDateTimeStampTag(
+                    ExifInterface.TAG_DATE_TIME,
+                    timestamp,
+                    TimeZone.getDefault());
+
+        }
+        mMediaSaver.addImage(data,
+                title,
+                timestamp,
+                mLocationManager.getCurrentLocation(),
+                mediaItem.getWidth(),
+                mediaItem.getHeight(),
+                mOrientationManager.getDeviceOrientation().getDegrees(),
+                exif, // exif,
+                mOnMediaSavedListener,
+                mContentResolver,
+                mimeType);
+    }
+
+    private void logArtifactCount(final Map<String, Integer> artifactTypeCount) {
+        final String prefix = "Finished burst. Creating ";
+        List<String> artifactDescription = new ArrayList<String>();
+        for (Map.Entry<String, Integer> entry :
+                artifactTypeCount.entrySet()) {
+            artifactDescription.add(entry.getValue() + " " + entry.getKey());
+        }
+
+        String message = prefix + TextUtils.join(" and ", artifactDescription) + ".";
+        Log.d(TAG, message);
+    }
+
+    private void logProgressUpdate(String[] artifactTypes, BurstResult burstResult) {
+        for (String artifactType : artifactTypes) {
+            List<BurstArtifact> artifacts =
+                    burstResult.getArtifactsByType(artifactType);
+            if (!artifacts.isEmpty()) {
+                Log.d(TAG, "Saving " + artifacts.size()
+                        + " " + artifactType + "s.");
+            }
+        }
+    }
+}
diff --git a/src/com/android/camera/data/LocalData.java b/src/com/android/camera/data/LocalData.java
index d42bf97..8af9247 100644
--- a/src/com/android/camera/data/LocalData.java
+++ b/src/com/android/camera/data/LocalData.java
@@ -45,6 +45,7 @@
     static final Log.Tag TAG = new Log.Tag("LocalData");
 
     public static final String MIME_TYPE_JPEG = "image/jpeg";
+    public static final String MIME_TYPE_GIF = "image/gif";
 
     // Data actions.
     public static final int DATA_ACTION_NONE = 0;
diff --git a/src/com/android/camera/gl/CopyShader.java b/src/com/android/camera/gl/CopyShader.java
new file mode 100644
index 0000000..cff3d3c
--- /dev/null
+++ b/src/com/android/camera/gl/CopyShader.java
@@ -0,0 +1,244 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Allows copying a GL texture to a {@link RenderTarget}.
+ */
+// TODO: Document this class a bit more and add a test for the class.
+public class CopyShader {
+
+    private static final String VERTEX_SHADER =
+            "attribute vec4 a_position;\n" +
+                    "attribute vec2 a_texcoord;\n" +
+                    "varying vec2 v_texcoord;\n" +
+                    "void main() {\n" +
+                    "  gl_Position = a_position;\n" +
+                    "  v_texcoord = a_texcoord;\n" +
+                    "}\n";
+
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;\n" +
+                    "uniform sampler2D tex_sampler;\n" +
+                    "varying vec2 v_texcoord;\n" +
+                    "void main() {\n" +
+                    "  gl_FragColor = texture2D(tex_sampler, v_texcoord);\n" +
+                    "}\n";
+
+    private static final String FRAGMENT_SHADER_EXTERNAL =
+            "#extension GL_OES_EGL_image_external : require\n" +
+                    "precision mediump float;\n" +
+                    "uniform samplerExternalOES tex_sampler;\n" +
+                    "varying vec2 v_texcoord;\n" +
+                    "void main() {\n" +
+                    "  gl_FragColor = texture2D(tex_sampler, v_texcoord);\n" +
+                    "}\n";
+
+    private static final float[] SOURCE_COORDS = new float[] {
+            0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
+    private static final float[] TARGET_COORDS = new float[] {
+            -1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f };
+
+    private final int mProgram;
+    private final int mTextureTarget;
+
+    private final FloatBuffer mSourceCoords;
+    private final FloatBuffer mTargetCoords;
+
+    private final int mSourceAttrib;
+    private final int mTargetAttrib;
+
+    private final int mTextureUniform;
+
+    private CopyShader(int program, int textureTarget) {
+        mProgram = program;
+        mTextureTarget = textureTarget;
+        mSourceCoords = readFloats(SOURCE_COORDS);
+        mTargetCoords = readFloats(TARGET_COORDS);
+        mSourceAttrib = GLES20.glGetAttribLocation(mProgram, "a_texcoord");
+        mTargetAttrib = GLES20.glGetAttribLocation(mProgram, "a_position");
+        mTextureUniform = GLES20.glGetUniformLocation(mProgram, "tex_sampler");
+    }
+
+    /**
+     * Compiles a new shader that is valid in the current context.
+     *
+     * @return a new shader instance that is valid in the current context
+     */
+    public static CopyShader compileNewShader() {
+        return new CopyShader(createProgram(VERTEX_SHADER, FRAGMENT_SHADER), GLES20.GL_TEXTURE);
+    }
+
+    /**
+     * Compiles a new shader that binds textures as GL_TEXTURE_EXTERNAL_OES.
+     *
+     * @return a new shader instance that is valid in the current context
+     */
+    public static CopyShader compileNewExternalShader() {
+        return new CopyShader(createProgram(VERTEX_SHADER, FRAGMENT_SHADER_EXTERNAL),
+                GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    }
+
+    /**
+     * Sets the 4x4 transform matrix to apply when copying the texture.
+     * <p/>
+     * Note: Non-affine components of the transformation are ignored.
+     *
+     * @param matrix a 16-length float array containing the transform matrix in
+     *            column-major order.
+     */
+    public void setTransform(float[] matrix) {
+        /**
+         * Multiply transformation matrix by column vectors (s,t, 0, 1) for
+         * coordinates {(0,0),(1,0), (0,1), (1,1) } and store (s,t) values of
+         * the resulting matrix in column-major order.
+         */
+        float[] coords = new float[] {
+                matrix[12], matrix[13],
+                matrix[0] + matrix[12],
+                matrix[1] + matrix[13],
+                matrix[4] + matrix[12],
+                matrix[5] + matrix[13],
+                matrix[0] + matrix[4] + matrix[12],
+                matrix[1] + matrix[5] + matrix[13]
+        };
+        mSourceCoords.put(coords).position(0);
+    }
+
+    /**
+     * Renders the specified texture to the specified target.
+     *
+     * @param texName name of a valid texture
+     * @param target to render into
+     * @param width of output
+     * @param height of output
+     */
+    public void renderTextureToTarget(int texName, RenderTarget target, int width, int height) {
+        useProgram();
+        focusTarget(target, width, height);
+        assignAttribute(mSourceAttrib, mSourceCoords);
+        assignAttribute(mTargetAttrib, mTargetCoords);
+        bindTexture(mTextureUniform, texName, mTextureTarget);
+        render();
+    }
+
+    /**
+     * Releases the current shader.
+     * <p>
+     * This must be called in the shader's GL thread.
+     */
+    public void release() {
+        GLES20.glDeleteProgram(mProgram);
+    }
+
+    private void focusTarget(RenderTarget target, int width, int height) {
+        target.focus();
+        GLES20.glViewport(0, 0, width, height);
+        GLToolbox.checkGlError("focus");
+    }
+
+    private void useProgram() {
+        GLES20.glUseProgram(mProgram);
+        GLToolbox.checkGlError("glUseProgram");
+    }
+
+    private void render() {
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+        GLToolbox.checkGlError("glDrawArrays");
+    }
+
+    private void bindTexture(int uniformName, int texName, int texTarget) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(texTarget, texName);
+        GLES20.glUniform1i(uniformName, 0);
+        GLToolbox.checkGlError(
+                "bindTexture(" + uniformName + "," + texName + "," + texTarget + ")");
+    }
+
+    private void assignAttribute(int index, FloatBuffer values) {
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+        GLES20.glVertexAttribPointer(index, 2, GLES20.GL_FLOAT, false, 8, values);
+        GLES20.glEnableVertexAttribArray(index);
+        GLToolbox.checkGlError("glVertexAttribPointer(" + index + ")");
+    }
+
+    private static FloatBuffer readFloats(float[] values) {
+        FloatBuffer result = ByteBuffer.allocateDirect(values.length * 4)
+                .order(ByteOrder.nativeOrder()).asFloatBuffer();
+        result.put(values).position(0);
+        return result;
+    }
+
+    private static int loadShader(int shaderType, String source) {
+        int shader = GLES20.glCreateShader(shaderType);
+        if (shader != 0) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                String info = GLES20.glGetShaderInfoLog(shader);
+                GLES20.glDeleteShader(shader);
+                shader = 0;
+                throw new RuntimeException("Could not compile shader " + shaderType + ":" + info);
+            }
+        }
+        return shader;
+    }
+
+    private static int createProgram(String vertexSource, String fragmentSource) {
+        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertexShader == 0) {
+            throw new RuntimeException("Could not create shader-program as vertex shader "
+                    + "could not be compiled!");
+        }
+        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (pixelShader == 0) {
+            throw new RuntimeException("Could not create shader-program as fragment shader "
+                    + "could not be compiled!");
+        }
+
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertexShader);
+            GLToolbox.checkGlError("glAttachShader");
+            GLES20.glAttachShader(program, pixelShader);
+            GLToolbox.checkGlError("glAttachShader");
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                String info = GLES20.glGetProgramInfoLog(program);
+                GLES20.glDeleteProgram(program);
+                program = 0;
+                throw new RuntimeException("Could not link program: " + info);
+            }
+        }
+
+        GLES20.glDeleteShader(vertexShader);
+        GLES20.glDeleteShader(pixelShader);
+
+        return program;
+    }
+}
diff --git a/src/com/android/camera/gl/FrameDistributor.java b/src/com/android/camera/gl/FrameDistributor.java
new file mode 100644
index 0000000..1ecb5bf
--- /dev/null
+++ b/src/com/android/camera/gl/FrameDistributor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.graphics.SurfaceTexture;
+
+/**
+ * Distributes frames from a {@link SurfaceTexture} to multiple consumers.
+ * <p>
+ * Frames are distributed as OpenGL textures, that can be used for further
+ * processing. This is a flexible approach that allows processing both using GL
+ * methods (e.g. shaders) and CPU methods by sending frames through an
+ * {@code ImageReader}.
+ * <p>
+ * Consumers receive
+ * {@link FrameConsumer#onNewFrameAvailable(FrameDistributor, long)} callbacks
+ * for new frames. Each consumer can grab the most current frame from
+ * its GL thread by calling {@link #acquireNextFrame(int, float[])}. After
+ * accessing the data, the consumer needs to call {@link #releaseFrame()}.
+ */
+public interface FrameDistributor {
+    /**
+     * Consumes frames streamed from a distributor.
+     */
+    public interface FrameConsumer {
+        /**
+         * Called when frame processing is about to start.
+         * <p>
+         * You can use this to do any setup required to process frames. Note
+         * that this is called on the distributor's thread.
+         */
+        public void onStart();
+
+        /**
+         * Called when a new frame is available for processing.
+         * <p>
+         * Note that as this is called on the frame producer's thread, you
+         * should typically not call {@code acquireNextFrame()} from this
+         * callback. Instead, call it from your GL thread, after receiving this
+         * callback. When you are done processing the frame, you must call
+         * {@code releaseFrame()}.
+         *
+         * @param frameDistributor that issued the callback
+         * @param timestampNs timestamp in nanoseconds of the available frame.
+         */
+        public void onNewFrameAvailable(FrameDistributor frameDistributor, long timestampNs);
+
+        /**
+         * Called when frame processing is about to stop.
+         * <p>
+         * You can use this to release any resources that your consumer uses.
+         * You must reinstate resources when {@link #onStart()} is called again.
+         */
+        public void onStop();
+    }
+
+    /**
+     * Acquire the next available frame.
+     * <p>
+     * Call this after having received a
+     * {@link FrameConsumer#onNewFrameAvailable(FrameDistributor, long)}
+     * callback. You must call this from the thread in which your texture name
+     * is current and valid. The texture will be filled with the frame data and
+     * must be bound using GL_TEXTURE_EXTERNAL_OES. It must be a valid texture
+     * name created with {@code glGenTextures()}. You must call
+     * {@link #releaseFrame()} as soon as you are done processing this frame.
+     * All other consumers are blocked from processing frames until you have
+     * released it, so any processing should be done as efficiently as
+     * possible.
+     *
+     * @param textureName to fill with image data. Must be a valid texture name.
+     * @param transform of the frame that needs to be applied for an upright
+     *            image.
+     * @return the timestamp in nanoseconds of the frame.
+     */
+    public long acquireNextFrame(int textureName, float[] transform);
+
+    /**
+     * Release the currently acquired frame.
+     */
+    public void releaseFrame();
+
+    /**
+     * Get the {@link RenderTarget} that the producer uses for GL operations.
+     * <p>
+     * You should rarely need to use this method. It is used exclusively by
+     * consumers that reuse the frame producer's EGL context, and must be
+     * handled with great care.
+     *
+     * @return the RenderTarget used by the frame producer.
+     */
+    public RenderTarget getRenderTarget();
+}
diff --git a/src/com/android/camera/gl/FrameDistributorImpl.java b/src/com/android/camera/gl/FrameDistributorImpl.java
new file mode 100644
index 0000000..54998ac
--- /dev/null
+++ b/src/com/android/camera/gl/FrameDistributorImpl.java
@@ -0,0 +1,340 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.graphics.SurfaceTexture;
+import android.graphics.SurfaceTexture.OnFrameAvailableListener;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Size;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Implementation of {@link FrameDistributor}.
+ * <p>
+ * The typical way to use this is as follows:
+ * <ol>
+ * <li>Create a new instance and add all the {@link FrameConsumer} instances.
+ * </li>
+ * <li>Call {@link #start()} on the distributor.</li>
+ * <li>Obtain the {@link SurfaceTexture} used by the distributor and hook it up
+ * to your producer e.g. a Camera instance.</li>
+ * <li>Your consumers now start receiving
+ * {@link FrameConsumer#onNewFrameAvailable(FrameDistributor, long)} callbacks
+ * for new frames.</li>
+ * <li>Each consumer grabs the most current frame from its GL thread by calling
+ * {@link #acquireNextFrame(int, float[])}.</li>
+ * <li>After accessing the data, the consumer calls {@link #releaseFrame()}.
+ * </li>
+ * <li>When all processing is complete, call {@link #stop()}.</li>
+ * </ol>
+ */
+class FrameDistributorImpl implements FrameDistributor, AutoCloseable {
+
+    /** Handler that performs all distribution work on its own thread. */
+    private final DistributionHandler mDistributionHandler;
+
+    /** Dedicated thread on which all distribution messages are processed. */
+    private final HandlerThread mDistributionThread;
+
+    /**
+     * Owns the input {@link SurfaceTexture} and the EGL render target, and
+     * dispatches lifecycle and frame callbacks to the registered consumers.
+     */
+    private static class DistributionHandler extends Handler implements OnFrameAvailableListener {
+
+        public static final int MSG_SETUP = 1;
+        public static final int MSG_RELEASE = 2;
+        public static final int MSG_UPDATE_SURFACE = 3;
+        public static final int MSG_UPDATE_PREVIEW_BUFFER_SIZE = 4;
+
+        private static final int DEFAULT_SURFACE_BUFFER_WIDTH = 1440;
+        private static final int DEFAULT_SURFACE_BUFFER_HEIGHT = 1080;
+
+        private final FrameDistributorImpl mDistributor;
+
+        /** Opened whenever the most recently posted command has completed. */
+        final ConditionVariable mCommandDoneCondition = new ConditionVariable(true);
+
+        // Guards attach/detach access to mSurfaceTexture. Note that
+        // acquireNextFrame() locks it and only releaseFrame() unlocks it, so
+        // one consumer holding a frame blocks all others by design.
+        private final Lock mSurfaceTextureAccessLock = new ReentrantLock();
+
+        private final List<FrameConsumer> mConsumers;
+
+        private SurfaceTexture mSurfaceTexture;
+
+        // Timestamp (ns) of the most recently updated frame.
+        private long mTimestamp;
+
+        // GL texture name the SurfaceTexture is attached to on this thread.
+        private int mTexture;
+
+        private RenderTarget mServerTarget;
+
+        private boolean mIsSetup = false;
+
+        public DistributionHandler(FrameDistributorImpl distributor, Looper looper,
+                List<FrameConsumer> consumers) {
+            super(looper);
+            mDistributor = distributor;
+            mConsumers = consumers;
+        }
+
+        @Override
+        public void handleMessage(Message message) {
+            try {
+                switch (message.what) {
+                    case MSG_SETUP:
+                        setup();
+                        break;
+                    case MSG_UPDATE_SURFACE:
+                        updateSurfaceTexture();
+                        break;
+                    case MSG_RELEASE:
+                        release();
+                        break;
+                    case MSG_UPDATE_PREVIEW_BUFFER_SIZE:
+                        updatePreviewBufferSize((Size) message.obj);
+                        break;
+                    default:
+                        throw new IllegalStateException("Unknown message: " + message + "!");
+                }
+            } finally {
+                // Always unblock waiters, even if the command threw.
+                mCommandDoneCondition.open();
+            }
+        }
+
+        /** Creates the EGL target and input SurfaceTexture; idempotent. */
+        private synchronized void setup() {
+            if (!mIsSetup) {
+                mServerTarget = RenderTarget.newTarget(1, 1);
+                mServerTarget.focus();
+                mSurfaceTexture = createSurfaceTexture();
+                informListenersOfStart();
+                mIsSetup = true;
+            }
+        }
+
+        /** Tears down all GL/EGL resources; idempotent. */
+        private synchronized void release() {
+            if (mIsSetup) {
+                // Notify listeners before tearing anything down.
+                informListenersOfStop();
+
+                // Release our resources
+                mServerTarget.close();
+                mServerTarget = null;
+                mSurfaceTexture.release();
+                mSurfaceTexture = null;
+                GLToolbox.deleteTexture(mTexture);
+
+                // It is VERY important we unfocus the current EGL context, as
+                // the SurfaceTextures will not properly detach if this is not
+                // done.
+                RenderTarget.focusNone();
+
+                // Update internal state
+                mIsSetup = false;
+            }
+        }
+
+        /** Latches the newest frame into our texture and notifies consumers. */
+        private void updateSurfaceTexture() {
+            if (mIsSetup) {
+                mSurfaceTextureAccessLock.lock();
+                try {
+                    mSurfaceTexture.attachToGLContext(mTexture);
+                    mSurfaceTexture.updateTexImage();
+                    mSurfaceTexture.detachFromGLContext();
+                } finally {
+                    // Without this finally, an exception here would leave the
+                    // lock held forever and deadlock every consumer.
+                    mSurfaceTextureAccessLock.unlock();
+                }
+                mTimestamp = mSurfaceTexture.getTimestamp();
+                informListenersOfNewFrame(mTimestamp);
+            }
+        }
+
+        private void updatePreviewBufferSize(Size size) {
+            if (size != null && mIsSetup) {
+                mSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
+            }
+        }
+
+        public void postMessageType(int kind) {
+            mCommandDoneCondition.close();
+            sendMessage(Message.obtain(this, kind));
+        }
+
+        public void postMessageType(int kind, Object params) {
+            mCommandDoneCondition.close();
+            sendMessage(Message.obtain(this, kind, params));
+        }
+
+        private void informListenersOfStart() {
+            synchronized (mConsumers) {
+                for (FrameConsumer consumer : mConsumers) {
+                    consumer.onStart();
+                }
+            }
+        }
+
+        private void informListenersOfNewFrame(long timestamp) {
+            synchronized (mConsumers) {
+                for (FrameConsumer consumer : mConsumers) {
+                    consumer.onNewFrameAvailable(mDistributor, timestamp);
+                }
+            }
+        }
+
+        private void informListenersOfStop() {
+            synchronized (mConsumers) {
+                for (FrameConsumer consumer : mConsumers) {
+                    consumer.onStop();
+                }
+            }
+        }
+
+        /**
+         * Attaches the SurfaceTexture to the caller's texture and holds the
+         * access lock until {@link #releaseFrame()} is called.
+         */
+        public long acquireNextFrame(int textureName, float[] transform) {
+            if (transform == null || transform.length != 16) {
+                throw new IllegalArgumentException(
+                        "acquireNextFrame: invalid transform array.");
+            }
+            mSurfaceTextureAccessLock.lock();
+            try {
+                mSurfaceTexture.attachToGLContext(textureName);
+                mSurfaceTexture.getTransformMatrix(transform);
+            } catch (RuntimeException e) {
+                // Do not keep the lock if attaching fails; otherwise the
+                // caller never reaches releaseFrame() and all consumers
+                // deadlock.
+                mSurfaceTextureAccessLock.unlock();
+                throw e;
+            }
+            return mTimestamp;
+        }
+
+        /** Detaches the SurfaceTexture and releases the access lock. */
+        public void releaseFrame() {
+            mSurfaceTexture.detachFromGLContext();
+            mSurfaceTextureAccessLock.unlock();
+        }
+
+        public synchronized SurfaceTexture getSurfaceTexture() {
+            return mSurfaceTexture;
+        }
+
+        public synchronized RenderTarget getRenderTarget() {
+            return mServerTarget;
+        }
+
+        @Override
+        public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+            postMessageType(MSG_UPDATE_SURFACE);
+        }
+
+        private SurfaceTexture createSurfaceTexture() {
+            mTexture = GLToolbox.generateTexture();
+            SurfaceTexture surfaceTexture = new SurfaceTexture(mTexture);
+            surfaceTexture.setDefaultBufferSize(DEFAULT_SURFACE_BUFFER_WIDTH,
+                    DEFAULT_SURFACE_BUFFER_HEIGHT);
+            surfaceTexture.setOnFrameAvailableListener(this);
+            // Detach so that consumers (and updateSurfaceTexture) can attach
+            // it to their own GL contexts on demand.
+            surfaceTexture.detachFromGLContext();
+            return surfaceTexture;
+        }
+
+    }
+
+    /**
+     * Creates a new distributor with the specified consumers.
+     *
+     * @param consumers list of consumers that will process incoming frames.
+     */
+    public FrameDistributorImpl(List<FrameConsumer> consumers) {
+        mDistributionThread = new HandlerThread("FrameDistributor");
+        mDistributionThread.start();
+        mDistributionHandler = new DistributionHandler(this, mDistributionThread.getLooper(),
+                consumers);
+    }
+
+    /**
+     * Start processing frames and sending them to consumers.
+     */
+    public void start() {
+        mDistributionHandler.postMessageType(DistributionHandler.MSG_SETUP);
+    }
+
+    /**
+     * Stop processing frames and release any resources required for doing so.
+     */
+    public void stop() {
+        mDistributionHandler.postMessageType(DistributionHandler.MSG_RELEASE);
+    }
+
+    /**
+     * Wait until the current start/stop command has finished executing.
+     * <p>
+     * Use this command if you need to make sure that the distributor has fully
+     * started or stopped.
+     */
+    public void waitForCommand() {
+        mDistributionHandler.mCommandDoneCondition.block();
+    }
+
+    /**
+     * Close the current distributor and release its resources.
+     * <p>
+     * You must not use the distributor after calling this method.
+     */
+    @Override
+    public void close() {
+        stop();
+        mDistributionThread.quitSafely();
+    }
+
+    @Override
+    public long acquireNextFrame(int textureName, float[] transform) {
+        return mDistributionHandler.acquireNextFrame(textureName, transform);
+    }
+
+    @Override
+    public void releaseFrame() {
+        mDistributionHandler.releaseFrame();
+    }
+
+    /**
+     * Get the {@link SurfaceTexture} whose frames will be distributed.
+     * <p>
+     * You must call this after distribution has started with a call to
+     * {@link #start()}.
+     *
+     * @return the input SurfaceTexture or null, if none is yet available.
+     */
+    public SurfaceTexture getInputSurfaceTexture() {
+        return mDistributionHandler.getSurfaceTexture();
+    }
+
+    /**
+     * Get the {@link RenderTarget} that the distributor uses for GL operations.
+     * <p>
+     * You should rarely need to use this method. It is used exclusively by
+     * consumers that reuse the FrameDistributor's EGL context, and must be
+     * handled with great care.
+     *
+     * @return the RenderTarget used by the FrameDistributor.
+     */
+    @Override
+    public RenderTarget getRenderTarget() {
+        return mDistributionHandler.getRenderTarget();
+    }
+
+    /**
+     * Update the default buffer size of the input {@link SurfaceTexture}.
+     *
+     * @param width the new value of width of the preview buffer.
+     * @param height the new value of height of the preview buffer.
+     */
+    public void updatePreviewBufferSize(int width, int height) {
+        mDistributionHandler.postMessageType(DistributionHandler.MSG_UPDATE_PREVIEW_BUFFER_SIZE,
+                new Size(width, height));
+    }
+}
diff --git a/src/com/android/camera/gl/FrameDistributorWrapper.java b/src/com/android/camera/gl/FrameDistributorWrapper.java
new file mode 100644
index 0000000..687d16a
--- /dev/null
+++ b/src/com/android/camera/gl/FrameDistributorWrapper.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.graphics.SurfaceTexture;
+import android.os.Looper;
+
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * A wrapper class for {@link FrameDistributorImpl} that provides thread safe
+ * access to the frame distributor.
+ */
+public class FrameDistributorWrapper implements AutoCloseable {
+    /** Holds the active distributor, or null when not started. */
+    private final AtomicReference<FrameDistributorImpl> mFrameDistributor =
+            new AtomicReference<FrameDistributorImpl>();
+
+    /**
+     * Start processing frames and sending them to consumers.
+     * <p/>
+     * Can only be called from the main thread.
+     *
+     * @param consumers list of consumers that will process incoming frames.
+     * @throws IllegalStateException if already started or not called on the
+     *             main thread.
+     */
+    public void start(List<FrameConsumer> consumers) {
+        assertOnMainThread();
+        if (mFrameDistributor.get() != null) {
+            throw new IllegalStateException("FrameDistributorWrapper: start called before close.");
+        } else {
+            FrameDistributorImpl distributor = new FrameDistributorImpl(consumers);
+            mFrameDistributor.set(distributor);
+            distributor.start();
+            // Block until setup completes so getInputSurfaceTexture() is
+            // usable as soon as start() returns.
+            distributor.waitForCommand();
+        }
+    }
+
+    /**
+     * Get the {@link SurfaceTexture} whose frames will be distributed.
+     * <p>
+     * You must call this after distribution has started with a call to
+     * {@link #start(List)}.
+     *
+     * @return the input SurfaceTexture or null, if none is yet available.
+     */
+    public SurfaceTexture getInputSurfaceTexture() {
+        FrameDistributorImpl distributor = mFrameDistributor.get();
+        return (distributor != null) ? distributor.getInputSurfaceTexture() : null;
+    }
+
+    /**
+     * Update the default buffer size of the input {@link SurfaceTexture}.
+     *
+     * @param width the new value of width of the preview buffer.
+     * @param height the new value of height of the preview buffer.
+     */
+    public void updatePreviewBufferSize(int width, int height) {
+        FrameDistributorImpl distributor = mFrameDistributor.get();
+        if (distributor != null) {
+            distributor.updatePreviewBufferSize(width, height);
+        }
+    }
+
+    /**
+     * Close the current distributor and release its resources.
+     * <p>
+     * Can only be called from the main thread.
+     *
+     * @throws IllegalStateException if not started or not called on the main
+     *             thread.
+     */
+    @Override
+    public void close() {
+        assertOnMainThread();
+        // getAndSet makes the read-then-clear step atomic, closing the
+        // get()/get()/set(null) race window of the previous implementation.
+        FrameDistributorImpl distributor = mFrameDistributor.getAndSet(null);
+        if (distributor != null) {
+            distributor.close();
+        } else {
+            throw new IllegalStateException("FrameDistributorWrapper: close called before start.");
+        }
+    }
+
+    /** Throws if the caller is not running on the main (UI) thread. */
+    private static void assertOnMainThread() {
+        if (Looper.getMainLooper().getThread() != Thread.currentThread()) {
+            throw new IllegalStateException("Must be called on the main thread.");
+        }
+    }
+}
diff --git a/src/com/android/camera/gl/GLToolbox.java b/src/com/android/camera/gl/GLToolbox.java
new file mode 100644
index 0000000..581f811
--- /dev/null
+++ b/src/com/android/camera/gl/GLToolbox.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.opengl.GLES20;
+import android.os.Looper;
+
+/**
+ * Collection of utility functions used for GL operations.
+ */
+public class GLToolbox {
+
+    // Utility class; not instantiable.
+    private GLToolbox() {
+    }
+
+    /** Returns the reserved "no texture" name (0). */
+    public static int textureNone() {
+        return 0;
+    }
+
+    /** Returns true if {@code texId} names a valid GL texture. */
+    public static boolean isTexture(int texId) {
+        return GLES20.glIsTexture(texId);
+    }
+
+    /** Deletes the given texture. Must not be called on the UI thread. */
+    public static void deleteTexture(int texId) {
+        int[] textures = new int[] { texId };
+        assertNonUiThread("glDeleteTextures");
+        GLES20.glDeleteTextures(1, textures, 0);
+        checkGlError("glDeleteTextures");
+    }
+
+    /** Deletes the given framebuffer. Must not be called on the UI thread. */
+    public static void deleteFbo(int fboId) {
+        int[] fbos = new int[] { fboId };
+        assertNonUiThread("glDeleteFramebuffers");
+        GLES20.glDeleteFramebuffers(1, fbos, 0);
+        checkGlError("glDeleteFramebuffers");
+    }
+
+    /** Generates and returns a new texture name. */
+    public static int generateTexture() {
+        int[] textures = new int[1];
+        GLES20.glGenTextures(1, textures, 0);
+        checkGlError("glGenTextures");
+        return textures[0];
+    }
+
+    /** Generates and returns a new framebuffer object name. */
+    public static int generateFbo() {
+        int[] fbos = new int[1];
+        GLES20.glGenFramebuffers(1, fbos, 0);
+        checkGlError("glGenFramebuffers");
+        return fbos[0];
+    }
+
+    /**
+     * Returns the name of the texture attached to the given FBO's color
+     * attachment. Leaves {@code fboId} bound as the current framebuffer.
+     */
+    public static int attachedTexture(int fboId) {
+        int[] params = new int[1];
+        // Bug fix: the previous version never bound fboId and therefore
+        // queried whichever framebuffer happened to be bound.
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+        checkGlError("glBindFramebuffer");
+        GLES20.glGetFramebufferAttachmentParameteriv(
+                GLES20.GL_FRAMEBUFFER,
+                GLES20.GL_COLOR_ATTACHMENT0,
+                GLES20.GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
+                params, 0);
+        checkGlError("glGetFramebufferAttachmentParameteriv");
+        return params[0];
+    }
+
+    /**
+     * Attaches {@code texId} to the color attachment of {@code fboId}.
+     * Leaves {@code fboId} bound as the current framebuffer.
+     */
+    public static void attachTextureToFbo(int texId, int fboId) {
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+                GLES20.GL_COLOR_ATTACHMENT0,
+                GLES20.GL_TEXTURE_2D,
+                texId,
+                0);
+        checkGlError("glFramebufferTexture2D");
+    }
+
+    /**
+     * Applies linear filtering and edge clamping to the currently bound
+     * GL_TEXTURE_2D texture.
+     */
+    public static void setDefaultTexParams() {
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
+                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+        checkGlError("glTexParameteri");
+    }
+
+    /**
+     * Throws a RuntimeException if the GL error flag is set, tagging the
+     * message with the operation that was just performed.
+     */
+    public static void checkGlError(String operation) {
+        int error;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            throw new RuntimeException("GL Operation '" + operation + "' caused error "
+                    + Integer.toHexString(error) + "!");
+        }
+    }
+
+    /**
+     * Make sure we are not operating in the UI thread. It is often tricky to
+     * track down bugs that happen when issuing GL commands in the UI thread.
+     * This is especially true when releasing GL resources. Often this will
+     * cause errors much later on. Therefore we make sure we do not do these
+     * dangerous operations on the UI thread.
+     */
+    private static void assertNonUiThread(String operation) {
+        if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
+            throw new RuntimeException("Attempting to perform GL operation '" + operation
+                    + "' on UI thread!");
+        }
+    }
+}
diff --git a/src/com/android/camera/gl/RenderTarget.java b/src/com/android/camera/gl/RenderTarget.java
new file mode 100644
index 0000000..d126fa1
--- /dev/null
+++ b/src/com/android/camera/gl/RenderTarget.java
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Encapsulates a target into which a GL operation can draw into.
+ * <p>
+ * A RenderTarget can take on many forms, such as an offscreen buffer, an FBO
+ * attached to a texture, or a SurfaceTexture target. Regardless of output type,
+ * once a RenderTarget is focused, any issued OpenGL draw commands will be
+ * rasterized into that target.
+ * <p>
+ * Note, that this class is a simplified version of the MFF's
+ * {@code RenderTarget} class.
+ */
+// TODO: Add a test for the class.
+public class RenderTarget implements AutoCloseable {
+
+    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+    private static final int EGL_OPENGL_ES2_BIT = 4;
+
+    /** The cached EGLConfig instance. */
+    private static EGLConfig mEglConfig = null;
+
+    /** The display for which the EGLConfig was chosen. We expect only one. */
+    private static EGLDisplay mConfiguredDisplay;
+
+    private final EGL10 mEgl;
+    private final EGLDisplay mDisplay;
+    private final EGLContext mContext;
+    private final EGLSurface mSurface;
+    private final int mFbo;
+
+    private final boolean mOwnsContext;
+    private final boolean mOwnsSurface;
+
+    private static int sRedSize = 8;
+    private static int sGreenSize = 8;
+    private static int sBlueSize = 8;
+    private static int sAlphaSize = 8;
+    private static int sDepthSize = 0;
+    private static int sStencilSize = 0;
+
+    /**
+     * Creates a new off-screen RenderTarget that owns its own EGL context and
+     * surface of the given dimensions.
+     */
+    public static RenderTarget newTarget(int width, int height) {
+        EGL10 egl = (EGL10) EGLContext.getEGL();
+        EGLDisplay display = createDefaultDisplay(egl);
+        EGLConfig config = chooseEglConfig(egl, display);
+        EGLContext context = createContext(egl, display, config);
+        EGLSurface surface = createSurface(egl, display, width, height);
+        return new RenderTarget(display, context, surface, 0, true, true);
+    }
+
+    /**
+     * Creates a RenderTarget that rasterizes into the given texture via a new
+     * FBO, sharing this target's EGL display and context.
+     *
+     * @param texName name of the texture to render into.
+     * @param texTarget texture target, e.g. GL_TEXTURE_2D.
+     * @param width of the texture (currently unused here).
+     * @param height of the texture (currently unused here).
+     */
+    public RenderTarget forTexture(int texName, int texTarget, int width, int height) {
+        // NOTE: We do not need to lookup any previous bindings of this texture
+        // to an FBO, as multiple FBOs to a single texture is valid.
+        int fbo = GLToolbox.generateFbo();
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
+        GLToolbox.checkGlError("glBindFramebuffer");
+        // Bug fix: glFramebufferTexture2D takes (target, attachment,
+        // textarget, texture, level) -- the texture target must come before
+        // the texture name. The previous version passed them swapped.
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+                GLES20.GL_COLOR_ATTACHMENT0,
+                texTarget,
+                texName,
+                0);
+        GLToolbox.checkGlError("glFramebufferTexture2D");
+        return new RenderTarget(mDisplay, mContext, surface(), fbo, false, false);
+    }
+
+    /**
+     * Creates a RenderTarget that draws into the given SurfaceHolder, sharing
+     * this target's EGL display and context.
+     */
+    public RenderTarget forSurfaceHolder(SurfaceHolder surfaceHolder) {
+        EGLConfig config = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface windowSurface =
+                mEgl.eglCreateWindowSurface(mDisplay, config, surfaceHolder, null);
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, windowSurface);
+        return new RenderTarget(mDisplay, mContext, windowSurface, 0, false, true);
+    }
+
+    /**
+     * Creates a RenderTarget that draws into the given SurfaceTexture, sharing
+     * this target's EGL display and context.
+     */
+    public RenderTarget forSurfaceTexture(SurfaceTexture surfaceTexture) {
+        EGLConfig config = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface windowSurface =
+                mEgl.eglCreateWindowSurface(mDisplay, config, surfaceTexture, null);
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, windowSurface);
+        return new RenderTarget(mDisplay, mContext, windowSurface, 0, false, true);
+    }
+
+    /**
+     * Creates a RenderTarget that draws into the given Surface, sharing this
+     * target's EGL display and context.
+     */
+    public RenderTarget forSurface(Surface surface) {
+        EGLConfig config = chooseEglConfig(mEgl, mDisplay);
+        EGLSurface windowSurface =
+                mEgl.eglCreateWindowSurface(mDisplay, config, surface, null);
+        checkEglError(mEgl, "eglCreateWindowSurface");
+        checkSurface(mEgl, windowSurface);
+        return new RenderTarget(mDisplay, mContext, windowSurface, 0, false, true);
+    }
+
+    /**
+     * Sets the channel bit-sizes used when choosing an EGLConfig.
+     * <p>
+     * NOTE(review): this only affects configs chosen after this call and does
+     * not invalidate the config already cached by chooseEglConfig().
+     *
+     * @param redSize red channel size in bits.
+     * @param greenSize green channel size in bits.
+     * @param blueSize blue channel size in bits.
+     * @param alphaSize alpha channel size in bits.
+     * @param depthSize depth buffer size in bits.
+     * @param stencilSize stencil buffer size in bits.
+     */
+    public static void setEGLConfigChooser(int redSize, int greenSize, int blueSize, int alphaSize,
+            int depthSize, int stencilSize) {
+        sRedSize = redSize;
+        sGreenSize = greenSize;
+        sBlueSize = blueSize;
+        sAlphaSize = alphaSize;
+        sDepthSize = depthSize;
+        sStencilSize = stencilSize;
+    }
+
+    /**
+     * Makes this target current on the calling thread: its EGL surface and
+     * context become current and its FBO (if any) is bound, so subsequent GL
+     * draw calls rasterize into this target.
+     */
+    public void focus() {
+        // Bug fix: the eglMakeCurrent result used to be silently ignored; a
+        // failure would surface later as a confusing GL error.
+        if (!mEgl.eglMakeCurrent(mDisplay, surface(), surface(), mContext)) {
+            checkEglError(mEgl, "eglMakeCurrent");
+        }
+        if (getCurrentFbo() != mFbo) {
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFbo);
+            GLToolbox.checkGlError("glBindFramebuffer");
+        }
+    }
+
+    /**
+     * Unfocuses any currently focused render target on the calling thread by
+     * making EGL_NO_SURFACE and EGL_NO_CONTEXT current.
+     */
+    public static void focusNone() {
+        EGL10 egl = (EGL10) EGLContext.getEGL();
+        egl.eglMakeCurrent(egl.eglGetCurrentDisplay(),
+                EGL10.EGL_NO_SURFACE,
+                EGL10.EGL_NO_SURFACE,
+                EGL10.EGL_NO_CONTEXT);
+        checkEglError(egl, "eglMakeCurrent");
+    }
+
+    /**
+     * Posts the contents of this target's EGL surface to its window.
+     * <p>
+     * NOTE(review): the boolean result of eglSwapBuffers is ignored here;
+     * consider checking it and reporting failures.
+     */
+    public void swapBuffers() {
+        mEgl.eglSwapBuffers(mDisplay, surface());
+    }
+
+    /** Returns the EGL context this target renders with. */
+    public EGLContext getContext() {
+        return mContext;
+    }
+
+    /**
+     * Releases the EGL/GL resources this target owns.
+     * <p>
+     * The FBO is deleted first: GL object deletion requires a live GL
+     * context, which may no longer exist once the EGL context has been
+     * destroyed. The previous ordering destroyed the context and surface
+     * before deleting the FBO.
+     */
+    @Override
+    public void close() {
+        if (mFbo != 0) {
+            GLToolbox.deleteFbo(mFbo);
+        }
+        if (mOwnsSurface) {
+            mEgl.eglDestroySurface(mDisplay, mSurface);
+        }
+        if (mOwnsContext) {
+            mEgl.eglDestroyContext(mDisplay, mContext);
+        }
+    }
+
+    /** Returns a debug description including display, context, surface and FBO. */
+    @Override
+    public String toString() {
+        return String.format("RenderTarget(%s, %s, %s, %s)",
+                mDisplay, mContext, mSurface, mFbo);
+    }
+
+    /**
+     * Chooses (and caches) an EGLConfig matching the configured channel sizes
+     * for the given display.
+     *
+     * @throws IllegalArgumentException if eglChooseConfig fails or finds no
+     *             matching config.
+     */
+    private static EGLConfig chooseEglConfig(EGL10 egl, EGLDisplay display) {
+        if (mEglConfig == null || !display.equals(mConfiguredDisplay)) {
+            int[] configsCount = new int[1];
+            EGLConfig[] configs = new EGLConfig[1];
+            int[] configSpec = getDesiredConfig();
+            if (!egl.eglChooseConfig(display, configSpec, configs, 1, configsCount)) {
+                throw new IllegalArgumentException("EGL Error: eglChooseConfig failed " +
+                        getEGLErrorString(egl));
+            } else if (configsCount[0] == 0) {
+                // Bug fix: previously a successful call with zero matching
+                // configs fell through silently and could return a null or
+                // stale cached config. Fail fast instead.
+                throw new IllegalArgumentException(
+                        "EGL Error: eglChooseConfig found no matching config.");
+            }
+            mEglConfig = configs[0];
+            mConfiguredDisplay = display;
+        }
+        return mEglConfig;
+    }
+
+    private static int[] getDesiredConfig() {
+        return new int[] {
+                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+                EGL10.EGL_RED_SIZE, sRedSize,
+                EGL10.EGL_GREEN_SIZE, sGreenSize,
+                EGL10.EGL_BLUE_SIZE, sBlueSize,
+                EGL10.EGL_ALPHA_SIZE, sAlphaSize,
+                EGL10.EGL_DEPTH_SIZE, sDepthSize,
+                EGL10.EGL_STENCIL_SIZE, sStencilSize,
+                EGL10.EGL_NONE
+        };
+    }
+
+    private RenderTarget(EGLDisplay display, EGLContext context, EGLSurface surface, int fbo,
+            boolean ownsContext, boolean ownsSurface) {
+        mEgl = (EGL10) EGLContext.getEGL();
+        mDisplay = display;
+        mContext = context;
+        mSurface = surface;
+        mFbo = fbo;
+        mOwnsContext = ownsContext;
+        mOwnsSurface = ownsSurface;
+    }
+
+    private EGLSurface surface() {
+        return mSurface;
+    }
+
+    private static void initEgl(EGL10 egl, EGLDisplay display) {
+        int[] version = new int[2];
+        if (!egl.eglInitialize(display, version)) {
+            throw new RuntimeException("EGL Error: eglInitialize failed " + getEGLErrorString(egl));
+        }
+    }
+
+    private static EGLDisplay createDefaultDisplay(EGL10 egl) {
+        EGLDisplay display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+        checkDisplay(egl, display);
+        initEgl(egl, display);
+        return display;
+    }
+
+    private static EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig config) {
+        int[] attrib_list = {
+                EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+        EGLContext ctxt = egl.eglCreateContext(display, config, EGL10.EGL_NO_CONTEXT, attrib_list);
+        checkContext(egl, ctxt);
+        return ctxt;
+    }
+
+    private static EGLSurface createSurface(EGL10 egl, EGLDisplay display, int width, int height) {
+        EGLConfig eglConfig = chooseEglConfig(egl, display);
+        int[] attribs = {
+                EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE };
+        return egl.eglCreatePbufferSurface(display, eglConfig, attribs);
+    }
+
+    private static int getCurrentFbo() {
+        int[] result = new int[1];
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, result, 0);
+        return result[0];
+    }
+
+    private static void checkDisplay(EGL10 egl, EGLDisplay display) {
+        if (display == EGL10.EGL_NO_DISPLAY) {
+            throw new RuntimeException("EGL Error: Bad display: " + getEGLErrorString(egl));
+        }
+    }
+
+    private static void checkContext(EGL10 egl, EGLContext context) {
+        if (context == EGL10.EGL_NO_CONTEXT) {
+            throw new RuntimeException("EGL Error: Bad context: " + getEGLErrorString(egl));
+        }
+    }
+
+    private static void checkSurface(EGL10 egl, EGLSurface surface) {
+        if (surface == EGL10.EGL_NO_SURFACE) {
+            throw new RuntimeException("EGL Error: Bad surface: " + getEGLErrorString(egl));
+        }
+    }
+
+    private static void checkEglError(EGL10 egl, String command) {
+        int error = egl.eglGetError();
+        if (error != EGL10.EGL_SUCCESS) {
+            throw new RuntimeException("Error executing " + command + "! EGL error = 0x"
+                    + Integer.toHexString(error));
+        }
+    }
+
+    private static String getEGLErrorString(EGL10 egl) {
+        int eglError = egl.eglGetError();
+        return "EGL Error 0x" + Integer.toHexString(eglError);
+    }
+
+}
diff --git a/src/com/android/camera/gl/SurfaceTextureConsumer.java b/src/com/android/camera/gl/SurfaceTextureConsumer.java
new file mode 100644
index 0000000..b16517c
--- /dev/null
+++ b/src/com/android/camera/gl/SurfaceTextureConsumer.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.gl;
+
+import android.graphics.SurfaceTexture;
+
+import com.android.camera.gl.FrameDistributor;
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+
+/**
+ * Consumes frames from a {@link FrameDistributor} and passes them into a
+ * SurfaceTexture.
+ */
+//TODO: Document this class a bit more and add a test for this class.
+public class SurfaceTextureConsumer implements FrameConsumer {
+
+    private SurfaceTexture mSurfaceTexture;
+    private final float[] mTransform = new float[16];
+    private CopyShader mCopyShader;
+    private RenderTarget mTarget;
+    private int mWidth;
+    private int mHeight;
+
+    @Override
+    public synchronized void onStart() {}
+
+    @Override
+    public synchronized void onNewFrameAvailable(FrameDistributor frameDistributor,
+            long timestampNs) {
+        if (mSurfaceTexture == null) {
+            throw new IllegalStateException("Receiving frames without a SurfaceTexture!");
+        }
+        if (mTarget == null) {
+            mTarget = frameDistributor.getRenderTarget().forSurfaceTexture(mSurfaceTexture);
+        }
+        renderFrameToTarget(frameDistributor);
+    }
+
+    @Override
+    public synchronized void onStop() {
+        releaseResources();
+    }
+
+    public synchronized SurfaceTexture getSurfaceTexture() {
+        return mSurfaceTexture;
+    }
+
+    public synchronized void setSurfaceTexture(SurfaceTexture surfaceTexture,
+            int width,
+            int height) {
+        mSurfaceTexture = surfaceTexture;
+        mWidth = width;
+        mHeight = height;
+        releaseResources();
+    }
+
+    public synchronized void setSize(int width, int height) {
+        mWidth = width;
+        mHeight = height;
+    }
+
+    public synchronized void setDefaultBufferSize(int width, int height) {
+        if (mSurfaceTexture != null) {
+            mSurfaceTexture.setDefaultBufferSize(width, height);
+        }
+    }
+
+    public synchronized int getWidth() {
+        return mWidth;
+    }
+
+    public synchronized int getHeight() {
+        return mHeight;
+    }
+
+    private void releaseResources() {
+        if (mTarget != null) {
+            mTarget.close();
+            mTarget = null;
+        }
+        if (mCopyShader != null) {
+            mCopyShader.release();
+            mCopyShader = null;
+        }
+    }
+
+    private void renderFrameToTarget(FrameDistributor frameDistributor) {
+        CopyShader shader = getCopyShader();
+        int texture = GLToolbox.generateTexture();
+        frameDistributor.acquireNextFrame(texture, mTransform);
+        try {
+            shader.setTransform(mTransform);
+            shader.renderTextureToTarget(texture, mTarget, mWidth, mHeight);
+        } finally {
+            frameDistributor.releaseFrame();
+            GLToolbox.deleteTexture(texture);
+        }
+        mTarget.swapBuffers();
+    }
+
+    private CopyShader getCopyShader() {
+        if (mCopyShader == null) {
+            mCopyShader = CopyShader.compileNewExternalShader();
+        }
+        return mCopyShader;
+    }
+
+}
diff --git a/src/com/android/camera/module/ModuleController.java b/src/com/android/camera/module/ModuleController.java
index 3747a33..f93ee4d 100644
--- a/src/com/android/camera/module/ModuleController.java
+++ b/src/com/android/camera/module/ModuleController.java
@@ -89,14 +89,6 @@
     public void onLayoutOrientationChanged(boolean isLandscape);
 
     /**
-     * Called when the UI orientation is changed.
-     *
-     * @param orientation The new orientation, valid values are 0, 90, 180 and
-     *                    270.
-     */
-    public void onOrientationChanged(int orientation);
-
-    /**
      * Called when back key is pressed.
      *
      * @return Whether the back key event is processed.
diff --git a/src/com/android/camera/one/AbstractOneCamera.java b/src/com/android/camera/one/AbstractOneCamera.java
index 9b473a7..6342fcf 100644
--- a/src/com/android/camera/one/AbstractOneCamera.java
+++ b/src/com/android/camera/one/AbstractOneCamera.java
@@ -16,6 +16,8 @@
 
 package com.android.camera.one;
 
+import com.android.camera.session.CaptureSession;
+
 import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -120,4 +122,14 @@
     public void setZoom(float zoom) {
         // If not implemented, no-op.
     }
+
+    @Override
+    public void startBurst(BurstParameters params, CaptureSession session) {
+        throw new UnsupportedOperationException("Not implemented yet.");
+    }
+
+    @Override
+    public void stopBurst() {
+        throw new UnsupportedOperationException("Not implemented yet.");
+    }
 }
diff --git a/src/com/android/camera/one/OneCamera.java b/src/com/android/camera/one/OneCamera.java
index 24e0a5c..0af93be 100644
--- a/src/com/android/camera/one/OneCamera.java
+++ b/src/com/android/camera/one/OneCamera.java
@@ -21,6 +21,8 @@
 import android.net.Uri;
 import android.view.Surface;
 
+import com.android.camera.burst.BurstConfiguration;
+import com.android.camera.burst.ResultsAccessor;
 import com.android.camera.session.CaptureSession;
 import com.android.camera.util.Size;
 
@@ -215,9 +217,37 @@
     }
 
     /**
+     * Parameters to be given to capture requests.
+     */
+    public static abstract class CaptureParameters {
+        /** The device orientation so we can compute the right JPEG rotation. */
+        public int orientation = Integer.MIN_VALUE;
+
+        /** The location of this capture. */
+        public Location location = null;
+
+        /** Set this to provide a debug folder for this capture. */
+        public File debugDataFolder;
+
+        protected static void checkRequired(int num) {
+            if (num == Integer.MIN_VALUE) {
+                throw new RuntimeException("Photo capture parameter missing.");
+            }
+        }
+
+        protected static void checkRequired(Object obj) {
+            if (obj == null) {
+                throw new RuntimeException("Photo capture parameter missing.");
+            }
+        }
+
+        public abstract void checkSanity();
+    }
+
+    /**
      * Parameters to be given to photo capture requests.
      */
-    public static final class PhotoCaptureParameters {
+    public static class PhotoCaptureParameters extends CaptureParameters {
         /**
          * Flash modes.
          * <p>
@@ -231,26 +261,20 @@
         public String title = null;
         /** Called when the capture is completed or failed. */
         public PictureCallback callback = null;
-        /** The device orientation so we can compute the right JPEG rotation. */
-        public int orientation = Integer.MIN_VALUE;
         /** The heading of the device at time of capture. In degrees. */
         public int heading = Integer.MIN_VALUE;
         /** Flash mode for this capture. */
         public Flash flashMode = Flash.AUTO;
-        /** The location of this capture. */
-        public Location location = null;
         /** Zoom value. */
         public float zoom = 1f;
         /** Timer duration in seconds or null for no timer. */
         public Float timerSeconds = null;
 
-        /** Set this to provide a debug folder for this capture. */
-        public File debugDataFolder;
-
         /**
          * Checks whether all required values are set. If one is missing, it
          * throws a {@link RuntimeException}.
          */
+        @Override
         public void checkSanity() {
             checkRequired(title);
             checkRequired(callback);
@@ -258,16 +282,34 @@
             checkRequired(heading);
         }
 
-        private void checkRequired(int num) {
-            if (num == Integer.MIN_VALUE) {
-                throw new RuntimeException("Photo capture parameter missing.");
-            }
-        }
+    }
 
-        private void checkRequired(Object obj) {
-            if (obj == null) {
-                throw new RuntimeException("Photo capture parameter missing.");
-            }
+    /**
+     * The callback to be invoked when results are available.
+     */
+    public interface BurstResultsCallback {
+        void onBurstComplete(ResultsAccessor resultAccessor);
+    }
+
+    /**
+     * Parameters to be given to burst requests.
+     */
+    public static class BurstParameters extends CaptureParameters {
+        /** The title/filename (without suffix) for this capture. */
+        public String title = null;
+        public BurstConfiguration burstConfiguration;
+        public BurstResultsCallback callback;
+
+        /**
+         * Checks whether all required values are set. If one is missing, it
+         * throws a {@link RuntimeException}.
+         */
+        @Override
+        public void checkSanity() {
+            checkRequired(title);
+            checkRequired(callback);
+            checkRequired(burstConfiguration);
+
         }
     }
 
@@ -291,6 +333,21 @@
     public void takePicture(PhotoCaptureParameters params, CaptureSession session);
 
     /**
+     * Call this to take a burst.
+     *
+     * @param params parameters for taking burst.
+     * @param session the capture session for this burst.
+     */
+
+    public void startBurst(BurstParameters params, CaptureSession session);
+
+    /**
+     * Call this to stop an in-progress burst.
+     *
+     */
+    public void stopBurst();
+
+    /**
      * Sets or replaces a listener that is called whenever the camera encounters
      * an error.
      */
diff --git a/src/com/android/camera/one/v2/ImageCaptureManager.java b/src/com/android/camera/one/v2/ImageCaptureManager.java
index 09c2bdb..4240a89 100644
--- a/src/com/android/camera/one/v2/ImageCaptureManager.java
+++ b/src/com/android/camera/one/v2/ImageCaptureManager.java
@@ -29,6 +29,7 @@
 import android.os.SystemClock;
 import android.util.Pair;
 
+import com.android.camera.burst.BurstConfiguration.EvictionHandler;
 import com.android.camera.debug.Log;
 import com.android.camera.debug.Log.Tag;
 import com.android.camera.util.ConcurrentSharedRingBuffer;
@@ -44,13 +45,15 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executor;
 import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
 
 /**
  * Implements {@link android.media.ImageReader.OnImageAvailableListener} and
  * {@link android.hardware.camera2.CameraCaptureSession.CaptureListener} to
  * store the results of capture requests (both {@link Image}s and
- * {@link TotalCaptureResult}s in a ring-buffer from which they may be saved. 
+ * {@link TotalCaptureResult}s in a ring-buffer from which they may be saved.
  * <br>
  * This also manages the lifecycle of {@link Image}s within the application as
  * they are passed in from the lower-level camera2 API.
@@ -215,8 +218,50 @@
         public TotalCaptureResult tryGetMetadata() {
             return mMetadata;
         }
+
+        /**
+         * Returns the timestamp of the image if present, -1 otherwise.
+         */
+        public long tryGetTimestamp() {
+            if (mImage != null) {
+                return mImage.getTimestamp();
+            }
+            if (mMetadata != null) {
+                return mMetadata.get(TotalCaptureResult.SENSOR_TIMESTAMP);
+            }
+            return -1;
+        }
     }
 
+    /**
+     * A stub implementation of an eviction handler that returns -1 as the
+     * timestamp of the frame to be dropped.
+     * <p/>
+     * This forces the ring buffer to use its default eviction strategy.
+     */
+    private static class DefaultEvictionHandler implements EvictionHandler {
+        @Override
+        public long selectFrameToDrop() {
+            return -1;
+        }
+
+        @Override
+        public void onFrameCaptureResultAvailable(long timestamp,
+                TotalCaptureResult captureResult) {
+        }
+
+        @Override
+        public void onFrameInserted(long timestamp) {
+        }
+
+        @Override
+        public void onFrameDropped(long timestamp) {
+        }
+    }
+
+    private static final EvictionHandler DEFAULT_EVICTION_HANDLER =
+            new DefaultEvictionHandler();
+
     private static final Tag TAG = new Tag("ZSLImageListener");
 
     /**
@@ -274,6 +319,10 @@
      */
     private final Executor mImageCaptureListenerExecutor;
 
+    private final AtomicReference<EvictionHandler> mEvictionHandler =
+            new AtomicReference<EvictionHandler>(DEFAULT_EVICTION_HANDLER);
+    private final AtomicBoolean mIsCapturingBurst = new AtomicBoolean(false);
+
     /**
      * The set of constraints which must be satisfied for a newly acquired image
      * to be captured and sent to {@link #mPendingImageCaptureCallback}. null if
@@ -304,6 +353,12 @@
             mMetadataChangeListeners = new ConcurrentHashMap<Key<?>, Set<MetadataChangeListener>>();
 
     /**
+     * The lock for guarding installation and uninstallation of burst eviction
+     * handler.
+     */
+    private final Object mBurstLock = new Object();
+
+    /**
      * @param maxImages the maximum number of images provided by the
      *            {@link ImageReader}. This must be greater than 2.
      * @param listenerHandler the handler on which to invoke listeners. Note
@@ -433,28 +488,7 @@
         // Find the CapturedImage in the ring-buffer and attach the
         // TotalCaptureResult to it.
         // See documentation for swapLeast() for details.
-        boolean swapSuccess = mCapturedImageBuffer.swapLeast(timestamp,
-                new SwapTask<CapturedImage>() {
-                @Override
-                    public CapturedImage create() {
-                        CapturedImage image = new CapturedImage();
-                        image.addMetadata(result);
-                        return image;
-                    }
-
-                @Override
-                    public CapturedImage swap(CapturedImage oldElement) {
-                        oldElement.reset();
-                        oldElement.addMetadata(result);
-                        return oldElement;
-                    }
-
-                @Override
-                    public void update(CapturedImage existingElement) {
-                        existingElement.addMetadata(result);
-                    }
-                });
-
+        boolean swapSuccess = doMetaDataSwap(result, timestamp);
         if (!swapSuccess) {
             // Do nothing on failure to swap in.
             Log.v(TAG, "Unable to add new image metadata to ring-buffer.");
@@ -463,6 +497,86 @@
         tryExecutePendingCaptureRequest(timestamp);
     }
 
+    private boolean doMetaDataSwap(final TotalCaptureResult newMetadata, final long timestamp) {
+        mEvictionHandler.get().onFrameCaptureResultAvailable(timestamp, newMetadata);
+
+        if (mIsCapturingBurst.get()) {
+            // In case of burst we do not swap metadata in the ring buffer. This
+            // is to avoid the following scenario. If image for frame with
+            // timestamp A arrives first and the eviction handler decides to
+            // evict timestamp A. But when metadata for timestamp A arrives
+            // the eviction handler chooses to keep timestamp A. In this case
+            // the image for A will never be available.
+            return false;
+        }
+
+        return mCapturedImageBuffer.swapLeast(timestamp,
+                new SwapTask<CapturedImage>() {
+                @Override
+                    public CapturedImage create() {
+                        CapturedImage image = new CapturedImage();
+                        image.addMetadata(newMetadata);
+                        return image;
+                    }
+
+                @Override
+                    public CapturedImage swap(CapturedImage oldElement) {
+                        oldElement.reset();
+                        oldElement.addMetadata(newMetadata);
+                        return oldElement;
+                    }
+
+                @Override
+                    public void update(CapturedImage existingElement) {
+                        existingElement.addMetadata(newMetadata);
+                    }
+
+                @Override
+                    public long getSwapKey() {
+                        return -1;
+                    }
+                });
+    }
+
+    private boolean doImageSwap(final Image newImage) {
+        return mCapturedImageBuffer.swapLeast(newImage.getTimestamp(),
+                new SwapTask<CapturedImage>() {
+                @Override
+                    public CapturedImage create() {
+                        mEvictionHandler.get().onFrameInserted(newImage.getTimestamp());
+                        CapturedImage image = new CapturedImage();
+                        image.addImage(newImage);
+                        return image;
+                    }
+
+                @Override
+                    public CapturedImage swap(CapturedImage oldElement) {
+                        mEvictionHandler.get().onFrameInserted(newImage.getTimestamp());
+                        long timestamp = oldElement.tryGetTimestamp();
+                        mEvictionHandler.get().onFrameDropped(timestamp);
+                        oldElement.reset();
+                        CapturedImage image = new CapturedImage();
+                        image.addImage(newImage);
+                        return image;
+                    }
+
+                @Override
+                    public void update(CapturedImage existingElement) {
+                        mEvictionHandler.get().onFrameInserted(newImage.getTimestamp());
+                        existingElement.addImage(newImage);
+                    }
+
+                @Override
+                    public long getSwapKey() {
+                    final long toDropTimestamp = mEvictionHandler.get().selectFrameToDrop();
+                        if (toDropTimestamp > 0) {
+                            mCapturedImageBuffer.releaseIfPinned(toDropTimestamp);
+                        }
+                        return toDropTimestamp;
+                    }
+                });
+    }
+
     @Override
     public void onImageAvailable(ImageReader reader) {
         long startTime = SystemClock.currentThreadTimeMillis();
@@ -475,29 +589,9 @@
                 Log.v(TAG, "Acquired an image. Number of open images = " + numOpenImages);
             }
 
+            long timestamp = img.getTimestamp();
             // Try to place the newly-acquired image into the ring buffer.
-            boolean swapSuccess = mCapturedImageBuffer.swapLeast(
-                    img.getTimestamp(), new SwapTask<CapturedImage>() {
-                            @Override
-                        public CapturedImage create() {
-                            CapturedImage image = new CapturedImage();
-                            image.addImage(img);
-                            return image;
-                        }
-
-                            @Override
-                        public CapturedImage swap(CapturedImage oldElement) {
-                            oldElement.reset();
-                            oldElement.addImage(img);
-                            return oldElement;
-                        }
-
-                            @Override
-                        public void update(CapturedImage existingElement) {
-                            existingElement.addImage(img);
-                        }
-                    });
-
+            boolean swapSuccess = doImageSwap(img);
             if (!swapSuccess) {
                 // If we were unable to save the image to the ring buffer, we
                 // must close it now.
@@ -507,9 +601,14 @@
                 if (DEBUG_PRINT_OPEN_IMAGE_COUNT) {
                     Log.v(TAG, "Closed an image. Number of open images = " + numOpenImages);
                 }
+            } else {
+                if (mIsCapturingBurst.get()) {
+                    // In case of burst we pin every image.
+                    mCapturedImageBuffer.tryPin(timestamp);
+                }
             }
 
-            tryExecutePendingCaptureRequest(img.getTimestamp());
+            tryExecutePendingCaptureRequest(timestamp);
 
             long endTime = SystemClock.currentThreadTimeMillis();
             long totTime = endTime - startTime;
@@ -526,16 +625,7 @@
      * s.
      */
     public void close() {
-        try {
-            mCapturedImageBuffer.close(new Task<CapturedImage>() {
-                    @Override
-                public void run(CapturedImage e) {
-                    e.reset();
-                }
-            });
-        } catch (InterruptedException e) {
-            e.printStackTrace();
-        }
+        closeBuffer();
     }
 
     /**
@@ -692,4 +782,107 @@
             return true;
         }
     }
+
+    /**
+     * Tries to capture a pinned image for the given key from the ring-buffer.
+     *
+     * @return the pair of (image, captureResult) if image is found, null
+     *         otherwise.
+     */
+    public Pair<Image, TotalCaptureResult>
+            tryCapturePinnedImage(long timestamp) {
+        final Pair<Long, CapturedImage> toCapture =
+                mCapturedImageBuffer.tryGetPinned(timestamp);
+        Image pinnedImage = null;
+        TotalCaptureResult imageCaptureResult = null;
+        // Return an Image
+        if (toCapture != null && toCapture.second != null) {
+            pinnedImage = toCapture.second.tryGetImage();
+            imageCaptureResult = toCapture.second.tryGetMetadata();
+        }
+        return Pair.create(pinnedImage, imageCaptureResult);
+    }
+
+    /**
+     * Sets a new burst eviction handler for the internal buffer.
+     * <p/>
+     * Also clears the buffer. If there was an old burst eviction handler
+ * already installed, this method will throw an exception.
+     *
+     * @param evictionHandler the handler to install on the internal image
+     *            buffer.
+     */
+    public void setBurstEvictionHandler(EvictionHandler evictionHandler) {
+        if (evictionHandler == null) {
+            throw new IllegalArgumentException("setBurstEvictionHandler: evictionHandler is null.");
+        }
+        synchronized (mBurstLock) {
+            if (mIsCapturingBurst.compareAndSet(false, true)) {
+                if (!mEvictionHandler.compareAndSet(DEFAULT_EVICTION_HANDLER,
+                        evictionHandler)) {
+                    throw new IllegalStateException(
+                            "Trying to set eviction handler before restoring the original.");
+                } else {
+                    clearCapturedImageBuffer(0);
+                }
+            } else {
+                throw new IllegalStateException("Trying to start burst when it was already running.");
+            }
+        }
+    }
+
+    /**
+     * Removes the burst eviction handler from the buffer.
+     */
+    public void resetEvictionHandler() {
+        synchronized (mBurstLock) {
+            mEvictionHandler.set(DEFAULT_EVICTION_HANDLER);
+        }
+    }
+
+    /**
+     * Clears the underlying buffer and reset the eviction handler.
+     */
+    public void resetCaptureState() {
+        synchronized (mBurstLock) {
+            if (mIsCapturingBurst.compareAndSet(true, false)) {
+                // By default the image buffer has 1 slot that is reserved for
+                // unpinned elements.
+                clearCapturedImageBuffer(1);
+                mEvictionHandler.set(DEFAULT_EVICTION_HANDLER);
+            }
+        }
+    }
+
+    /**
+     * Clears the buffer and reserves <code>unpinnedReservedSlots</code> in the buffer.
+     *
+     * @param unpinnedReservedSlots the number of unpinned slots that are never
+     *            allowed to be pinned.
+     */
+    private void clearCapturedImageBuffer(int unpinnedReservedSlots) {
+        mCapturedImageBuffer.releaseAll();
+        closeBuffer();
+        try {
+            mCapturedImageBuffer.reopenBuffer(unpinnedReservedSlots);
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * Closes the buffer and frees up any images in the buffer.
+     */
+    private void closeBuffer() {
+        try {
+            mCapturedImageBuffer.close(new Task<CapturedImage>() {
+                @Override
+                public void run(CapturedImage e) {
+                    e.reset();
+                }
+            });
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
 }
diff --git a/src/com/android/camera/one/v2/OneCameraZslImpl.java b/src/com/android/camera/one/v2/OneCameraZslImpl.java
index ed9aa02..c9905fe 100644
--- a/src/com/android/camera/one/v2/OneCameraZslImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraZslImpl.java
@@ -39,10 +39,13 @@
 import android.os.HandlerThread;
 import android.os.SystemClock;
 import android.support.v4.util.Pools;
+import android.util.Pair;
 import android.view.Surface;
 
 import com.android.camera.CaptureModuleUtil;
 import com.android.camera.app.MediaSaver.OnMediaSavedListener;
+import com.android.camera.burst.BurstImage;
+import com.android.camera.burst.ResultsAccessor;
 import com.android.camera.debug.Log;
 import com.android.camera.debug.Log.Tag;
 import com.android.camera.exif.ExifInterface;
@@ -56,8 +59,8 @@
 import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
 import com.android.camera.session.CaptureSession;
 import com.android.camera.util.CameraUtil;
-import com.android.camera.util.ListenerCombiner;
 import com.android.camera.util.JpegUtilNative;
+import com.android.camera.util.ListenerCombiner;
 import com.android.camera.util.Size;
 
 import java.nio.ByteBuffer;
@@ -67,9 +70,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
 
 /**
  * {@link OneCamera} implementation directly on top of the Camera2 API with zero
@@ -84,12 +90,12 @@
     private static final int JPEG_QUALITY =
             CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
     /**
-     * The maximum number of images to store in the full-size ZSL ring buffer. 
+     * The maximum number of images to store in the full-size ZSL ring buffer.
      * <br>
      * TODO: Determine this number dynamically based on available memory and the
      * size of frames.
      */
-    private static final int MAX_CAPTURE_IMAGES = 10;
+    private static final int MAX_CAPTURE_IMAGES = 12;
     /**
      * True if zero-shutter-lag images should be captured. Some devices produce
      * lower-quality images for the high-frequency stream, so we may wish to
@@ -188,6 +194,9 @@
 
     private MediaActionSound mMediaActionSound = new MediaActionSound();
 
+    private final AtomicReference<BurstParameters>
+            mBurstParams = new AtomicReference<BurstParameters>();
+
     /**
      * Ready state (typically displayed by the UI shutter-button) depends on two
      * things:<br>
@@ -610,8 +619,8 @@
     private void savePicture(Image image, final PhotoCaptureParameters captureParams,
             CaptureSession session) {
         int heading = captureParams.heading;
+        int degrees = CameraUtil.getJpegRotation(captureParams.orientation, mCharacteristics);
 
-        int degrees = (captureParams.orientation + 270) % 360;
         ExifInterface exif = null;
 
         exif = new ExifInterface();
@@ -635,7 +644,6 @@
             exif.setTag(directionRefTag);
             exif.setTag(directionTag);
         }
-        // TODO Find out why this is off by -90 degrees.
         session.saveAndFinish(acquireJpegBytes(image, degrees),
                 size.getWidth(), size.getHeight(), 0, exif, new OnMediaSavedListener() {
                         @Override
@@ -972,7 +980,7 @@
      *
      * @param img the image from which to extract jpeg bytes or compress to
      *            jpeg.
-     * @param degrees the angle to rotate the image, in degrees. Rotation is
+     * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
      *            only applied to YUV images.
      * @return The bytes of the JPEG image. Newly allocated.
      */
@@ -1080,4 +1088,70 @@
     private Rect cropRegionForZoom(float zoom) {
         return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
     }
+
+    @Override
+    public void startBurst(BurstParameters params, CaptureSession session) {
+        params.checkSanity();
+        if (!mBurstParams.compareAndSet(null, params)) {
+            throw new IllegalStateException(
+                    "Attempting to start burst, when burst is already running.");
+        }
+        mCaptureManager.setBurstEvictionHandler(params.
+                burstConfiguration.getEvictionHandler());
+    }
+
+    private class ImageExtractor implements ResultsAccessor {
+        private final int mOrientation;
+
+        public ImageExtractor(int orientation) {
+            mOrientation = orientation;
+        }
+
+        @Override
+        public Future<BurstImage> extractImage(final long timestampToExtract) {
+            final Pair<Image, TotalCaptureResult> pinnedImageData =
+                    mCaptureManager.tryCapturePinnedImage(timestampToExtract);
+            return mImageSaverThreadPool.submit(new Callable<BurstImage>() {
+
+                @Override
+                public BurstImage call() throws Exception {
+                    BurstImage burstImage = null;
+                    Image image = pinnedImageData.first;
+                    if (image != null) {
+                        burstImage = new BurstImage();
+                        int degrees = CameraUtil.getJpegRotation(mOrientation, mCharacteristics);
+                        Size size = getImageSizeForOrientation(image.getWidth(),
+                                image.getHeight(),
+                                degrees);
+                        burstImage.width = size.getWidth();
+                        burstImage.height = size.getHeight();
+                        burstImage.data = acquireJpegBytes(image,
+                                degrees);
+                        burstImage.captureResult = pinnedImageData.second;
+                        burstImage.timestamp = timestampToExtract;
+                    } else {
+                        Log.e(TAG, "Failed to extract burst image for timestamp: "
+                                + timestampToExtract);
+                    }
+                    return burstImage;
+                }
+            });
+        }
+
+        @Override
+        public void close() {
+            mCaptureManager.resetCaptureState();
+        }
+    }
+
+    @Override
+    public void stopBurst() {
+        if (mBurstParams.get() == null) {
+            throw new IllegalStateException("Burst parameters should not be null.");
+        }
+        mCaptureManager.resetEvictionHandler();
+        mBurstParams.get().callback.onBurstComplete(
+                new ImageExtractor(mBurstParams.get().orientation));
+        mBurstParams.set(null);
+    }
 }
diff --git a/src/com/android/camera/util/ApiHelper.java b/src/com/android/camera/util/ApiHelper.java
index 7f6f59c..7e8a748 100644
--- a/src/com/android/camera/util/ApiHelper.java
+++ b/src/com/android/camera/util/ApiHelper.java
@@ -21,6 +21,9 @@
 import java.lang.reflect.Field;
 
 public class ApiHelper {
+    // Documented value of CPU_ABI on x86 architectures
+    private static final String X86ABI = "x86";
+
     public static final boolean AT_LEAST_16 = Build.VERSION.SDK_INT >= 16;
 
     public static final boolean HAS_APP_GALLERY =
@@ -55,6 +58,10 @@
 
     public static final boolean HAS_HIDEYBARS = isKitKatOrHigher();
 
+    // Don't use RenderScript on x86 under KitKat (K); Lollipop (L) is OK. See b/18435492
+    public static final boolean HAS_RENDERSCRIPT =
+            !(Build.VERSION.SDK_INT == Build.VERSION_CODES.KITKAT && X86ABI.equals(Build.CPU_ABI));
+
     public static final boolean IS_NEXUS_4 = "mako".equalsIgnoreCase(Build.DEVICE);
     public static final boolean IS_NEXUS_5 = "LGE".equalsIgnoreCase(Build.MANUFACTURER)
             && "hammerhead".equalsIgnoreCase(Build.DEVICE);
diff --git a/src/com/android/camera/util/CameraUtil.java b/src/com/android/camera/util/CameraUtil.java
index ccd12e7..c587ff4 100644
--- a/src/com/android/camera/util/CameraUtil.java
+++ b/src/com/android/camera/util/CameraUtil.java
@@ -451,45 +451,6 @@
         return 0;
     }
 
-    /**
-     * Calculate the default orientation of the device based on the width and
-     * height of the display when rotation = 0 (i.e. natural width and height)
-     *
-     * @param context current context
-     * @return whether the default orientation of the device is portrait
-     */
-    public static boolean isDefaultToPortrait(Context context) {
-        Display currentDisplay = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE))
-                .getDefaultDisplay();
-        Point displaySize = new Point();
-        currentDisplay.getSize(displaySize);
-        int orientation = currentDisplay.getRotation();
-        int naturalWidth, naturalHeight;
-        if (orientation == Surface.ROTATION_0 || orientation == Surface.ROTATION_180) {
-            naturalWidth = displaySize.x;
-            naturalHeight = displaySize.y;
-        } else {
-            naturalWidth = displaySize.y;
-            naturalHeight = displaySize.x;
-        }
-        return naturalWidth < naturalHeight;
-    }
-
-    public static int roundOrientation(int orientation, int orientationHistory) {
-        boolean changeOrientation = false;
-        if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            changeOrientation = true;
-        } else {
-            int dist = Math.abs(orientation - orientationHistory);
-            dist = Math.min(dist, 360 - dist);
-            changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS);
-        }
-        if (changeOrientation) {
-            return ((orientation + 45) / 90 * 90) % 360;
-        }
-        return orientationHistory;
-    }
-
     private static Size getDefaultDisplaySize(Context context) {
         WindowManager windowManager = (WindowManager) context
                 .getSystemService(Context.WINDOW_SERVICE);
@@ -1371,20 +1332,40 @@
      * Given the device orientation and Camera2 characteristics, this returns
      * the required JPEG rotation for this camera.
      *
-     * @param deviceOrientationDegrees the device orientation in degrees.
-     * @return The JPEG orientation in degrees.
+     * @param deviceOrientationDegrees the clockwise angle of the device orientation from its
+     *                                 natural orientation in degrees.
+     * @return The angle to rotate image clockwise in degrees. It should be 0, 90, 180, or 270.
      */
     public static int getJpegRotation(int deviceOrientationDegrees,
-            CameraCharacteristics characteristics) {
+                                      CameraCharacteristics characteristics) {
         if (deviceOrientationDegrees == OrientationEventListener.ORIENTATION_UNKNOWN) {
             return 0;
         }
-        int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
+        boolean isFrontCamera = characteristics.get(CameraCharacteristics.LENS_FACING) ==
+                CameraMetadata.LENS_FACING_FRONT;
         int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
-        if (facing == CameraMetadata.LENS_FACING_FRONT) {
-            return (sensorOrientation + deviceOrientationDegrees) % 360;
-        } else {
-            return (sensorOrientation - deviceOrientationDegrees + 360) % 360;
+        return getImageRotation(sensorOrientation, deviceOrientationDegrees, isFrontCamera);
+    }
+
+    /**
+     * Given the camera sensor orientation and device orientation, this returns a clockwise angle
+     * which the final image needs to be rotated to be upright on the device screen.
+     *
+     * @param sensorOrientation Clockwise angle through which the output image needs to be rotated
+     *                          to be upright on the device screen in its native orientation.
+     * @param deviceOrientation Clockwise angle of the device orientation from its
+     *                          native orientation when front camera faces user.
+     * @param isFrontCamera True if the camera is front-facing.
+     * @return The angle to rotate image clockwise in degrees. It should be 0, 90, 180, or 270.
+     */
+    public static int getImageRotation(int sensorOrientation,
+                                       int deviceOrientation,
+                                       boolean isFrontCamera) {
+        int rotation = (sensorOrientation + deviceOrientation) % 360;
+        // The sensor of front camera faces in the opposite direction from back camera.
+        if (isFrontCamera) {
+            return (360 - rotation) % 360;
         }
+        return rotation;
     }
 }
diff --git a/src/com/android/camera/util/ConcurrentSharedRingBuffer.java b/src/com/android/camera/util/ConcurrentSharedRingBuffer.java
index 4e44972..249cc19 100644
--- a/src/com/android/camera/util/ConcurrentSharedRingBuffer.java
+++ b/src/com/android/camera/util/ConcurrentSharedRingBuffer.java
@@ -70,6 +70,16 @@
          * @param existingElement the element to be updated.
          */
         public void update(E existingElement);
+
+        /**
+         * Returns the key of the element that the ring buffer should prefer
+         * when considering a swapping candidate. If the returned key is not an
+         * unpinned element then the ring buffer will replace the element with
+         * the least key.
+         *
+         * @return a key of an existing unpinned element or a negative value.
+         */
+        public long getSwapKey();
     }
 
     /**
@@ -119,6 +129,29 @@
         }
     }
 
+    /**
+     * A Semaphore that allows to reduce permits to negative values.
+     */
+    private static class NegativePermitsSemaphore extends Semaphore {
+        public NegativePermitsSemaphore(int permits) {
+            super(permits);
+        }
+
+        /**
+         * Reduces the number of permits by <code>permits</code>.
+         * <p/>
+         * This method can only be called when the number of available
+         * permits is zero.
+         */
+        @Override
+        public void reducePermits(int permits) {
+            if (availablePermits() != 0) {
+                throw new IllegalStateException("Called without draining the semaphore.");
+            }
+            super.reducePermits(permits);
+        }
+    }
+
     /** Allow only one swapping operation at a time. */
     private final Object mSwapLock = new Object();
     /**
@@ -137,7 +170,7 @@
     /** Used to acquire space in mElements. */
     private final Semaphore mCapacitySemaphore;
     /** This must be acquired while an element is pinned. */
-    private final Semaphore mPinSemaphore;
+    private final NegativePermitsSemaphore mPinSemaphore;
     private boolean mClosed = false;
 
     private Handler mPinStateHandler = null;
@@ -159,7 +192,7 @@
         // Start with -1 permits to pin elements since we must always have at
         // least one unpinned
         // element available to swap out as the head of the buffer.
-        mPinSemaphore = new Semaphore(-1);
+        mPinSemaphore = new NegativePermitsSemaphore(-1);
     }
 
     /**
@@ -239,20 +272,36 @@
                     if (mClosed) {
                         return false;
                     }
-
-                    Map.Entry<Long, Pinnable<E>> toSwapEntry = mUnpinnedElements.pollFirstEntry();
-
-                    if (toSwapEntry == null) {
-                        // We should never get here.
-                        throw new RuntimeException("No unpinned element available.");
+                    Pair<Long, Pinnable<E>> toSwapEntry = null;
+                    long swapKey = swapper.getSwapKey();
+                    // If swapKey is same as the inserted key return early.
+                    if (swapKey == newKey) {
+                        return false;
                     }
 
-                    toSwap = toSwapEntry.getValue();
+                    if (mUnpinnedElements.containsKey(swapKey)) {
+                        toSwapEntry = Pair.create(swapKey, mUnpinnedElements.remove(swapKey));
+                    } else {
+                        // The returned key from getSwapKey was not found in the
+                        // unpinned elements use the least entry from the
+                        // unpinned elements.
+                        Map.Entry<Long, Pinnable<E>> swapEntry = mUnpinnedElements.pollFirstEntry();
+                        if (swapEntry != null) {
+                            toSwapEntry = Pair.create(swapEntry.getKey(), swapEntry.getValue());
+                        }
+                    }
+
+                    if (toSwapEntry == null) {
+                        // We can get here if no unpinned element was found.
+                        return false;
+                    }
+
+                    toSwap = toSwapEntry.second;
 
                     // We must remove the element from both mElements and
                     // mUnpinnedElements because it must be re-added after the
                     // swap to be placed in the correct order with newKey.
-                    mElements.remove(toSwapEntry.getKey());
+                    mElements.remove(toSwapEntry.first);
                 }
 
                 try {
@@ -335,7 +384,8 @@
             Pinnable<E> element = mElements.get(key);
 
             if (element == null) {
-                throw new InvalidParameterException("No entry found for the given key.");
+                throw new InvalidParameterException(
+                        "No entry found for the given key: " + key + ".");
             }
 
             if (!element.isPinned()) {
@@ -458,6 +508,8 @@
 
         for (Pinnable<E> element : mElements.values()) {
             task.run(element.mElement);
+            // Release the capacity permits.
+            mCapacitySemaphore.release();
         }
 
         mUnpinnedElements.clear();
@@ -465,6 +517,107 @@
         mElements.clear();
     }
 
+    /**
+     * Attempts to get a pinned element for the given key.
+     *
+     * @param key the key of the pinned element.
+     * @return (key, value) pair if found otherwise null.
+     */
+    public Pair<Long, E> tryGetPinned(long key) {
+        synchronized (mLock) {
+            if (mClosed) {
+                return null;
+            }
+            for (java.util.Map.Entry<Long, Pinnable<E>> element : mElements.entrySet()) {
+                if (element.getKey() == key) {
+                    if (element.getValue().isPinned()) {
+                        return Pair.create(element.getKey(), element.getValue().getElement());
+                    } else {
+                        return null;
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Reopens previously closed buffer.
+     * <p/>
+     * Buffer should be closed before calling this method. If called with an
+     * open buffer an {@link IllegalStateException} is thrown.
+     *
+     * @param unpinnedReservedSlotCount a non-negative integer for the number of
+     *            slots to reserve for unpinned elements. These slots can never
+     *            be pinned and will always be available for swapping.
+     * @throws InterruptedException
+     */
+    public void reopenBuffer(int unpinnedReservedSlotCount)
+            throws InterruptedException {
+        if (unpinnedReservedSlotCount < 0
+                || unpinnedReservedSlotCount >= mCapacitySemaphore.availablePermits()) {
+            throw new IllegalArgumentException("Invalid unpinned reserved slot count: " +
+                    unpinnedReservedSlotCount);
+        }
+
+        // Ensure that any pending swap tasks complete before reopening.
+        synchronized (mSwapLock) {
+            synchronized (mLock) {
+                if (!mClosed) {
+                    throw new IllegalStateException(
+                            "Attempt to reopen the buffer when it is not closed.");
+                }
+
+                mPinSemaphore.drainPermits();
+                mPinSemaphore.reducePermits(unpinnedReservedSlotCount);
+                mClosed = false;
+            }
+        }
+    }
+
+    /**
+     * Releases a pinned element for the given key.
+     * <p/>
+     * If element is unpinned, it is not released.
+     *
+     * @param key the key of the element; if the element is not present, an
+     *            {@link IllegalArgumentException} is thrown.
+     */
+    public void releaseIfPinned(long key) {
+        synchronized (mLock) {
+            Pinnable<E> element = mElements.get(key);
+
+            if (element == null) {
+                throw new IllegalArgumentException("Invalid key." + key);
+            }
+
+            if (element.isPinned()) {
+                release(key);
+            }
+        }
+    }
+
+    /**
+     * Releases all pinned elements in the buffer.
+     * <p/>
+     * Note: it calls {@link #release(long)} only once on a pinned element.
+     */
+    public void releaseAll() {
+        synchronized (mSwapLock) {
+            synchronized (mLock) {
+                if (mClosed || mElements.isEmpty()
+                        || mElements.size() == mUnpinnedElements.size()) {
+                    return;
+                }
+                for (java.util.Map.Entry<Long, Pinnable<E>> entry : mElements.entrySet()) {
+                    if (entry.getValue().isPinned()) {
+                        release(entry.getKey());
+                    }
+                }
+            }
+        }
+    }
+
     private void notifyPinStateChange(final boolean pinsAvailable) {
         synchronized (mLock) {
             // We must synchronize on mPinStateHandler and mPinStateListener.
diff --git a/src/com/android/camera/util/JpegUtilNative.java b/src/com/android/camera/util/JpegUtilNative.java
index ff288cb..94d4b19 100644
--- a/src/com/android/camera/util/JpegUtilNative.java
+++ b/src/com/android/camera/util/JpegUtilNative.java
@@ -69,6 +69,8 @@
      * @param pStride the stride between adjacent pixels in the same row of
      *            planeBuf
      * @param rStride the stride between adjacent rows in planeBuf
+     * @param outBitmap the output bitmap object
+     * @param rot90 the multiple of 90 degrees to rotate counterclockwise, one of {0, 1, 2, 3}.
      */
     private static native void copyImagePlaneToBitmap(int width, int height, Object planeBuf,
             int pStride, int rStride, Object outBitmap, int rot90);
@@ -165,7 +167,7 @@
      * @param img the image to compress
      * @param outBuf a direct byte buffer to hold the output jpeg.
      * @param quality the jpeg encoder quality (0 to 100)
-     * @param rotation the amount to rotate the image clockwise, in degrees.
+     * @param degrees the amount to rotate the image clockwise, in degrees.
      * @return The number of bytes written to outBuf
      */
     public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality,
@@ -220,7 +222,9 @@
 
             Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
 
-            copyImagePlaneToBitmap(plane, bitmap, degrees / 90);
+            // TODO: Make copyImagePlaneToBitmap take clockwise angle to rotate the bitmap.
+            int counterClockwiseDegrees = (360 - degrees) % 360;
+            copyImagePlaneToBitmap(plane, bitmap, counterClockwiseDegrees / 90);
 
             Bitmap rotatedBitmap = bitmap;
 
diff --git a/src/com/android/camera/widget/ModeOptionsOverlay.java b/src/com/android/camera/widget/ModeOptionsOverlay.java
index 4da24d5..a4a4434 100644
--- a/src/com/android/camera/widget/ModeOptionsOverlay.java
+++ b/src/com/android/camera/widget/ModeOptionsOverlay.java
@@ -114,6 +114,11 @@
         // noop
     }
 
+    @Override
+    public void onShutterButtonLongPressed() {
+        // noop
+    }
+
     /**
      * Schedule (or re-schedule) the options menu to be closed after a number
      * of milliseconds.  If the options menu is already closed, nothing is
diff --git a/src/com/android/camera/widget/VideoRecordingHints.java b/src/com/android/camera/widget/VideoRecordingHints.java
index 1c0b7a3..32754e0 100644
--- a/src/com/android/camera/widget/VideoRecordingHints.java
+++ b/src/com/android/camera/widget/VideoRecordingHints.java
@@ -23,10 +23,9 @@
 import android.graphics.Canvas;
 import android.graphics.drawable.Drawable;
 import android.util.AttributeSet;
-import android.view.OrientationEventListener;
 import android.view.View;
 
-import com.android.camera.util.CameraUtil;
+import com.android.camera.app.OrientationManager;
 import com.android.camera2.R;
 
 import java.lang.ref.WeakReference;
@@ -51,14 +50,12 @@
     private final Drawable mRotateArrows;
     private final Drawable mPhoneGraphic;
     private final int mPhoneGraphicHalfHeight;
-    private final boolean mIsDefaultToPortrait;
     private float mRotation = INITIAL_ROTATION;
     private final ValueAnimator mRotationAnimation;
     private final ObjectAnimator mAlphaAnimator;
     private boolean mIsInLandscape = false;
     private int mCenterX = UNSET;
     private int mCenterY = UNSET;
-    private int mLastOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
 
     private static class RotationAnimatorListener implements Animator.AnimatorListener {
         private final WeakReference<VideoRecordingHints> mHints;
@@ -168,7 +165,6 @@
         mAlphaAnimator = ObjectAnimator.ofFloat(this, "alpha", 1f, 0f);
         mAlphaAnimator.setDuration(FADE_OUT_DURATION_MS);
         mAlphaAnimator.addListener(new AlphaAnimatorListener(this));
-        mIsDefaultToPortrait = CameraUtil.isDefaultToPortrait(context);
     }
 
     /**
@@ -187,7 +183,7 @@
     @Override
     public void onVisibilityChanged(View v, int visibility) {
         super.onVisibilityChanged(v, visibility);
-        if (getVisibility() == VISIBLE && !isInLandscape()) {
+        if (getVisibility() == VISIBLE && !mIsInLandscape) {
             continueRotationAnimation();
         } else if (getVisibility() != VISIBLE) {
             mRotationAnimation.cancel();
@@ -230,19 +226,12 @@
     }
 
     /**
-     * Handles orientation change by starting/stopping the video hint based on the
-     * new orientation.
+     * Handles device orientation change by starting/stopping the video hint based on the
+     * new device orientation.
      */
-    public void onOrientationChanged(int orientation) {
-        if (mLastOrientation == orientation) {
-            return;
-        }
-        mLastOrientation = orientation;
-        if (mLastOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
-            return;
-        }
-
-        mIsInLandscape = isInLandscape();
+    public void onOrientationChanged(OrientationManager orientationManager,
+                                     OrientationManager.DeviceOrientation deviceOrientation) {
+        mIsInLandscape = orientationManager.isInLandscape();
         if (getVisibility() == VISIBLE) {
             if (mIsInLandscape) {
                 // Landscape.
@@ -258,13 +247,4 @@
             }
         }
     }
-
-    /**
-     * Returns whether the device is in landscape based on the natural orientation
-     * and rotation from natural orientation.
-     */
-    private boolean isInLandscape() {
-        return (mLastOrientation % 180 == 90 && mIsDefaultToPortrait)
-                || (mLastOrientation % 180 == 0 && !mIsDefaultToPortrait);
-    }
 }
diff --git a/src_pd/com/android/camera/burst/BurstControllerImpl.java b/src_pd/com/android/camera/burst/BurstControllerImpl.java
new file mode 100644
index 0000000..dbf8d77
--- /dev/null
+++ b/src_pd/com/android/camera/burst/BurstControllerImpl.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.android.camera.burst;
+
+import android.content.Context;
+
+import com.android.camera.gl.FrameDistributor.FrameConsumer;
+
+/**
+ * Stub implementation for burst controller.
+ */
+class BurstControllerImpl implements BurstController {
+    /**
+     * Create a new BurstController.
+     *
+     * @param context the context of the application.
+     * @param resultsListener listener for listening to burst events.
+     */
+    public BurstControllerImpl(Context context, BurstResultsListener resultsListener) {
+    }
+
+    /**
+     * Returns true if burst mode is supported by camera.
+     */
+    public static boolean isBurstModeSupported() {
+        return false;
+    }
+
+    @Override
+    public BurstConfiguration startBurst() {
+        return null;
+    }
+
+    @Override
+    public void stopBurst(ResultsAccessor resultsAccessor) {
+        // no op
+    }
+
+    @Override
+    public void onPreviewSizeChanged(int width, int height) {
+    }
+
+    @Override
+    public void onOrientationChanged(int orientation, boolean isMirrored) {
+    }
+
+    @Override
+    public FrameConsumer getPreviewFrameConsumer() {
+        throw new IllegalStateException("Not implemented.");
+    }
+}
diff --git a/tests/src/com/android/camera/unittest/CameraUnitTest.java b/tests/src/com/android/camera/unittest/CameraUnitTest.java
index 70faa5c..2df3718 100644
--- a/tests/src/com/android/camera/unittest/CameraUnitTest.java
+++ b/tests/src/com/android/camera/unittest/CameraUnitTest.java
@@ -25,50 +25,6 @@
 
 @SmallTest
 public class CameraUnitTest extends TestCase {
-    public void testRoundOrientation() {
-        int h = CameraUtil.ORIENTATION_HYSTERESIS;
-        assertEquals(0, CameraUtil.roundOrientation(0, 0));
-        assertEquals(0, CameraUtil.roundOrientation(359, 0));
-        assertEquals(0, CameraUtil.roundOrientation(0 + 44 + h, 0));
-        assertEquals(90, CameraUtil.roundOrientation(0 + 45 + h, 0));
-        assertEquals(0, CameraUtil.roundOrientation(360 - 44 - h, 0));
-        assertEquals(270, CameraUtil.roundOrientation(360 - 45 - h, 0));
-
-        assertEquals(90, CameraUtil.roundOrientation(90, 90));
-        assertEquals(90, CameraUtil.roundOrientation(90 + 44 + h, 90));
-        assertEquals(180, CameraUtil.roundOrientation(90 + 45 + h, 90));
-        assertEquals(90, CameraUtil.roundOrientation(90 - 44 - h, 90));
-        assertEquals(0, CameraUtil.roundOrientation(90 - 45 - h, 90));
-
-        assertEquals(180, CameraUtil.roundOrientation(180, 180));
-        assertEquals(180, CameraUtil.roundOrientation(180 + 44 + h, 180));
-        assertEquals(270, CameraUtil.roundOrientation(180 + 45 + h, 180));
-        assertEquals(180, CameraUtil.roundOrientation(180 - 44 - h, 180));
-        assertEquals(90, CameraUtil.roundOrientation(180 - 45 - h, 180));
-
-        assertEquals(270, CameraUtil.roundOrientation(270, 270));
-        assertEquals(270, CameraUtil.roundOrientation(270 + 44 + h, 270));
-        assertEquals(0, CameraUtil.roundOrientation(270 + 45 + h, 270));
-        assertEquals(270, CameraUtil.roundOrientation(270 - 44 - h, 270));
-        assertEquals(180, CameraUtil.roundOrientation(270 - 45 - h, 270));
-
-        assertEquals(90, CameraUtil.roundOrientation(90, 0));
-        assertEquals(180, CameraUtil.roundOrientation(180, 0));
-        assertEquals(270, CameraUtil.roundOrientation(270, 0));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 90));
-        assertEquals(180, CameraUtil.roundOrientation(180, 90));
-        assertEquals(270, CameraUtil.roundOrientation(270, 90));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 180));
-        assertEquals(90, CameraUtil.roundOrientation(90, 180));
-        assertEquals(270, CameraUtil.roundOrientation(270, 180));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 270));
-        assertEquals(90, CameraUtil.roundOrientation(90, 270));
-        assertEquals(180, CameraUtil.roundOrientation(180, 270));
-    }
-
     public void testPrepareMatrix() {
         Matrix matrix = new Matrix();
         float[] points;
diff --git a/tests_camera/src/com/android/camera/unittest/CameraUnitTest.java b/tests_camera/src/com/android/camera/unittest/CameraUnitTest.java
index 70faa5c..2df3718 100644
--- a/tests_camera/src/com/android/camera/unittest/CameraUnitTest.java
+++ b/tests_camera/src/com/android/camera/unittest/CameraUnitTest.java
@@ -25,50 +25,6 @@
 
 @SmallTest
 public class CameraUnitTest extends TestCase {
-    public void testRoundOrientation() {
-        int h = CameraUtil.ORIENTATION_HYSTERESIS;
-        assertEquals(0, CameraUtil.roundOrientation(0, 0));
-        assertEquals(0, CameraUtil.roundOrientation(359, 0));
-        assertEquals(0, CameraUtil.roundOrientation(0 + 44 + h, 0));
-        assertEquals(90, CameraUtil.roundOrientation(0 + 45 + h, 0));
-        assertEquals(0, CameraUtil.roundOrientation(360 - 44 - h, 0));
-        assertEquals(270, CameraUtil.roundOrientation(360 - 45 - h, 0));
-
-        assertEquals(90, CameraUtil.roundOrientation(90, 90));
-        assertEquals(90, CameraUtil.roundOrientation(90 + 44 + h, 90));
-        assertEquals(180, CameraUtil.roundOrientation(90 + 45 + h, 90));
-        assertEquals(90, CameraUtil.roundOrientation(90 - 44 - h, 90));
-        assertEquals(0, CameraUtil.roundOrientation(90 - 45 - h, 90));
-
-        assertEquals(180, CameraUtil.roundOrientation(180, 180));
-        assertEquals(180, CameraUtil.roundOrientation(180 + 44 + h, 180));
-        assertEquals(270, CameraUtil.roundOrientation(180 + 45 + h, 180));
-        assertEquals(180, CameraUtil.roundOrientation(180 - 44 - h, 180));
-        assertEquals(90, CameraUtil.roundOrientation(180 - 45 - h, 180));
-
-        assertEquals(270, CameraUtil.roundOrientation(270, 270));
-        assertEquals(270, CameraUtil.roundOrientation(270 + 44 + h, 270));
-        assertEquals(0, CameraUtil.roundOrientation(270 + 45 + h, 270));
-        assertEquals(270, CameraUtil.roundOrientation(270 - 44 - h, 270));
-        assertEquals(180, CameraUtil.roundOrientation(270 - 45 - h, 270));
-
-        assertEquals(90, CameraUtil.roundOrientation(90, 0));
-        assertEquals(180, CameraUtil.roundOrientation(180, 0));
-        assertEquals(270, CameraUtil.roundOrientation(270, 0));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 90));
-        assertEquals(180, CameraUtil.roundOrientation(180, 90));
-        assertEquals(270, CameraUtil.roundOrientation(270, 90));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 180));
-        assertEquals(90, CameraUtil.roundOrientation(90, 180));
-        assertEquals(270, CameraUtil.roundOrientation(270, 180));
-
-        assertEquals(0, CameraUtil.roundOrientation(0, 270));
-        assertEquals(90, CameraUtil.roundOrientation(90, 270));
-        assertEquals(180, CameraUtil.roundOrientation(180, 270));
-    }
-
     public void testPrepareMatrix() {
         Matrix matrix = new Matrix();
         float[] points;