am ea77c6b2: Don't release resources until camera is closed.

* commit 'ea77c6b2f9a602712c19ed0f441c6439b2f168de':
  Don't release resources until camera is closed.
diff --git a/jni/Android.mk b/jni/Android.mk
index 0f95c9b..e128705 100755
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -19,7 +19,7 @@
 
 LOCAL_CFLAGS := -std=c++11
 LOCAL_NDK_STL_VARIANT := c++_static
-LOCAL_LDFLAGS   := -llog -ldl
+LOCAL_LDFLAGS   := -llog -ldl -ljnigraphics
 LOCAL_SDK_VERSION := 9
 LOCAL_MODULE    := libjni_jpegutil
 LOCAL_SRC_FILES := jpegutil.cpp jpegutilnative.cpp
diff --git a/jni/jpegutilnative.cpp b/jni/jpegutilnative.cpp
index cd5a2b3..7012b97 100644
--- a/jni/jpegutilnative.cpp
+++ b/jni/jpegutilnative.cpp
@@ -57,3 +57,68 @@
   return jpegutil::compress(yP, cbP, crP, (unsigned char*)out, outBufCapacity,
                             flush, quality);
 }
+
+/**
+ * Copies the Image.Plane specified by planeBuf, pStride, and rStride to the
+ * Bitmap.
+ *
+ * @param env the JNI environment
+ * @param clazz the java class
+ * @param width the width of the output image
+ * @param height the height of the output image
+ * @param planeBuf the native ByteBuffer containing the image plane data
+ * @param pStride the stride between adjacent pixels in the same row of planeBuf
+ * @param rStride the stride between adjacent rows in planeBuf
+ * @param rot90 the multiple of 90 degrees to rotate, one of {0, 1, 2, 3}.
+ */
+extern "C" JNIEXPORT void JNICALL
+    Java_com_android_camera_util_JpegUtilNative_copyImagePlaneToBitmap(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject planeBuf,
+        jint pStride, jint rStride, jobject outBitmap, jint rot90) {
+  jbyte* src = (jbyte*)env->GetDirectBufferAddress(planeBuf);
+
+  char* dst = 0;
+  AndroidBitmap_lockPixels(env, outBitmap, (void**) &dst);
+
+  if (rot90 == 0) {
+    // No rotation
+    for (int y = 0; y < height; y++) {
+      char* srcPtr = reinterpret_cast<char*>(&src[y * rStride]);
+      char* dstPtr = &dst[y * width];
+      for (int x = 0; x < width; x++) {
+        *dstPtr = *srcPtr;
+        srcPtr += pStride;
+        dstPtr++;
+      }
+    }
+  } else if (rot90 == 1) {
+    // 90-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = height - 1 - y;
+        int srcY = x;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  } else if (rot90 == 2) {
+    // 180-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = width - 1 - x;
+        int srcY = height - 1 - y;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  } else if (rot90 == 3) {
+    // 270-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = y;
+        int srcY = width - 1 - x;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  }
+
+  AndroidBitmap_unlockPixels(env, outBitmap);
+}
diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java
index af9af0f..8b3e401 100644
--- a/src/com/android/camera/CameraActivity.java
+++ b/src/com/android/camera/CameraActivity.java
@@ -141,7 +141,9 @@
 import com.bumptech.glide.Glide;
 import com.bumptech.glide.GlideBuilder;
 import com.bumptech.glide.MemoryCategory;
+import com.bumptech.glide.load.DecodeFormat;
 import com.bumptech.glide.load.engine.executor.FifoPriorityThreadPoolExecutor;
+
 import com.google.common.logging.eventprotos;
 import com.google.common.logging.eventprotos.ForegroundEvent.ForegroundSource;
 import com.google.common.logging.eventprotos.MediaInteraction;
@@ -1371,7 +1373,8 @@
         CameraPerformanceTracker.onEvent(CameraPerformanceTracker.ACTIVITY_START);
         if (!Glide.isSetup()) {
             Glide.setup(new GlideBuilder(this)
-                .setResizeService(new FifoPriorityThreadPoolExecutor(2)));
+                    .setDecodeFormat(DecodeFormat.ALWAYS_ARGB_8888)
+                    .setResizeService(new FifoPriorityThreadPoolExecutor(2)));
             Glide.get(this).setMemoryCategory(MemoryCategory.HIGH);
         }
 
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index 0e639d8..9b20ee9 100644
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -1031,6 +1031,7 @@
             mState = ModuleState.UPDATE_TRANSFORM_ON_NEXT_SURFACE_TEXTURE_UPDATE;
         }
         mAppController.onPreviewStarted();
+        onReadyStateChanged(true);
     }
 
     /**
@@ -1351,11 +1352,6 @@
         // Important: Camera2 buffers are already rotated to the natural
         // orientation of the device (at least for the back-camera).
 
-        // TODO: Remove this hack for the front camera as soon as b/16637957 is
-        // fixed.
-        if (mCameraFacing == Facing.FRONT) {
-            deviceOrientationDegrees += 180;
-        }
         return (360 - deviceOrientationDegrees) % 360;
     }
 
diff --git a/src/com/android/camera/app/CameraApp.java b/src/com/android/camera/app/CameraApp.java
index 9006183..6c35c53 100644
--- a/src/com/android/camera/app/CameraApp.java
+++ b/src/com/android/camera/app/CameraApp.java
@@ -19,6 +19,7 @@
 import android.app.Application;
 import android.app.NotificationManager;
 import android.content.Context;
+import android.os.Debug;
 
 import com.android.camera.MediaSaverImpl;
 import com.android.camera.debug.LogHelper;
@@ -40,6 +41,15 @@
  * to be used across modules.
  */
 public class CameraApp extends Application implements CameraServices {
+    /**
+     * This is for debugging only: If set to true, application will not start
+     * until a debugger is attached.
+     * <p>
+     * Use this if you need to debug code that is executed while the app starts
+     * up and it would be too late to attach a debugger afterwards.
+     */
+    private static final boolean WAIT_FOR_DEBUGGER_ON_START = false;
+
     private MediaSaver mMediaSaver;
     private CaptureSessionManager mSessionManager;
     private SessionStorageManager mSessionStorageManager;
@@ -53,6 +63,10 @@
     public void onCreate() {
         super.onCreate();
 
+        if (WAIT_FOR_DEBUGGER_ON_START) {
+            Debug.waitForDebugger();
+        }
+
         Context context = getApplicationContext();
         LogHelper.initialize(context);
 
diff --git a/src/com/android/camera/data/LocalDataUtil.java b/src/com/android/camera/data/LocalDataUtil.java
index 00ace49..c2cdb99 100644
--- a/src/com/android/camera/data/LocalDataUtil.java
+++ b/src/com/android/camera/data/LocalDataUtil.java
@@ -47,6 +47,8 @@
     }
 
     /**
+     * Checks whether the MIME type represents an image media item.
+     *
      * @param mimeType The MIME type to check.
      * @return Whether the MIME is a image type.
      */
diff --git a/src/com/android/camera/data/LocalMediaData.java b/src/com/android/camera/data/LocalMediaData.java
index 3f7e413..d5a6885 100644
--- a/src/com/android/camera/data/LocalMediaData.java
+++ b/src/com/android/camera/data/LocalMediaData.java
@@ -34,9 +34,9 @@
 import com.android.camera.debug.Log;
 import com.android.camera.util.CameraUtil;
 import com.android.camera2.R;
-import com.bumptech.glide.BitmapRequestBuilder;
+import com.bumptech.glide.DrawableRequestBuilder;
 import com.bumptech.glide.Glide;
-import com.bumptech.glide.load.resource.bitmap.BitmapEncoder;
+import com.bumptech.glide.load.engine.DiskCacheStrategy;
 
 import java.io.File;
 import java.text.DateFormat;
@@ -73,10 +73,6 @@
     protected final double mLongitude;
     protected final Bundle mMetaData;
 
-    private static final int JPEG_COMPRESS_QUALITY = 90;
-    private static final BitmapEncoder JPEG_ENCODER =
-            new BitmapEncoder(Bitmap.CompressFormat.JPEG, JPEG_COMPRESS_QUALITY);
-
     /**
      * Used for thumbnail loading optimization. True if this data has a
      * corresponding visible view.
@@ -213,7 +209,6 @@
         v.setContentDescription(context.getResources().getString(
                 R.string.media_date_content_description,
                 getReadableDate(mDateModifiedInSeconds)));
-
         return v;
     }
 
@@ -383,7 +378,7 @@
 
         static List<LocalData> query(ContentResolver cr, Uri uri, long lastId) {
             return queryLocalMediaData(cr, uri, QUERY_PROJECTION, lastId, QUERY_ORDER,
-                    new PhotoDataBuilder());
+                new PhotoDataBuilder());
         }
 
         private static PhotoData buildFromCursor(Cursor c) {
@@ -538,7 +533,8 @@
 
             final int overrideWidth;
             final int overrideHeight;
-            final BitmapRequestBuilder<Uri, Bitmap> thumbnailRequest;
+
+            final DrawableRequestBuilder<Uri> thumbnailRequest;
             if (full) {
                 // Load up to the maximum size Bitmap we can render.
                 overrideWidth = Math.min(getWidth(), MAXIMUM_TEXTURE_SIZE);
@@ -551,6 +547,7 @@
                     .override(thumbWidth, thumbHeight)
                     .fitCenter()
                     .thumbnail(loadMediaStoreThumb(context));
+
             } else {
                 // Load a medium quality thumbWidth/thumbHeight image.
                 overrideWidth = thumbWidth;
@@ -561,6 +558,7 @@
             }
 
             loadUri(context)
+                .diskCacheStrategy(full ? DiskCacheStrategy.NONE : DiskCacheStrategy.RESULT)
                 .placeholder(placeHolderResourceId)
                 .fitCenter()
                 .override(overrideWidth, overrideHeight)
@@ -568,18 +566,16 @@
                 .into(imageView);
         }
 
-        /** Loads a thumbnail with a size targeted to use MediaStore.Images.Thumbnails. */
-        private BitmapRequestBuilder<Uri, Bitmap> loadMediaStoreThumb(Context context) {
-            return loadUri(context)
-                .override(MEDIASTORE_THUMB_WIDTH, MEDIASTORE_THUMB_HEIGHT);
+        /** Loads an image using a MediaStore Uri with our default options. */
+        private DrawableRequestBuilder<Uri> loadUri(Context context) {
+            return Glide.with(context)
+                .loadFromMediaStore(getUri(), mMimeType, mDateModifiedInSeconds, mOrientation);
         }
 
-        /** Loads an image using a MediaStore Uri with our default options. */
-        private BitmapRequestBuilder<Uri, Bitmap> loadUri(Context context) {
-            return Glide.with(context)
-                .loadFromMediaStore(getUri(), mMimeType, mDateModifiedInSeconds, mOrientation)
-                .asBitmap()
-                .encoder(JPEG_ENCODER);
+        /** Loads a thumbnail with a size targeted to use MediaStore.Images.Thumbnails. */
+        private DrawableRequestBuilder<Uri> loadMediaStoreThumb(Context context) {
+            return loadUri(context)
+                .override(MEDIASTORE_THUMB_WIDTH, MEDIASTORE_THUMB_HEIGHT);
         }
 
         @Override
@@ -821,12 +817,8 @@
 
             Glide.with(context)
                 .loadFromMediaStore(getUri(), mMimeType, mDateModifiedInSeconds, 0)
-                .asBitmap()
-                .encoder(JPEG_ENCODER)
                 .thumbnail(Glide.with(context)
                     .loadFromMediaStore(getUri(), mMimeType, mDateModifiedInSeconds, 0)
-                    .asBitmap()
-                    .encoder(JPEG_ENCODER)
                     .override(MEDIASTORE_THUMB_WIDTH, MEDIASTORE_THUMB_HEIGHT))
                 .placeholder(placeHolderResourceId)
                 .fitCenter()
@@ -896,7 +888,7 @@
 
         @Override
         public VideoData build(Cursor cursor) {
-            return LocalMediaData.VideoData.buildFromCursor(cursor);
+            return VideoData.buildFromCursor(cursor);
         }
     }
 
diff --git a/src/com/android/camera/data/LocalSessionData.java b/src/com/android/camera/data/LocalSessionData.java
index 8254baa..13a722d 100644
--- a/src/com/android/camera/data/LocalSessionData.java
+++ b/src/com/android/camera/data/LocalSessionData.java
@@ -27,6 +27,8 @@
 import com.android.camera2.R;
 import com.bumptech.glide.Glide;
 import com.bumptech.glide.load.DecodeFormat;
+import com.bumptech.glide.load.engine.DiskCacheStrategy;
+import com.bumptech.glide.signature.StringSignature;
 
 import java.util.Date;
 import java.util.concurrent.TimeUnit;
@@ -71,8 +73,9 @@
         byte[] jpegData = Storage.getJpegForSession(mUri);
         int currentVersion = Storage.getJpegVersionForSession(mUri);
         Glide.with(context)
-            .loadFromImage(jpegData, mUri.toString() + currentVersion)
-            .skipDiskCache(true)
+            .load(jpegData)
+            .diskCacheStrategy(DiskCacheStrategy.NONE)
+            .signature(new StringSignature(mUri.toString() + currentVersion))
             .fitCenter()
             .into(imageView);
 
diff --git a/src/com/android/camera/debug/DebugPropertyHelper.java b/src/com/android/camera/debug/DebugPropertyHelper.java
index d04e163..dd1d82a 100644
--- a/src/com/android/camera/debug/DebugPropertyHelper.java
+++ b/src/com/android/camera/debug/DebugPropertyHelper.java
@@ -19,9 +19,6 @@
 import com.android.camera.util.SystemProperties;
 
 public class DebugPropertyHelper {
-    /** Make app start with CaptureModule + ZSL. */
-    private static final boolean FORCE_ZSL_APP = false;
-
     private static final String OFF_VALUE = "0";
     private static final String ON_VALUE = "1";
 
@@ -38,7 +35,7 @@
      */
     private static final String PROP_CAPTURE_DEBUG_UI = PREFIX + ".debug_ui";
     /** Switch between OneCameraImpl and OneCameraZslImpl. */
-    private static final String PROP_ENABLE_ZSL = PREFIX + ".zsl";
+    private static final String PROP_FORCE_LEGACY_ONE_CAMERA = PREFIX + ".legacy";
     /** Write data about each capture request to disk. */
     private static final String PROP_WRITE_CAPTURE_DATA = PREFIX + ".capture_write";
 
@@ -47,11 +44,11 @@
     }
 
     public static boolean isCaptureModuleEnabled() {
-        return isPropertyOn(PROP_ENABLE_CAPTURE_MODULE) || FORCE_ZSL_APP;
+        return isPropertyOn(PROP_ENABLE_CAPTURE_MODULE);
     }
 
-    public static boolean isZslEnabled() {
-        return isPropertyOn(PROP_ENABLE_ZSL) || FORCE_ZSL_APP;
+    public static boolean forceLegacyOneCamera() {
+        return isPropertyOn(PROP_FORCE_LEGACY_ONE_CAMERA);
     }
 
     public static boolean showFrameDebugLog() {
diff --git a/src/com/android/camera/module/ModulesInfo.java b/src/com/android/camera/module/ModulesInfo.java
index 62ad8e5..7f87f4e 100644
--- a/src/com/android/camera/module/ModulesInfo.java
+++ b/src/com/android/camera/module/ModulesInfo.java
@@ -25,6 +25,7 @@
 import com.android.camera.app.ModuleManager;
 import com.android.camera.debug.DebugPropertyHelper;
 import com.android.camera.debug.Log;
+import com.android.camera.util.ApiHelper;
 import com.android.camera.util.GcamHelper;
 import com.android.camera.util.PhotoSphereHelper;
 import com.android.camera.util.RefocusHelper;
@@ -39,8 +40,7 @@
     private static final Log.Tag TAG = new Log.Tag("ModulesInfo");
 
     /** Selects CaptureModule if true, PhotoModule if false. */
-    private static final boolean ENABLE_CAPTURE_MODULE =
-            DebugPropertyHelper.isCaptureModuleEnabled();
+    private static final boolean ENABLE_CAPTURE_MODULE = ApiHelper.HAS_CAMERA_2_API;
 
     public static void setupModules(Context context, ModuleManager moduleManager) {
         int photoModuleId = context.getResources().getInteger(R.integer.camera_mode_photo);
diff --git a/src/com/android/camera/one/OneCameraManager.java b/src/com/android/camera/one/OneCameraManager.java
index 7cd57ff..cb6d142 100644
--- a/src/com/android/camera/one/OneCameraManager.java
+++ b/src/com/android/camera/one/OneCameraManager.java
@@ -75,8 +75,7 @@
     }
 
     /**
-     * Creates a new camera manager that is based on Camera2 API, if available,
-     * or otherwise uses the portability API.
+     * Creates a new camera manager that is based on Camera2 API, if available.
      *
      * @throws OneCameraException Thrown if an error occurred while trying to
      *             access the camera.
@@ -92,50 +91,11 @@
             cameraManager = null;
             Log.e(TAG, "Could not get camera service v2", ex);
         }
-        if (cameraManager != null && isCamera2Supported(cameraManager)) {
-            int maxMemoryMB = activity.getServices().getMemoryManager()
-                    .getMaxAllowedNativeMemoryAllocation();
-            return new com.android.camera.one.v2.OneCameraManagerImpl(
-                    activity.getApplicationContext(), cameraManager, maxMemoryMB,
-                    displayMetrics, activity.getSoundPlayer());
-        } else {
-            return new com.android.camera.one.v1.OneCameraManagerImpl();
-        }
-    }
-
-    /**
-     * Returns whether the device fully supports API2
-     *
-     * @param cameraManager the Camera2 API manager.
-     * @return If this device is only emulating Camera2 API on top of an older
-     *         HAL (such as the Nexus 4, 7 or 10), this method returns false. It
-     *         only returns true, if Camera2 is fully supported through newer
-     *         HALs.
-     * @throws OneCameraException Thrown if an error occurred while trying to
-     *             access the camera.
-     */
-    private static boolean isCamera2Supported(CameraManager cameraManager)
-            throws OneCameraException {
-        if (!ApiHelper.HAS_CAMERA_2_API) {
-            return false;
-        }
-        try {
-            String[] cameraIds = cameraManager.getCameraIdList();
-            if (cameraIds.length == 0) {
-                throw new OneCameraException("Camera 2 API supported but no devices available.");
-            }
-            final String id = cameraIds[0];
-            // TODO: We should check for all the flags we need to ensure the
-            // device is capable of taking Camera2 API shots. For now, let's
-            // accept all device that are either 'partial' or 'full' devices
-            // (but not legacy).
-            return cameraManager.getCameraCharacteristics(id).get(
-                    CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
-                != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
-        } catch (CameraAccessException ex) {
-            Log.e(TAG, "Could not access camera to determine hardware-level API support.");
-            return false;
-        }
+        int maxMemoryMB = activity.getServices().getMemoryManager()
+                .getMaxAllowedNativeMemoryAllocation();
+        return new com.android.camera.one.v2.OneCameraManagerImpl(
+                activity.getApplicationContext(), cameraManager, maxMemoryMB,
+                displayMetrics, activity.getSoundPlayer());
     }
 
     private static DisplayMetrics getDisplayMetrics(Context context) {
diff --git a/src/com/android/camera/one/v2/ImageCaptureManager.java b/src/com/android/camera/one/v2/ImageCaptureManager.java
index 0687071..09c2bdb 100644
--- a/src/com/android/camera/one/v2/ImageCaptureManager.java
+++ b/src/com/android/camera/one/v2/ImageCaptureManager.java
@@ -50,7 +50,7 @@
  * Implements {@link android.media.ImageReader.OnImageAvailableListener} and
  * {@link android.hardware.camera2.CameraCaptureSession.CaptureListener} to
  * store the results of capture requests (both {@link Image}s and
- * {@link TotalCaptureResult}s in a ring-buffer from which they may be saved.
+ * {@link TotalCaptureResult}s in a ring-buffer from which they may be saved. 
  * <br>
  * This also manages the lifecycle of {@link Image}s within the application as
  * they are passed in from the lower-level camera2 API.
@@ -102,7 +102,7 @@
      * Callback for saving an image.
      */
     public interface ImageCaptureListener {
-         /**
+        /**
          * Called with the {@link Image} and associated
          * {@link TotalCaptureResult}. A typical implementation would save this
          * to disk.
diff --git a/src/com/android/camera/one/v2/OneCameraImpl.java b/src/com/android/camera/one/v2/OneCameraImpl.java
index 8f7dc44..9c286df 100644
--- a/src/com/android/camera/one/v2/OneCameraImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraImpl.java
@@ -63,7 +63,8 @@
 import java.util.List;
 
 /**
- * {@link OneCamera} implementation directly on top of the Camera2 API.
+ * {@link OneCamera} implementation directly on top of the Camera2 API for
+ * cameras without API 2 FULL support (limited or legacy).
  */
 public class OneCameraImpl extends AbstractOneCamera {
 
@@ -94,7 +95,7 @@
      * ImageFormat.YUV_420_888 to use the software encoder. No other image
      * formats are supported.
      */
-    private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
+    private static final int sCaptureImageFormat = ImageFormat.JPEG;
 
     /** Duration to hold after manual focus tap. */
     private static final int FOCUS_HOLD_MILLIS = Settings3A.getFocusHoldMillis();
diff --git a/src/com/android/camera/one/v2/OneCameraZslImpl.java b/src/com/android/camera/one/v2/OneCameraZslImpl.java
index 63f6a90..e3d2444 100644
--- a/src/com/android/camera/one/v2/OneCameraZslImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraZslImpl.java
@@ -1,17 +1,15 @@
 /*
  * Copyright (C) 2014 The Android Open Source Project
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
  */
 
 package com.android.camera.one.v2;
@@ -58,17 +56,20 @@
 import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
 import com.android.camera.session.CaptureSession;
 import com.android.camera.util.CameraUtil;
-import com.android.camera.util.ConjunctionListenerMux;
+import com.android.camera.util.ListenerCombiner;
 import com.android.camera.util.JpegUtilNative;
 import com.android.camera.util.Size;
 
 import java.nio.ByteBuffer;
+import java.security.InvalidParameterException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicLong;
 
 /**
  * {@link OneCamera} implementation directly on top of the Camera2 API with zero
@@ -80,10 +81,10 @@
     private static final Tag TAG = new Tag("OneCameraZslImpl2");
 
     /** Default JPEG encoding quality. */
-    private static final int JPEG_QUALITY = CameraProfile.getJpegEncodingQualityParameter(
-            CameraProfile.QUALITY_HIGH);
+    private static final int JPEG_QUALITY =
+            CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
     /**
-     * The maximum number of images to store in the full-size ZSL ring buffer.
+     * The maximum number of images to store in the full-size ZSL ring buffer. 
      * <br>
      * TODO: Determine this number dynamically based on available memory and the
      * size of frames.
@@ -164,17 +165,18 @@
     private ImageCaptureManager mCaptureManager;
 
     /**
-     * The sensor timestamp (which may not be relative to the system time) of
-     * the most recently captured image.
+     * The sensor timestamps (which may not be relative to the system time) of
+     * the most recently captured images.
      */
-    private final AtomicLong mLastCapturedImageTimestamp = new AtomicLong(0);
+    private final Set<Long> mCapturedImageTimestamps = Collections.synchronizedSet(
+            new HashSet<Long>());
 
     /** Thread pool for performing slow jpeg encoding and saving tasks. */
     private final ThreadPoolExecutor mImageSaverThreadPool;
 
     /** Pool of native byte buffers on which to store jpeg-encoded images. */
-    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool = new
-            Pools.SynchronizedPool<ByteBuffer>(64);
+    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
+            new Pools.SynchronizedPool<ByteBuffer>(64);
 
     /** Current zoom value. 1.0 is no zoom. */
     private float mZoomValue = 1f;
@@ -194,23 +196,22 @@
      * <li>We must not be in the process of capturing a single, high-quality,
      * image.</li>
      * </ol>
-     * See {@link ConjunctionListenerMux} and {@link #mReadyStateManager} for
+     * See {@link ListenerCombiner} and {@link #mReadyStateManager} for
      * details of how this is managed.
      */
     private static enum ReadyStateRequirement {
-        CAPTURE_MANAGER_READY,
-        CAPTURE_NOT_IN_PROGRESS
+        CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
     }
 
     /**
      * Handles the thread-safe logic of dispatching whenever the logical AND of
      * these constraints changes.
      */
-    private final ConjunctionListenerMux<ReadyStateRequirement>
-            mReadyStateManager = new ConjunctionListenerMux<ReadyStateRequirement>(
-                    ReadyStateRequirement.class, new ConjunctionListenerMux.OutputChangeListener() {
+    private final ListenerCombiner<ReadyStateRequirement>
+            mReadyStateManager = new ListenerCombiner<ReadyStateRequirement>(
+                    ReadyStateRequirement.class, new ListenerCombiner.StateChangeListener() {
                             @Override
-                        public void onOutputChange(boolean state) {
+                        public void onStateChange(boolean state) {
                             broadcastReadyState(state);
                         }
                     });
@@ -223,32 +224,44 @@
         private final PhotoCaptureParameters mParams;
         private final CaptureSession mSession;
 
-        public ImageCaptureTask(PhotoCaptureParameters parameters,
-                CaptureSession session) {
+        public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
             mParams = parameters;
             mSession = session;
         }
 
         @Override
-        public void onImageCaptured(Image image, TotalCaptureResult
-                captureResult) {
+        public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
             long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
 
-            // We should only capture the image if it's more recent than the
-            // latest one. Synchronization is necessary since this method is
-            // called on {@link #mImageSaverThreadPool}.
-            synchronized (mLastCapturedImageTimestamp) {
-                if (timestamp > mLastCapturedImageTimestamp.get()) {
-                    mLastCapturedImageTimestamp.set(timestamp);
+            // We should only capture the image if it hasn't been captured
+            // before. Synchronization is necessary since
+            // mCapturedImageTimestamps is read & modified elsewhere.
+            synchronized (mCapturedImageTimestamps) {
+                if (!mCapturedImageTimestamps.contains(timestamp)) {
+                    mCapturedImageTimestamps.add(timestamp);
                 } else {
                     // There was a more recent (or identical) image which has
                     // begun being saved, so abort.
                     return;
                 }
+
+                // Clear out old timestamps from the set.
+                // We must keep old timestamps in the set a little longer (a
+                // factor of 2 seems adequate) to ensure they are cleared out of
+                // the ring buffer before their timestamp is removed from the
+                // set.
+                long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
+                if (mCapturedImageTimestamps.size() > maxTimestamps) {
+                    ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
+                    Collections.sort(timestamps);
+                    for (int i = 0; i < timestamps.size()
+                            && mCapturedImageTimestamps.size() > maxTimestamps; i++) {
+                        mCapturedImageTimestamps.remove(timestamps.get(i));
+                    }
+                }
             }
 
-            mReadyStateManager.setInput(
-                    ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
+            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
 
             mSession.startEmpty();
             savePicture(image, mParams, mSession);
@@ -287,8 +300,9 @@
         mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
                 TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
 
-        mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
-                mImageSaverThreadPool);
+        mCaptureManager =
+                new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
+                        mImageSaverThreadPool);
         mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
                 @Override
             public void onReadyStateChange(boolean capturePossible) {
@@ -304,10 +318,13 @@
                 @Override
                     public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                             CaptureResult result) {
-                        mFocusStateListener.onFocusStatusUpdate(
-                                AutoFocusHelper.stateFromCamera2State(
-                                        result.get(CaptureResult.CONTROL_AF_STATE)),
+                        FocusStateListener listener = mFocusStateListener;
+                        if (listener != null) {
+                            listener.onFocusStatusUpdate(
+                                    AutoFocusHelper.stateFromCamera2State(
+                                            result.get(CaptureResult.CONTROL_AF_STATE)),
                                 result.getFrameNumber());
+                        }
                     }
                 });
 
@@ -330,14 +347,14 @@
      * @return The largest supported picture size.
      */
     public Size getDefaultPictureSize() {
-        StreamConfigurationMap configs = mCharacteristics.get(
-                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        StreamConfigurationMap configs =
+                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);
 
         // Find the largest supported size.
         android.util.Size largestSupportedSize = supportedSizes[0];
-        long largestSupportedSizePixels = largestSupportedSize.getWidth()
-                * largestSupportedSize.getHeight();
+        long largestSupportedSizePixels =
+                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
         for (int i = 0; i < supportedSizes.length; i++) {
             long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
             if (numPixels > largestSupportedSizePixels) {
@@ -346,8 +363,7 @@
             }
         }
 
-        return new Size(largestSupportedSize.getWidth(),
-                largestSupportedSize.getHeight());
+        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
     }
 
     private void onShutterInvokeUI(final PhotoCaptureParameters params) {
@@ -364,15 +380,14 @@
     public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
         params.checkSanity();
 
-        mReadyStateManager.setInput(
-                ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
+        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
 
         boolean useZSL = ZSL_ENABLED;
 
         // We will only capture images from the zsl ring-buffer which satisfy
         // this constraint.
-        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<
-                ImageCaptureManager.CapturedImageConstraint>();
+        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
+                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
         zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
                 @Override
             public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
@@ -384,14 +399,30 @@
                 Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
                 Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);
 
-                if (timestamp <= mLastCapturedImageTimestamp.get()) {
-                    // Don't save frames older than the most
-                    // recently-captured frame.
-                    // TODO This technically has a race condition in which
-                    // duplicate frames may be saved, but if a user is
-                    // tapping at >30Hz, duplicate images may be what they
-                    // expect.
-                    return false;
+                if (lensState == null) {
+                    lensState = CaptureResult.LENS_STATE_STATIONARY;
+                }
+                if (flashState == null) {
+                    flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
+                }
+                if (flashMode == null) {
+                    flashMode = CaptureResult.FLASH_MODE_OFF;
+                }
+                if (aeState == null) {
+                    aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
+                }
+                if (afState == null) {
+                    afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
+                }
+                if (awbState == null) {
+                    awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
+                }
+
+                synchronized (mCapturedImageTimestamps) {
+                    if (mCapturedImageTimestamps.contains(timestamp)) {
+                        // Don't save frames which we've already saved.
+                        return false;
+                    }
                 }
 
                 if (lensState == CaptureResult.LENS_STATE_MOVING) {
@@ -434,8 +465,8 @@
         });
         // This constraint lets us capture images which have been explicitly
         // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
-        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<
-                ImageCaptureManager.CapturedImageConstraint>();
+        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
+                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
         singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
                 @Override
             public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
@@ -479,14 +510,15 @@
                             new MetadataChangeListener() {
                             @Override
                                 public void onImageMetadataChange(Key<?> key, Object oldValue,
-                                        Object newValue, CaptureResult result) {
+                                        Object newValue,
+                                        CaptureResult result) {
                                     Log.v(TAG, "AE State Changed");
-                                    if (oldValue.equals(
-                                            Integer.valueOf(
-                                                    CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
+                                    if (oldValue.equals(Integer.valueOf(
+                                            CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                         mCaptureManager.removeMetadataChangeListener(key, this);
                                         sendSingleRequest(params);
-                                        // TODO: Delay this until onCaptureStarted().
+                                        // TODO: Delay this until
+                                        // onCaptureStarted().
                                         onShutterInvokeUI(params);
                                     }
                                 }
@@ -539,7 +571,7 @@
             return;
         }
         try {
-            mCaptureSession.abortCaptures();
+            mCaptureSession.stopRepeating();
         } catch (CameraAccessException e) {
             Log.e(TAG, "Could not abort captures in progress.");
         }
@@ -548,12 +580,13 @@
         mCameraThread.quitSafely();
         mDevice.close();
         mCaptureManager.close();
+        mCaptureImageReader.close();
     }
 
     @Override
     public Size[] getSupportedSizes() {
-        StreamConfigurationMap config = mCharacteristics
-                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        StreamConfigurationMap config =
+                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         return Size.convert(config.getOutputSizes(sCaptureImageFormat));
     }
 
@@ -578,34 +611,34 @@
             CaptureSession session) {
         int heading = captureParams.heading;
 
-        int width = image.getWidth();
-        int height = image.getHeight();
-        int rotation = 0;
+        int degrees = (captureParams.orientation + 270) % 360;
         ExifInterface exif = null;
 
         exif = new ExifInterface();
         // TODO: Add more exif tags here.
 
-        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, width));
-        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, height));
+        Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
+                degrees);
 
-        // TODO: Handle rotation correctly.
+        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
+        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));
+
+        exif.setTag(
+                exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));
 
         // Set GPS heading direction based on sensor, if location is on.
         if (heading >= 0) {
-            ExifTag directionRefTag = exif.buildTag(
-                    ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
+            ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                     ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
-            ExifTag directionTag = exif.buildTag(
-                    ExifInterface.TAG_GPS_IMG_DIRECTION,
-                    new Rational(heading, 1));
+            ExifTag directionTag =
+                    exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
             exif.setTag(directionRefTag);
             exif.setTag(directionTag);
         }
-
-        session.saveAndFinish(acquireJpegBytes(image), width, height, rotation, exif,
-                new OnMediaSavedListener() {
-                @Override
+        // TODO Find out why this is off by -90 degrees.
+        session.saveAndFinish(acquireJpegBytes(image, degrees),
+                size.getWidth(), size.getHeight(), 0, exif, new OnMediaSavedListener() {
+                        @Override
                     public void onMediaSaved(Uri uri) {
                         captureParams.callback.onPictureSaved(uri);
                     }
@@ -697,8 +730,7 @@
                 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
                 break;
             case OFF:
-                builder.set(CaptureRequest.CONTROL_AE_MODE,
-                        CaptureRequest.CONTROL_AE_MODE_ON);
+                builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                 break;
             case AUTO:
@@ -719,8 +751,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -742,8 +773,7 @@
 
             addRegionsToCaptureRequestBuilder(builder);
 
-            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
             return true;
         } catch (CameraAccessException e) {
             if (ZSL_ENABLED) {
@@ -789,8 +819,7 @@
                         CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
             }
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
             return true;
         } catch (CameraAccessException e) {
             Log.v(TAG, "Could not execute single still capture request.", e);
@@ -803,8 +832,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -823,8 +851,7 @@
             addRegionsToCaptureRequestBuilder(builder);
             addFlashToCaptureRequestBuilder(builder, flashMode);
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
 
             return true;
         } catch (CameraAccessException e) {
@@ -840,8 +867,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -859,8 +885,7 @@
             builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
             builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
 
             return true;
         } catch (CameraAccessException e) {
@@ -881,8 +906,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -924,14 +948,35 @@
     }
 
     /**
+     * @param originalWidth the width of the original image captured from the
+     *            camera
+     * @param originalHeight the height of the original image captured from the
+     *            camera
+     * @param orientation the rotation to apply, in degrees.
+     * @return The size of the final rotated image
+     */
+    private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
+            int orientation) {
+        if (orientation == 0 || orientation == 180) {
+            return new Size(originalWidth, originalHeight);
+        } else if (orientation == 90 || orientation == 270) {
+            return new Size(originalHeight, originalWidth);
+        } else {
+            throw new InvalidParameterException("Orientation not supported.");
+        }
+    }
+
+    /**
      * Given an image reader, extracts the JPEG image bytes and then closes the
      * reader.
      *
      * @param img the image from which to extract jpeg bytes or compress to
      *            jpeg.
+     * @param degrees the angle to rotate the image, in degrees. Rotation is
+     *            only applied to YUV images.
      * @return The bytes of the JPEG image. Newly allocated.
      */
-    private byte[] acquireJpegBytes(Image img) {
+    private byte[] acquireJpegBytes(Image img, int degrees) {
         ByteBuffer buffer;
 
         if (img.getFormat() == ImageFormat.JPEG) {
@@ -948,7 +993,8 @@
                 buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
             }
 
-            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY);
+            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY,
+                    degrees);
 
             if (numBytes < 0) {
                 throw new RuntimeException("Error compressing jpeg.");
diff --git a/src/com/android/camera/session/CaptureSession.java b/src/com/android/camera/session/CaptureSession.java
index 0e3baea..7b3c7f0 100644
--- a/src/com/android/camera/session/CaptureSession.java
+++ b/src/com/android/camera/session/CaptureSession.java
@@ -131,10 +131,11 @@
     public void finishWithFailure(CharSequence reason);
 
     /**
-     * Returns the path to the final output of this session. This is only
-     * available after startSession has been called.
+     * Returns the path to where the final output of this session should be
+     * stored. This is only available after startSession has been called and
+     * will become unavailable after finish() was called.
      */
-    public String getPath();
+    public String getTempOutputPath();
 
     /**
      * Returns the URI to the final output of this session. This is only available
@@ -151,11 +152,11 @@
     public Uri getContentUri();
 
     /**
-     * Whether this session already has a path. This is the case once it has
-     * been started. False is returned, if the session has not been started yet
-     * and no path is available.
+     * Whether this session has been started. Once it has been started it will
+     * have a valid path and can be processed. False is returned, if the session
+     * has not been started yet and no path is available.
      */
-    public boolean hasPath();
+    public boolean isStarted();
 
     /**
      * Updates the preview from a file. {@link #onPreviewAvailable()} will be
diff --git a/src/com/android/camera/session/CaptureSessionManagerImpl.java b/src/com/android/camera/session/CaptureSessionManagerImpl.java
index 14d6ffc..84bd693 100644
--- a/src/com/android/camera/session/CaptureSessionManagerImpl.java
+++ b/src/com/android/camera/session/CaptureSessionManagerImpl.java
@@ -51,7 +51,7 @@
         private Uri mUri;
         /** The title of the item being processed. */
         private final String mTitle;
-        /** The location this session was created at. Used for media store.*/
+        /** The location this session was created at. Used for media store. */
         private Location mLocation;
         /** The current progress of this session in percent. */
         private int mProgressPercent = 0;
@@ -65,18 +65,25 @@
         private final HashSet<ProgressListener> mProgressListeners =
                 new HashSet<ProgressListener>();
         private final long mSessionStartMillis;
+        /**
+         * The path that can be used to write the final JPEG output temporarily,
+         * before it is copied to the final location.
+         */
+        private final String mTempOutputPath;
 
         /**
          * Creates a new {@link CaptureSession}.
          *
          * @param title the title of this session.
-         * @param sessionStartMillis the timestamp of this capture session (since epoch).
+         * @param sessionStartMillis the timestamp of this capture session
+         *            (since epoch).
          * @param location the location of this session, used for media store.
          */
         private CaptureSessionImpl(String title, long sessionStartMillis, Location location) {
             mTitle = title;
             mSessionStartMillis = sessionStartMillis;
             mLocation = location;
+            mTempOutputPath = createTempOutputPath(mTitle);
         }
 
         @Override
@@ -151,7 +158,7 @@
 
         @Override
         public synchronized void cancel() {
-            if (mUri != null) {
+            if (isStarted()) {
                 removeSession(mUri.toString());
             }
         }
@@ -186,14 +193,12 @@
                         "Cannot call finish without calling startSession first.");
             }
 
-            final String path = this.getPath();
-
             AsyncTask.SERIAL_EXECUTOR.execute(new Runnable() {
                 @Override
                 public void run() {
                     byte[] jpegDataTemp;
                     try {
-                        jpegDataTemp = FileUtil.readFileToByteArray(new File(path));
+                        jpegDataTemp = FileUtil.readFileToByteArray(new File(mTempOutputPath));
                     } catch (IOException e) {
                         return;
                     }
@@ -221,29 +226,53 @@
         }
 
         @Override
-        public String getPath() {
-            if (mUri == null) {
+        public String getTempOutputPath() {
+            if (!isStarted()) {
                 throw new IllegalStateException("Cannot retrieve URI of not started session.");
             }
+            return mTempOutputPath;
+        }
 
+        /**
+         * Initializes the directories for storing the final output temporarily
+         * before it is copied to the final location after calling
+         * {@link #finish()}.
+         * <p>
+         * This method will make sure the directories and file exists and is
+         * writeable, otherwise it will throw an exception.
+         *
+         * @param title the title of this session. Will be used to create a
+         *            unique sub-directory.
+         * @return The path to a JPEG file which can be used to write the final
+         *         output to.
+         */
+        private String createTempOutputPath(String title) {
             File tempDirectory = null;
             try {
                 tempDirectory = new File(
-                        getSessionDirectory(TEMP_SESSIONS), mTitle);
+                        getSessionDirectory(TEMP_SESSIONS), title);
             } catch (IOException e) {
                 Log.e(TAG, "Could not get temp session directory", e);
                 throw new RuntimeException("Could not get temp session directory", e);
             }
-            tempDirectory.mkdirs();
-            File tempFile = new File(tempDirectory, mTitle  + ".jpg");
+            if (!tempDirectory.mkdirs()) {
+                throw new IllegalStateException("Could not create output data directory.");
+            }
+            File tempFile = new File(tempDirectory, mTitle + ".jpg");
             try {
                 if (!tempFile.exists()) {
-                    tempFile.createNewFile();
+                    if (!tempFile.createNewFile()) {
+                        throw new IllegalStateException("Could not create output data file.");
+                    }
                 }
             } catch (IOException e) {
                 Log.e(TAG, "Could not create temp session file", e);
                 throw new RuntimeException("Could not create temp session file", e);
             }
+
+            if (!tempFile.canWrite()) {
+                throw new RuntimeException("Temporary output file is not writeable.");
+            }
             return tempFile.getPath();
         }
 
@@ -258,7 +287,7 @@
         }
 
         @Override
-        public boolean hasPath() {
+        public boolean isStarted() {
             return mUri != null;
         }
 
@@ -269,9 +298,7 @@
 
         @Override
         public void updatePreview(String previewPath) {
-
-            final String path = this.getPath();
-
+            final String path = this.getTempOutputPath();
             AsyncTask.SERIAL_EXECUTOR.execute(new Runnable() {
                 @Override
                 public void run() {
@@ -289,7 +316,8 @@
                     int width = options.outWidth;
                     int height = options.outHeight;
 
-                    mPlaceholderManager.replacePlaceholder(mPlaceHolderSession, jpegData, width, height);
+                    mPlaceholderManager.replacePlaceholder(mPlaceHolderSession, jpegData, width,
+                            height);
                     onPreviewAvailable();
                 }
             });
@@ -372,14 +400,14 @@
 
     @Override
     public void putSession(Uri sessionUri, CaptureSession session) {
-        synchronized (mSessions)  {
+        synchronized (mSessions) {
             mSessions.put(sessionUri.toString(), session);
         }
     }
 
     @Override
     public CaptureSession getSession(Uri sessionUri) {
-        synchronized (mSessions)  {
+        synchronized (mSessions) {
             return mSessions.get(sessionUri.toString());
         }
     }
@@ -408,7 +436,7 @@
 
     @Override
     public File getSessionDirectory(String subDirectory) throws IOException {
-      return mSessionStorageManager.getSessionDirectory(subDirectory);
+        return mSessionStorageManager.getSessionDirectory(subDirectory);
     }
 
     private void removeSession(String sessionUri) {
@@ -486,8 +514,8 @@
     }
 
     /**
-     * Notifies all task listeners that the task with the given URI has
-     * changed its progress message.
+     * Notifies all task listeners that the task with the given URI has changed
+     * its progress message.
      */
     private void notifyTaskProgressText(final Uri uri, final CharSequence message) {
         mMainHandler.post(new Runnable() {
diff --git a/src/com/android/camera/session/SessionStorageManagerImpl.java b/src/com/android/camera/session/SessionStorageManagerImpl.java
index d863830..ea7a907 100644
--- a/src/com/android/camera/session/SessionStorageManagerImpl.java
+++ b/src/com/android/camera/session/SessionStorageManagerImpl.java
@@ -98,6 +98,7 @@
 
         final long nowInMillis = System.currentTimeMillis();
         for (File sessionDir : sessionDirs) {
+            Log.v(TAG, "Check for potential clean-up: " + sessionDir.getAbsolutePath());
             if (sessionDir.lastModified() < (nowInMillis - MAX_SESSION_AGE_MILLIS)) {
                 if (!FileUtil.deleteDirectoryRecursively(sessionDir)) {
                     Log.w(TAG, "Could not clean up " + sessionDir.getAbsolutePath());
diff --git a/src/com/android/camera/ui/FilmstripGestureRecognizer.java b/src/com/android/camera/ui/FilmstripGestureRecognizer.java
index 17e8e2f..8106361 100644
--- a/src/com/android/camera/ui/FilmstripGestureRecognizer.java
+++ b/src/com/android/camera/ui/FilmstripGestureRecognizer.java
@@ -18,6 +18,8 @@
 
 import android.content.Context;
 import android.view.GestureDetector;
+import android.view.InputDevice;
+import android.view.KeyEvent;
 import android.view.MotionEvent;
 import android.view.ScaleGestureDetector;
 
@@ -33,6 +35,7 @@
         boolean onSingleTapUp(float x, float y);
         boolean onDoubleTap(float x, float y);
         boolean onScroll(float x, float y, float dx, float dy);
+        boolean onMouseScroll(float hscroll, float vscroll);
         boolean onFling(float velocityX, float velocityY);
         boolean onScaleBegin(float focusX, float focusY);
         boolean onScale(float focusX, float focusY, float scale);
@@ -64,6 +67,23 @@
         return (gestureProcessed | scaleProcessed);
     }
 
+    public boolean onGenericMotionEvent(MotionEvent event) {
+        if ((event.getSource() & InputDevice.SOURCE_CLASS_POINTER) != 0) {
+            switch (event.getAction()) {
+                case MotionEvent.ACTION_SCROLL: {
+                    final float hscroll = event.getAxisValue(MotionEvent.AXIS_HSCROLL);
+                    final float vscroll = -event.getAxisValue(MotionEvent.AXIS_VSCROLL);
+
+                    if (hscroll != 0.0f || vscroll != 0.0f) {
+                        mListener.onMouseScroll(hscroll, vscroll);
+                    }
+                }
+            }
+        }
+
+        return true;
+    }
+
     private class MyGestureListener
                 extends GestureDetector.SimpleOnGestureListener {
         @Override
diff --git a/src/com/android/camera/util/JpegUtilNative.java b/src/com/android/camera/util/JpegUtilNative.java
index 62ac99b..ff288cb 100644
--- a/src/com/android/camera/util/JpegUtilNative.java
+++ b/src/com/android/camera/util/JpegUtilNative.java
@@ -13,8 +13,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.android.camera.util;
 
+import android.graphics.Bitmap;
 import android.graphics.ImageFormat;
 import android.media.Image;
 import android.media.Image.Plane;
@@ -35,16 +37,20 @@
      * Compresses an image from YUV422 format to jpeg.
      *
      * @param yBuf the buffer containing the Y component of the image
-     * @param yPStride the stride between adjacent pixels in the same row in yBuf
+     * @param yPStride the stride between adjacent pixels in the same row in
+     *            yBuf
      * @param yRStride the stride between adjacent rows in yBuf
      * @param cbBuf the buffer containing the Cb component of the image
-     * @param cbPStride the stride between adjacent pixels in the same row in cbBuf
+     * @param cbPStride the stride between adjacent pixels in the same row in
+     *            cbBuf
      * @param cbRStride the stride between adjacent rows in cbBuf
      * @param crBuf the buffer containing the Cr component of the image
-     * @param crPStride the stride between adjacent pixels in the same row in crBuf
+     * @param crPStride the stride between adjacent pixels in the same row in
+     *            crBuf
      * @param crRStride the stride between adjacent rows in crBuf
      * @param quality the quality level (0 to 100) to use
-     * @return The number of bytes written, or a negative value indicating an error
+     * @return The number of bytes written, or a negative value indicating an
+     *         error
      */
     private static native int compressJpegFromYUV420pNative(
             int width, int height,
@@ -54,8 +60,35 @@
             Object outBuf, int outBufCapacity, int quality);
 
     /**
-     * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int, java.lang.Object, int, int,
-     *      java.lang.Object, int, int, java.lang.Object, int, int, java.lang.Object, int, int)
+     * Copies the Image.Plane specified by planeBuf, pStride, and rStride to the
+     * Bitmap.
+     *
+     * @param width the width of the image
+     * @param height the height of the image
+     * @param planeBuf the native ByteBuffer containing the image plane data
+     * @param pStride the stride between adjacent pixels in the same row of
+     *            planeBuf
+     * @param rStride the stride between adjacent rows in planeBuf
+     */
+    private static native void copyImagePlaneToBitmap(int width, int height, Object planeBuf,
+            int pStride, int rStride, Object outBitmap, int rot90);
+
+    public static void copyImagePlaneToBitmap(Image.Plane plane, Bitmap bitmap, int rot90) {
+        if (bitmap.getConfig() != Bitmap.Config.ALPHA_8) {
+            throw new RuntimeException("Unsupported bitmap format");
+        }
+
+        int width = bitmap.getWidth();
+        int height = bitmap.getHeight();
+
+        copyImagePlaneToBitmap(width, height, plane.getBuffer(), plane.getPixelStride(),
+                plane.getRowStride(), bitmap, rot90);
+    }
+
+    /**
+     * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int,
+     *      java.lang.Object, int, int, java.lang.Object, int, int,
+     *      java.lang.Object, int, int, java.lang.Object, int, int)
      */
     public static int compressJpegFromYUV420p(
             int width, int height,
@@ -64,15 +97,18 @@
             ByteBuffer crBuf, int crPStride, int crRStride,
             ByteBuffer outBuf, int quality) {
         return compressJpegFromYUV420pNative(width, height, yBuf, yPStride, yRStride, cbBuf,
-                cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(), quality);
+                cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(),
+                quality);
     }
 
     /**
-     * Compresses the given image to jpeg. Note that only ImageFormat.YUV_420_888 is currently
-     * supported. Furthermore, all planes must use direct byte buffers.
+     * Compresses the given image to jpeg. Note that only
+     * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+     * must use direct byte buffers.
      *
      * @param img the image to compress
      * @param outBuf a direct byte buffer to hold the output jpeg.
+     * @param quality the jpeg encoder quality (0 to 100)
      * @return The number of bytes written to outBuf
      */
     public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality) {
@@ -119,4 +155,104 @@
 
         return numBytesWritten;
     }
+
+    /**
+     * Compresses the given image to jpeg. Note that only
+     * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+     * must use direct byte buffers.<br>
+     * FIXME TODO OPTIMIZE This method is *incredibly* inefficient.
+     *
+     * @param img the image to compress
+     * @param outBuf a direct byte buffer to hold the output jpeg.
+     * @param quality the jpeg encoder quality (0 to 100)
+     * @param degrees the amount to rotate the image clockwise, in degrees; must be one of {0, 90, 180, 270}.
+     * @return The number of bytes written to outBuf
+     */
+    public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality,
+            int degrees) {
+        if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
+            throw new RuntimeException("Unsupported rotation angle");
+        }
+
+        if (degrees == 0) {
+            return compressJpegFromYUV420Image(img, outBuf, quality);
+        }
+
+        if (img.getFormat() != ImageFormat.YUV_420_888) {
+            throw new RuntimeException("Unsupported Image Format.");
+        }
+
+        final int NUM_PLANES = 3;
+
+        if (img.getPlanes().length != NUM_PLANES) {
+            throw new RuntimeException("Unexpected number of image planes.");
+        }
+
+        if (!outBuf.isDirect()) {
+            throw new RuntimeException("Output buffer must be direct.");
+        }
+
+        ByteBuffer[] planeBuf = new ByteBuffer[NUM_PLANES];
+        int[] pixelStride = new int[NUM_PLANES];
+        int[] rowStride = new int[NUM_PLANES];
+
+        for (int i = 0; i < NUM_PLANES; i++) {
+            Plane plane = img.getPlanes()[i];
+
+            if (!plane.getBuffer().isDirect()) {
+                return -1;
+            }
+
+            int width = img.getWidth();
+            int height = img.getHeight();
+
+            if (i > 0) {
+                // The image plane for the Cb and Cr channels is downsampled.
+                width /= 2;
+                height /= 2;
+            }
+
+            if (degrees == 90 || degrees == 270) {
+                int tmp = width;
+                width = height;
+                height = tmp;
+            }
+
+            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
+
+            copyImagePlaneToBitmap(plane, bitmap, degrees / 90);
+
+            Bitmap rotatedBitmap = bitmap;
+
+            ByteBuffer rotatedBitmapBuffer = ByteBuffer.allocateDirect(
+                    rotatedBitmap.getWidth() * rotatedBitmap.getHeight());
+
+            rotatedBitmap.copyPixelsToBuffer(rotatedBitmapBuffer);
+
+            planeBuf[i] = rotatedBitmapBuffer;
+            pixelStride[i] = 1;
+            rowStride[i] = rotatedBitmap.getWidth();
+        }
+
+        outBuf.clear();
+
+        int width = img.getWidth();
+        int height = img.getHeight();
+        if (degrees == 90 || degrees == 270) {
+            int tmp = width;
+            width = height;
+            height = tmp;
+        }
+
+        int numBytesWritten = compressJpegFromYUV420p(
+                width, height,
+                planeBuf[0], pixelStride[0], rowStride[0],
+                planeBuf[1], pixelStride[1], rowStride[1],
+                planeBuf[2], pixelStride[2], rowStride[2],
+                outBuf, quality);
+
+        outBuf.limit(numBytesWritten);
+
+        return numBytesWritten;
+    }
 }
diff --git a/src/com/android/camera/util/ConjunctionListenerMux.java b/src/com/android/camera/util/ListenerCombiner.java
similarity index 83%
rename from src/com/android/camera/util/ConjunctionListenerMux.java
rename to src/com/android/camera/util/ListenerCombiner.java
index a320724..3479dd5 100644
--- a/src/com/android/camera/util/ConjunctionListenerMux.java
+++ b/src/com/android/camera/util/ListenerCombiner.java
@@ -26,11 +26,11 @@
  * single listener to be invoked upon change in the conjunction (logical AND) of
  * all inputs.
  */
-public class ConjunctionListenerMux<Input extends Enum<Input>> {
+public class ListenerCombiner<Input extends Enum<Input>> {
     /**
      * Callback for listening to changes to the conjunction of all inputs.
      */
-    public static interface OutputChangeListener {
+    public static interface StateChangeListener {
         /**
          * Called whenever the conjunction of all inputs changes. Listeners MUST
          * NOT call {@link #setInput} while still registered as a listener, as
@@ -38,7 +38,7 @@
          *
          * @param state the conjunction of all input values.
          */
-        public void onOutputChange(boolean state);
+        public void onStateChange(boolean state);
     }
 
     /** Mutex for mValues and mState. */
@@ -51,14 +51,14 @@
      * The set of listeners to notify when the output (the conjunction of all
      * inputs) changes.
      */
-    private final List<OutputChangeListener> mListeners = Collections.synchronizedList(
-            new ArrayList<OutputChangeListener>());
+    private final List<StateChangeListener> mListeners = Collections.synchronizedList(
+            new ArrayList<StateChangeListener>());
 
-    public void addListener(OutputChangeListener listener) {
+    public void addListener(StateChangeListener listener) {
         mListeners.add(listener);
     }
 
-    public void removeListener(OutputChangeListener listener) {
+    public void removeListener(StateChangeListener listener) {
         mListeners.remove(listener);
     }
 
@@ -103,12 +103,12 @@
         }
     }
 
-    public ConjunctionListenerMux(Class<Input> clazz, OutputChangeListener listener) {
+    public ListenerCombiner(Class<Input> clazz, StateChangeListener listener) {
         this(clazz);
         addListener(listener);
     }
 
-    public ConjunctionListenerMux(Class<Input> clazz) {
+    public ListenerCombiner(Class<Input> clazz) {
         mInputs = new EnumMap<Input, Boolean>(clazz);
 
         for (Input i : clazz.getEnumConstants()) {
@@ -124,8 +124,8 @@
      */
     public void notifyListeners() {
         synchronized (mLock) {
-            for (OutputChangeListener listener : mListeners) {
-                listener.onOutputChange(mOutput);
+            for (StateChangeListener listener : mListeners) {
+                listener.onStateChange(mOutput);
             }
         }
     }
diff --git a/src/com/android/camera/widget/FilmstripLayout.java b/src/com/android/camera/widget/FilmstripLayout.java
index c943b7e..0dd1d37 100644
--- a/src/com/android/camera/widget/FilmstripLayout.java
+++ b/src/com/android/camera/widget/FilmstripLayout.java
@@ -362,6 +362,14 @@
         }
 
         @Override
+        public boolean onMouseScroll(float hscroll, float vscroll) {
+            if (mFilmstripContentTranslationProgress == 0f) {
+                return mFilmstripGestureListener.onMouseScroll(hscroll, vscroll);
+            }
+            return false;
+        }
+
+        @Override
         public boolean onSingleTapUp(float x, float y) {
             if (mFilmstripContentTranslationProgress == 0f) {
                 return mFilmstripGestureListener.onSingleTapUp(x, y);
diff --git a/src/com/android/camera/widget/FilmstripView.java b/src/com/android/camera/widget/FilmstripView.java
index fbd0d03..36a5dab 100644
--- a/src/com/android/camera/widget/FilmstripView.java
+++ b/src/com/android/camera/widget/FilmstripView.java
@@ -34,6 +34,7 @@
 import android.util.AttributeSet;
 import android.util.DisplayMetrics;
 import android.util.SparseArray;
+import android.view.KeyEvent;
 import android.view.MotionEvent;
 import android.view.View;
 import android.view.ViewGroup;
@@ -85,6 +86,7 @@
     // Only check for intercepting touch events within first 500ms
     private static final int SWIPE_TIME_OUT = 500;
     private static final int DECELERATION_FACTOR = 4;
+    private static final float MOUSE_SCROLL_FACTOR = 128f;
 
     private CameraActivity mActivity;
     private FilmstripGestureRecognizer mGestureRecognizer;
@@ -1627,6 +1629,12 @@
         return mGestureRecognizer.onTouchEvent(ev);
     }
 
+    @Override
+    public boolean onGenericMotionEvent(MotionEvent ev) {
+        mGestureRecognizer.onGenericMotionEvent(ev);
+        return true;
+    }
+
     FilmstripGestureRecognizer.Listener getGestureListener() {
         return mGestureListener;
     }
@@ -2769,6 +2777,30 @@
         }
 
         @Override
+        public boolean onMouseScroll(float hscroll, float vscroll) {
+            final float scroll;
+
+            hscroll *= MOUSE_SCROLL_FACTOR;
+            vscroll *= MOUSE_SCROLL_FACTOR;
+
+            if (vscroll != 0f) {
+                scroll = vscroll;
+            } else {
+                scroll = hscroll;
+            }
+
+            if (inFullScreen()) {
+                onFling(-scroll, 0f);
+            } else if (inZoomView()) {
+                onScroll(0f, 0f, hscroll, vscroll);
+            } else {
+                onScroll(0f, 0f, scroll, 0f);
+            }
+
+            return true;
+        }
+
+        @Override
         public boolean onFling(float velocityX, float velocityY) {
             final ViewItem currItem = mViewItem[mCurrentItem];
             if (currItem == null) {