am 97563a79: color changes for modes, onboarding screens, and tutorial screens

* commit '97563a7926f87ac350bff7f644d43209fe874185':
  color changes for modes, onboarding screens, and tutorial screens
diff --git a/jni/Android.mk b/jni/Android.mk
index 0f95c9b..e128705 100755
--- a/jni/Android.mk
+++ b/jni/Android.mk
@@ -19,7 +19,7 @@
 
 LOCAL_CFLAGS := -std=c++11
 LOCAL_NDK_STL_VARIANT := c++_static
-LOCAL_LDFLAGS   := -llog -ldl
+LOCAL_LDFLAGS   := -llog -ldl -ljnigraphics
 LOCAL_SDK_VERSION := 9
 LOCAL_MODULE    := libjni_jpegutil
 LOCAL_SRC_FILES := jpegutil.cpp jpegutilnative.cpp
diff --git a/jni/jpegutilnative.cpp b/jni/jpegutilnative.cpp
index cd5a2b3..7012b97 100644
--- a/jni/jpegutilnative.cpp
+++ b/jni/jpegutilnative.cpp
@@ -57,3 +57,68 @@
   return jpegutil::compress(yP, cbP, crP, (unsigned char*)out, outBufCapacity,
                             flush, quality);
 }
+
+// AndroidBitmap_lockPixels/unlockPixels require this header (and the new
+// -ljnigraphics dependency in Android.mk).
+#include <android/bitmap.h>
+
+/**
+ * Copies the Image.Plane specified by planeBuf, pStride, and rStride to the
+ * Bitmap.
+ *
+ * @param env the JNI environment
+ * @param clazz the java class
+ * @param width the width of the output image
+ * @param height the height of the output image
+ * @param planeBuf the native ByteBuffer containing the image plane data
+ * @param pStride the stride between adjacent pixels in the same row of planeBuf
+ * @param rStride the stride between adjacent rows in planeBuf
+ * @param outBitmap the destination Bitmap to copy into; must use the ALPHA_8 config
+ * @param rot90 the multiple of 90 degrees to rotate, one of {0, 1, 2, 3}.
+ */
+extern "C" JNIEXPORT void JNICALL
+    Java_com_android_camera_util_JpegUtilNative_copyImagePlaneToBitmap(
+        JNIEnv* env, jclass clazz, jint width, jint height, jobject planeBuf,
+        jint pStride, jint rStride, jobject outBitmap, jint rot90) {
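+  // planeBuf is expected to be a direct ByteBuffer; GetDirectBufferAddress
+  // returns NULL for non-direct buffers.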
+  jbyte* src = (jbyte*)env->GetDirectBufferAddress(planeBuf);
+
+  char* dst = 0;
+  AndroidBitmap_lockPixels(env, outBitmap, (void**) &dst);
+
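+  // The destination bitmap data is tightly packed, one byte per pixel
+  // (ALPHA_8), so rows of dst are exactly 'width' bytes apart, while the
+  // source plane is addressed through its pixel stride (pStride) and row
+  // stride (rStride).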
+  if (rot90 == 0) {
+    // No rotation
+    for (int y = 0; y < height; y++) {
+      char* srcPtr = reinterpret_cast<char*>(&src[y * rStride]);
+      char* dstPtr = &dst[y * width];
+      for (int x = 0; x < width; x++) {
+        *dstPtr = *srcPtr;
+        srcPtr += pStride;
+        dstPtr++;
+      }
+    }
+  } else if (rot90 == 1) {
+    // 90-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = height - 1 - y;
+        int srcY = x;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  } else if (rot90 == 2) {
+    // 180-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = width - 1 - x;
+        int srcY = height - 1 - y;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  } else if (rot90 == 3) {
+    // 270-degree rotation
+    for (int y = 0; y < height; y++) {
+      for (int x = 0; x < width; x++) {
+        int srcX = y;
+        int srcY = width - 1 - x;
+        dst[y * width + x] = src[srcX * pStride + rStride * srcY];
+      }
+    }
+  }
+
+  AndroidBitmap_unlockPixels(env, outBitmap);
+}
diff --git a/src/com/android/camera/app/CameraApp.java b/src/com/android/camera/app/CameraApp.java
index 9006183..6c35c53 100644
--- a/src/com/android/camera/app/CameraApp.java
+++ b/src/com/android/camera/app/CameraApp.java
@@ -19,6 +19,7 @@
 import android.app.Application;
 import android.app.NotificationManager;
 import android.content.Context;
+import android.os.Debug;
 
 import com.android.camera.MediaSaverImpl;
 import com.android.camera.debug.LogHelper;
@@ -40,6 +41,15 @@
  * to be used across modules.
  */
 public class CameraApp extends Application implements CameraServices {
+    /**
+     * This is for debugging only: If set to true, application will not start
+     * until a debugger is attached.
+     * <p>
+     * Use this if you need to debug code that is executed while the app starts
+     * up and it would be too late to attach a debugger afterwards.
+     */
+    private static final boolean WAIT_FOR_DEBUGGER_ON_START = false;
+
     private MediaSaver mMediaSaver;
     private CaptureSessionManager mSessionManager;
     private SessionStorageManager mSessionStorageManager;
@@ -53,6 +63,10 @@
     public void onCreate() {
         super.onCreate();
 
+        if (WAIT_FOR_DEBUGGER_ON_START) {
+            Debug.waitForDebugger();
+        }
+
         Context context = getApplicationContext();
         LogHelper.initialize(context);
 
diff --git a/src/com/android/camera/one/v2/ImageCaptureManager.java b/src/com/android/camera/one/v2/ImageCaptureManager.java
index a815345..7fd6dc3 100644
--- a/src/com/android/camera/one/v2/ImageCaptureManager.java
+++ b/src/com/android/camera/one/v2/ImageCaptureManager.java
@@ -50,7 +50,7 @@
  * Implements {@link android.media.ImageReader.OnImageAvailableListener} and
  * {@link android.hardware.camera2.CameraCaptureSession.CaptureListener} to
  * store the results of capture requests (both {@link Image}s and
- * {@link TotalCaptureResult}s in a ring-buffer from which they may be saved.
+ * {@link TotalCaptureResult}s) in a ring-buffer from which they may be saved.
  * <br>
  * This also manages the lifecycle of {@link Image}s within the application as
  * they are passed in from the lower-level camera2 API.
@@ -102,7 +102,7 @@
      * Callback for saving an image.
      */
     public interface ImageCaptureListener {
-         /**
+        /**
          * Called with the {@link Image} and associated
          * {@link TotalCaptureResult}. A typical implementation would save this
          * to disk.
diff --git a/src/com/android/camera/one/v2/OneCameraZslImpl.java b/src/com/android/camera/one/v2/OneCameraZslImpl.java
index aa03b88..6e617d0 100644
--- a/src/com/android/camera/one/v2/OneCameraZslImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraZslImpl.java
@@ -1,17 +1,15 @@
 /*
  * Copyright (C) 2014 The Android Open Source Project
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
  */
 
 package com.android.camera.one.v2;
@@ -63,8 +61,12 @@
 import com.android.camera.util.Size;
 
 import java.nio.ByteBuffer;
+import java.security.InvalidParameterException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
@@ -80,15 +82,15 @@
     private static final Tag TAG = new Tag("OneCameraZslImpl2");
 
     /** Default JPEG encoding quality. */
-    private static final int JPEG_QUALITY = CameraProfile.getJpegEncodingQualityParameter(
-            CameraProfile.QUALITY_HIGH);
+    private static final int JPEG_QUALITY =
+            CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
     /**
-     * The maximum number of images to store in the full-size ZSL ring buffer.
+     * The maximum number of images to store in the full-size ZSL ring buffer. 
      * <br>
      * TODO: Determine this number dynamically based on available memory and the
      * size of frames.
      */
-    private static final int MAX_CAPTURE_IMAGES = 10;
+    private static final int MAX_CAPTURE_IMAGES = 20;
     /**
      * True if zero-shutter-lag images should be captured. Some devices produce
      * lower-quality images for the high-frequency stream, so we may wish to
@@ -164,17 +166,18 @@
     private ImageCaptureManager mCaptureManager;
 
     /**
-     * The sensor timestamp (which may not be relative to the system time) of
-     * the most recently captured image.
+     * The sensor timestamps (which may not be relative to the system time) of
+     * the most recently captured images.
      */
-    private final AtomicLong mLastCapturedImageTimestamp = new AtomicLong(0);
+    private final Set<Long> mCapturedImageTimestamps = Collections.synchronizedSet(
+            new HashSet<Long>());
 
     /** Thread pool for performing slow jpeg encoding and saving tasks. */
     private final ThreadPoolExecutor mImageSaverThreadPool;
 
     /** Pool of native byte buffers on which to store jpeg-encoded images. */
-    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool = new
-            Pools.SynchronizedPool<ByteBuffer>(64);
+    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
+            new Pools.SynchronizedPool<ByteBuffer>(64);
 
     /** Current zoom value. 1.0 is no zoom. */
     private float mZoomValue = 1f;
@@ -198,8 +201,7 @@
      * details of how this is managed.
      */
     private static enum ReadyStateRequirement {
-        CAPTURE_MANAGER_READY,
-        CAPTURE_NOT_IN_PROGRESS
+        CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
     }
 
     /**
@@ -223,32 +225,44 @@
         private final PhotoCaptureParameters mParams;
         private final CaptureSession mSession;
 
-        public ImageCaptureTask(PhotoCaptureParameters parameters,
-                CaptureSession session) {
+        public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
             mParams = parameters;
             mSession = session;
         }
 
         @Override
-        public void onImageCaptured(Image image, TotalCaptureResult
-                captureResult) {
+        public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
             long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
 
-            // We should only capture the image if it's more recent than the
-            // latest one. Synchronization is necessary since this method is
-            // called on {@link #mImageSaverThreadPool}.
-            synchronized (mLastCapturedImageTimestamp) {
-                if (timestamp > mLastCapturedImageTimestamp.get()) {
-                    mLastCapturedImageTimestamp.set(timestamp);
+            // We should only capture the image if it hasn't been captured
+            // before. Synchronization is necessary since
+            // mCapturedImageTimestamps is read & modified elsewhere.
+            synchronized (mCapturedImageTimestamps) {
+                if (!mCapturedImageTimestamps.contains(timestamp)) {
+                    mCapturedImageTimestamps.add(timestamp);
                 } else {
-                    // There was a more recent (or identical) image which has
-                    // begun being saved, so abort.
+                    // An image with this timestamp has already begun being
+                    // saved, so abort.
                     return;
                 }
+
+                // Clear out old timestamps from the set.
+                // We must keep old timestamps in the set a little longer (a
+                // factor of 2 seems adequate) to ensure they are cleared out of
+                // the ring buffer before their timestamp is removed from the
+                // set.
+                long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
+                if (mCapturedImageTimestamps.size() > maxTimestamps) {
+                    ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
+                    Collections.sort(timestamps);
+                    for (int i = 0; i < timestamps.size()
+                            && mCapturedImageTimestamps.size() > maxTimestamps; i++) {
+                        mCapturedImageTimestamps.remove(timestamps.get(i));
+                    }
+                }
             }
 
-            mReadyStateManager.setInput(
-                    ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
+            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
 
             mSession.startEmpty();
             savePicture(image, mParams, mSession);
@@ -287,8 +301,9 @@
         mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
                 TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
 
-        mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
-                mImageSaverThreadPool);
+        mCaptureManager =
+                new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
+                        mImageSaverThreadPool);
         mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
                 @Override
             public void onReadyStateChange(boolean capturePossible) {
@@ -304,10 +319,13 @@
                 @Override
                     public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                             CaptureResult result) {
-                        mFocusStateListener.onFocusStatusUpdate(
-                                AutoFocusHelper.stateFromCamera2State(
-                                        result.get(CaptureResult.CONTROL_AF_STATE)),
+                        FocusStateListener listener = mFocusStateListener;
+                        if (listener != null) {
+                            listener.onFocusStatusUpdate(
+                                    AutoFocusHelper.stateFromCamera2State(
+                                            result.get(CaptureResult.CONTROL_AF_STATE)),
                                 result.getFrameNumber());
+                        }
                     }
                 });
 
@@ -330,14 +348,14 @@
      * @return The largest supported picture size.
      */
     public Size getDefaultPictureSize() {
-        StreamConfigurationMap configs = mCharacteristics.get(
-                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        StreamConfigurationMap configs =
+                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);
 
         // Find the largest supported size.
         android.util.Size largestSupportedSize = supportedSizes[0];
-        long largestSupportedSizePixels = largestSupportedSize.getWidth()
-                * largestSupportedSize.getHeight();
+        long largestSupportedSizePixels =
+                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
         for (int i = 0; i < supportedSizes.length; i++) {
             long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
             if (numPixels > largestSupportedSizePixels) {
@@ -346,8 +364,7 @@
             }
         }
 
-        return new Size(largestSupportedSize.getWidth(),
-                largestSupportedSize.getHeight());
+        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
     }
 
     private void onShutterInvokeUI(final PhotoCaptureParameters params) {
@@ -364,15 +381,14 @@
     public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
         params.checkSanity();
 
-        mReadyStateManager.setInput(
-                ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
+        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
 
         boolean useZSL = ZSL_ENABLED;
 
         // We will only capture images from the zsl ring-buffer which satisfy
         // this constraint.
-        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<
-                ImageCaptureManager.CapturedImageConstraint>();
+        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
+                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
         zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
                 @Override
             public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
@@ -384,14 +400,30 @@
                 Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
                 Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);
 
-                if (timestamp <= mLastCapturedImageTimestamp.get()) {
-                    // Don't save frames older than the most
-                    // recently-captured frame.
-                    // TODO This technically has a race condition in which
-                    // duplicate frames may be saved, but if a user is
-                    // tapping at >30Hz, duplicate images may be what they
-                    // expect.
-                    return false;
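+                // CaptureResult.get() may return null for keys that were not
+                // reported for this frame, so substitute defaults to avoid
+                // NullPointerExceptions in the unboxing comparisons below.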
+                if (lensState == null) {
+                    lensState = CaptureResult.LENS_STATE_STATIONARY;
+                }
+                if (flashState == null) {
+                    flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
+                }
+                if (flashMode == null) {
+                    flashMode = CaptureResult.FLASH_MODE_OFF;
+                }
+                if (aeState == null) {
+                    aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
+                }
+                if (afState == null) {
+                    afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
+                }
+                if (awbState == null) {
+                    awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
+                }
+
+                synchronized (mCapturedImageTimestamps) {
+                    if (mCapturedImageTimestamps.contains(timestamp)) {
+                        // Don't save frames which we've already saved.
+                        return false;
+                    }
                 }
 
                 if (lensState == CaptureResult.LENS_STATE_MOVING) {
@@ -434,8 +466,8 @@
         });
         // This constraint lets us capture images which have been explicitly
         // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
-        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<
-                ImageCaptureManager.CapturedImageConstraint>();
+        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
+                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
         singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
                 @Override
             public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
@@ -479,14 +511,15 @@
                             new MetadataChangeListener() {
                             @Override
                                 public void onImageMetadataChange(Key<?> key, Object oldValue,
-                                        Object newValue, CaptureResult result) {
+                                        Object newValue,
+                                        CaptureResult result) {
                                     Log.v(TAG, "AE State Changed");
-                                    if (oldValue.equals(
-                                            Integer.valueOf(
-                                                    CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
+                                    if (oldValue.equals(Integer.valueOf(
+                                            CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                         mCaptureManager.removeMetadataChangeListener(key, this);
                                         sendSingleRequest(params);
-                                        // TODO: Delay this until onCaptureStarted().
+                                        // TODO: Delay this until
+                                        // onCaptureStarted().
                                         onShutterInvokeUI(params);
                                     }
                                 }
@@ -552,8 +585,8 @@
 
     @Override
     public Size[] getSupportedSizes() {
-        StreamConfigurationMap config = mCharacteristics
-                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        StreamConfigurationMap config =
+                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         return Size.convert(config.getOutputSizes(sCaptureImageFormat));
     }
 
@@ -578,34 +611,34 @@
             CaptureSession session) {
         int heading = captureParams.heading;
 
-        int width = image.getWidth();
-        int height = image.getHeight();
-        int rotation = 0;
+        int degrees = (captureParams.orientation + 270) % 360;
         ExifInterface exif = null;
 
         exif = new ExifInterface();
         // TODO: Add more exif tags here.
 
-        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, width));
-        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, height));
+        Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
+                degrees);
 
-        // TODO: Handle rotation correctly.
+        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
+        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));
+
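+        // The pixel data is saved already rotated (see acquireJpegBytes), so
+        // the EXIF orientation can simply be recorded as TOP_LEFT.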
+        exif.setTag(
+                exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));
 
         // Set GPS heading direction based on sensor, if location is on.
         if (heading >= 0) {
-            ExifTag directionRefTag = exif.buildTag(
-                    ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
+            ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                     ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
-            ExifTag directionTag = exif.buildTag(
-                    ExifInterface.TAG_GPS_IMG_DIRECTION,
-                    new Rational(heading, 1));
+            ExifTag directionTag =
+                    exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
             exif.setTag(directionRefTag);
             exif.setTag(directionTag);
         }
-
-        session.saveAndFinish(acquireJpegBytes(image), width, height, rotation, exif,
-                new OnMediaSavedListener() {
-                @Override
+        // TODO Find out why this is off by -90 degrees.
+        session.saveAndFinish(acquireJpegBytes(image, degrees),
+                size.getWidth(), size.getHeight(), 0, exif, new OnMediaSavedListener() {
+                        @Override
                     public void onMediaSaved(Uri uri) {
                         captureParams.callback.onPictureSaved(uri);
                     }
@@ -697,8 +730,7 @@
                 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
                 break;
             case OFF:
-                builder.set(CaptureRequest.CONTROL_AE_MODE,
-                        CaptureRequest.CONTROL_AE_MODE_ON);
+                builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                 break;
             case AUTO:
@@ -719,8 +751,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -742,8 +773,7 @@
 
             addRegionsToCaptureRequestBuilder(builder);
 
-            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
             return true;
         } catch (CameraAccessException e) {
             if (ZSL_ENABLED) {
@@ -789,8 +819,7 @@
                         CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
             }
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
             return true;
         } catch (CameraAccessException e) {
             Log.v(TAG, "Could not execute single still capture request.", e);
@@ -803,8 +832,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -823,8 +851,7 @@
             addRegionsToCaptureRequestBuilder(builder);
             addFlashToCaptureRequestBuilder(builder, flashMode);
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
 
             return true;
         } catch (CameraAccessException e) {
@@ -840,8 +867,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -859,8 +885,7 @@
             builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
             builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
 
-            mCaptureSession.capture(builder.build(), mCaptureManager,
-                    mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
 
             return true;
         } catch (CameraAccessException e) {
@@ -881,8 +906,7 @@
         try {
             CaptureRequest.Builder builder;
             if (ZSL_ENABLED) {
-                builder = mDevice.
-                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
             } else {
                 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
             }
@@ -924,14 +948,35 @@
     }
 
     /**
+     * @param originalWidth the width of the original image captured from the
+     *            camera
+     * @param originalHeight the height of the original image captured from the
+     *            camera
+     * @param orientation the rotation to apply, in degrees; must be 0, 90,
+     *            180, or 270.
+     * @return The size of the final rotated image
+     */
+    private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
+            int orientation) {
+        if (orientation == 0 || orientation == 180) {
+            return new Size(originalWidth, originalHeight);
+        } else if (orientation == 90 || orientation == 270) {
+            return new Size(originalHeight, originalWidth);
+        } else {
+            throw new InvalidParameterException("Orientation not supported.");
+        }
+    }
+
+    /**
      * Given an image reader, extracts the JPEG image bytes and then closes the
      * reader.
      *
      * @param img the image from which to extract jpeg bytes or compress to
      *            jpeg.
+     * @param degrees the angle to rotate the image, in degrees. Rotation is
+     *            only applied to YUV images.
      * @return The bytes of the JPEG image. Newly allocated.
      */
-    private byte[] acquireJpegBytes(Image img) {
+    private byte[] acquireJpegBytes(Image img, int degrees) {
         ByteBuffer buffer;
 
         if (img.getFormat() == ImageFormat.JPEG) {
@@ -948,7 +993,8 @@
                 buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
             }
 
-            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY);
+            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY,
+                    degrees);
 
             if (numBytes < 0) {
                 throw new RuntimeException("Error compressing jpeg.");
diff --git a/src/com/android/camera/util/JpegUtilNative.java b/src/com/android/camera/util/JpegUtilNative.java
index 62ac99b..ff288cb 100644
--- a/src/com/android/camera/util/JpegUtilNative.java
+++ b/src/com/android/camera/util/JpegUtilNative.java
@@ -13,8 +13,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.android.camera.util;
 
+import android.graphics.Bitmap;
 import android.graphics.ImageFormat;
 import android.media.Image;
 import android.media.Image.Plane;
@@ -35,16 +37,20 @@
      * Compresses an image from YUV422 format to jpeg.
      *
      * @param yBuf the buffer containing the Y component of the image
-     * @param yPStride the stride between adjacent pixels in the same row in yBuf
+     * @param yPStride the stride between adjacent pixels in the same row in
+     *            yBuf
      * @param yRStride the stride between adjacent rows in yBuf
      * @param cbBuf the buffer containing the Cb component of the image
-     * @param cbPStride the stride between adjacent pixels in the same row in cbBuf
+     * @param cbPStride the stride between adjacent pixels in the same row in
+     *            cbBuf
      * @param cbRStride the stride between adjacent rows in cbBuf
      * @param crBuf the buffer containing the Cr component of the image
-     * @param crPStride the stride between adjacent pixels in the same row in crBuf
+     * @param crPStride the stride between adjacent pixels in the same row in
+     *            crBuf
      * @param crRStride the stride between adjacent rows in crBuf
      * @param quality the quality level (0 to 100) to use
-     * @return The number of bytes written, or a negative value indicating an error
+     * @return The number of bytes written, or a negative value indicating an
+     *         error
      */
     private static native int compressJpegFromYUV420pNative(
             int width, int height,
@@ -54,8 +60,35 @@
             Object outBuf, int outBufCapacity, int quality);
 
     /**
-     * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int, java.lang.Object, int, int,
-     *      java.lang.Object, int, int, java.lang.Object, int, int, java.lang.Object, int, int)
+     * Copies the Image.Plane specified by planeBuf, pStride, and rStride to the
+     * Bitmap.
+     *
+     * @param width the width of the image
+     * @param height the height of the image
+     * @param planeBuf the native ByteBuffer containing the image plane data
+     * @param pStride the stride between adjacent pixels in the same row of
+     *            planeBuf
+     * @param rStride the stride between adjacent rows in planeBuf
+     * @param outBitmap the destination Bitmap; must use the ALPHA_8 config
+     * @param rot90 the multiple of 90 degrees to rotate, one of {0, 1, 2, 3}
+     */
+    private static native void copyImagePlaneToBitmap(int width, int height, Object planeBuf,
+            int pStride, int rStride, Object outBitmap, int rot90);
+
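+    /**
+     * Copies a single {@link Image.Plane} into the given {@link Bitmap},
+     * rotating it by {@code rot90 * 90} degrees.
+     *
+     * @param plane the source image plane
+     * @param bitmap the destination Bitmap; must use the ALPHA_8 config, which
+     *            stores exactly one byte per pixel
+     * @param rot90 the multiple of 90 degrees to rotate, one of {0, 1, 2, 3}
+     */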
+    public static void copyImagePlaneToBitmap(Image.Plane plane, Bitmap bitmap, int rot90) {
+        if (bitmap.getConfig() != Bitmap.Config.ALPHA_8) {
+            throw new RuntimeException("Unsupported bitmap format");
+        }
+
+        int width = bitmap.getWidth();
+        int height = bitmap.getHeight();
+
+        copyImagePlaneToBitmap(width, height, plane.getBuffer(), plane.getPixelStride(),
+                plane.getRowStride(), bitmap, rot90);
+    }
+
+    /**
+     * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int,
+     *      java.lang.Object, int, int, java.lang.Object, int, int,
+     *      java.lang.Object, int, int, java.lang.Object, int, int)
      */
     public static int compressJpegFromYUV420p(
             int width, int height,
@@ -64,15 +97,18 @@
             ByteBuffer crBuf, int crPStride, int crRStride,
             ByteBuffer outBuf, int quality) {
         return compressJpegFromYUV420pNative(width, height, yBuf, yPStride, yRStride, cbBuf,
-                cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(), quality);
+                cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(),
+                quality);
     }
 
     /**
-     * Compresses the given image to jpeg. Note that only ImageFormat.YUV_420_888 is currently
-     * supported. Furthermore, all planes must use direct byte buffers.
+     * Compresses the given image to jpeg. Note that only
+     * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+     * must use direct byte buffers.
      *
      * @param img the image to compress
      * @param outBuf a direct byte buffer to hold the output jpeg.
+     * @param quality the jpeg encoder quality (0 to 100)
      * @return The number of bytes written to outBuf
      */
     public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality) {
@@ -119,4 +155,104 @@
 
         return numBytesWritten;
     }
+
+    /**
+     * Compresses the given image to jpeg. Note that only
+     * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+     * must use direct byte buffers.<br>
+     * FIXME TODO OPTIMIZE This method is *incredibly* inefficient.
+     *
+     * @param img the image to compress
+     * @param outBuf a direct byte buffer to hold the output jpeg.
+     * @param quality the jpeg encoder quality (0 to 100)
+     * @param degrees the amount to rotate the image clockwise, in degrees;
+     *            must be 0, 90, 180, or 270.
+     * @return The number of bytes written to outBuf
+     */
+    public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality,
+            int degrees) {
+        if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
+            throw new RuntimeException("Unsupported rotation angle");
+        }
+
+        if (degrees == 0) {
+            return compressJpegFromYUV420Image(img, outBuf, quality);
+        }
+
+        if (img.getFormat() != ImageFormat.YUV_420_888) {
+            throw new RuntimeException("Unsupported Image Format.");
+        }
+
+        final int NUM_PLANES = 3;
+
+        if (img.getPlanes().length != NUM_PLANES) {
+            throw new RuntimeException("Output buffer must be direct.");
+        }
+
+        if (!outBuf.isDirect()) {
+            throw new RuntimeException("Output buffer must be direct.");
+        }
+
+        ByteBuffer[] planeBuf = new ByteBuffer[NUM_PLANES];
+        int[] pixelStride = new int[NUM_PLANES];
+        int[] rowStride = new int[NUM_PLANES];
+
+        for (int i = 0; i < NUM_PLANES; i++) {
+            Plane plane = img.getPlanes()[i];
+
+            if (!plane.getBuffer().isDirect()) {
+                return -1;
+            }
+
+            int width = img.getWidth();
+            int height = img.getHeight();
+
+            if (i > 0) {
+                // The image plane for the Cb and Cr channels is downsampled.
+                width /= 2;
+                height /= 2;
+            }
+
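+            // A 90- or 270-degree rotation swaps the plane's width and height.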
+            if (degrees == 90 || degrees == 270) {
+                int tmp = width;
+                width = height;
+                height = tmp;
+            }
+
+            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
+
+            copyImagePlaneToBitmap(plane, bitmap, degrees / 90);
+
+            Bitmap rotatedBitmap = bitmap;
+
+            ByteBuffer rotatedBitmapBuffer = ByteBuffer.allocateDirect(
+                    rotatedBitmap.getWidth() * rotatedBitmap.getHeight());
+
+            rotatedBitmap.copyPixelsToBuffer(rotatedBitmapBuffer);
+
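+            // The bitmap's pixels are assumed to be tightly packed, so the
+            // rotated plane has a pixel stride of 1 and a row stride equal to
+            // its width.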
+            planeBuf[i] = rotatedBitmapBuffer;
+            pixelStride[i] = 1;
+            rowStride[i] = rotatedBitmap.getWidth();
+        }
+
+        outBuf.clear();
+
+        int width = img.getWidth();
+        int height = img.getHeight();
+        if (degrees == 90 || degrees == 270) {
+            int tmp = width;
+            width = height;
+            height = tmp;
+        }
+
+        int numBytesWritten = compressJpegFromYUV420p(
+                width, height,
+                planeBuf[0], pixelStride[0], rowStride[0],
+                planeBuf[1], pixelStride[1], rowStride[1],
+                planeBuf[2], pixelStride[2], rowStride[2],
+                outBuf, quality);
+
+        outBuf.limit(numBytesWritten);
+
+        return numBytesWritten;
+    }
 }