Add experimental RAW (DNG) support.

  Bug: 18272977

This adds RAW writing behind a new debug property:
adb shell setprop persist.camera.capture_dng 1

This will switch to the non-ZSL implementation of one Camera.

The resulting files are stored in a "DNG" folder under DCIM/Camera.

Caveats:
 - Preview aspect ratio will not match.
 - No item in filmstrip

Change-Id: I9d990de119f22a12d37b22d36d84dc15aeee9340
diff --git a/res/drawable/ic_panorama.xml b/res/drawable/ic_panorama.xml
deleted file mode 100644
index dc9f1d7..0000000
--- a/res/drawable/ic_panorama.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2013 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-
-<selector xmlns:android="http://schemas.android.com/apk/res/android"
-          android:constantSize="true">
-    <item android:state_enabled="false">
-        <bitmap
-            android:gravity="center"
-            android:src="@drawable/ic_panorama_disabled" />
-    </item>
-    <item>
-        <bitmap
-            android:gravity="center"
-            android:src="@drawable/ic_panorama_normal" />
-    </item>
-</selector>
diff --git a/src/com/android/camera/CaptureModuleUtil.java b/src/com/android/camera/CaptureModuleUtil.java
index 7be2550..a70dca9 100644
--- a/src/com/android/camera/CaptureModuleUtil.java
+++ b/src/com/android/camera/CaptureModuleUtil.java
@@ -54,6 +54,17 @@
      */
     public static Size getOptimalPreviewSize(Context context, Size[] sizes,
             double targetRatio) {
+        return getOptimalPreviewSize(context, sizes, targetRatio, null );
+    }
+
+    /**
+     * Returns the best preview size based on the current display resolution,
+     * the available preview sizes, the target aspect ratio (typically the
+     * aspect ratio of the picture to be taken) as well as a maximum allowed
+     * tolerance. If tolerance is 'null', a default tolerance will be used.
+     */
+    public static Size getOptimalPreviewSize(Context context, Size[] sizes,
+            double targetRatio, Double aspectRatioTolerance) {
         // TODO(andyhuibers): Don't hardcode this but use device's measurements.
         final int MAX_ASPECT_HEIGHT = 1080;
 
@@ -74,7 +85,8 @@
         }
 
         int optimalIndex = CameraUtil
-                .getOptimalPreviewSizeIndex(context, camera1Sizes, targetRatio);
+                .getOptimalPreviewSizeIndex(context, camera1Sizes, targetRatio,
+                        aspectRatioTolerance);
 
         if (optimalIndex == -1) {
             return null;
@@ -110,7 +122,7 @@
         }
 
         Size pick = CaptureModuleUtil.getOptimalPreviewSize(context, supportedPreviewSizes,
-                bestPreviewAspectRatio);
+                bestPreviewAspectRatio, null);
         Log.d(TAG, "Picked buffer size: " + pick.toString());
         return pick;
     }
diff --git a/src/com/android/camera/debug/DebugPropertyHelper.java b/src/com/android/camera/debug/DebugPropertyHelper.java
index dd1d82a..84adb13 100644
--- a/src/com/android/camera/debug/DebugPropertyHelper.java
+++ b/src/com/android/camera/debug/DebugPropertyHelper.java
@@ -38,6 +38,8 @@
     private static final String PROP_FORCE_LEGACY_ONE_CAMERA = PREFIX + ".legacy";
     /** Write data about each capture request to disk. */
     private static final String PROP_WRITE_CAPTURE_DATA = PREFIX + ".capture_write";
+    /** Is RAW support enabled. */
+    private static final String PROP_CAPTURE_DNG = PREFIX + ".capture_dng";
 
     private static boolean isPropertyOn(String property) {
         return ON_VALUE.equals(SystemProperties.get(property, OFF_VALUE));
@@ -62,4 +64,8 @@
     public static boolean writeCaptureData() {
         return isPropertyOn(PROP_WRITE_CAPTURE_DATA);
     }
+
+    public static boolean isCaptureDngEnabled() {
+        return isPropertyOn(PROP_CAPTURE_DNG);
+    }
 }
diff --git a/src/com/android/camera/one/OneCamera.java b/src/com/android/camera/one/OneCamera.java
index 743be37..24e0a5c 100644
--- a/src/com/android/camera/one/OneCamera.java
+++ b/src/com/android/camera/one/OneCamera.java
@@ -342,9 +342,9 @@
     public void close(CloseCallback closeCallback);
 
     /**
-     * @return A list of all supported resolutions.
+     * @return A list of all supported preview resolutions.
      */
-    public Size[] getSupportedSizes();
+    public Size[] getSupportedPreviewSizes();
 
     /**
      * @return The aspect ratio of the full size capture (usually the native
diff --git a/src/com/android/camera/one/v2/OneCameraImpl.java b/src/com/android/camera/one/v2/OneCameraImpl.java
index 9c286df..b51b2fe 100644
--- a/src/com/android/camera/one/v2/OneCameraImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraImpl.java
@@ -16,9 +16,11 @@
 
 package com.android.camera.one.v2;
 
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.graphics.ImageFormat;
 import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
 import android.hardware.camera2.CameraCharacteristics;
@@ -26,12 +28,14 @@
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.DngCreator;
 import android.hardware.camera2.TotalCaptureResult;
 import android.hardware.camera2.params.MeteringRectangle;
 import android.hardware.camera2.params.StreamConfigurationMap;
 import android.media.Image;
 import android.media.ImageReader;
 import android.net.Uri;
+import android.os.Build;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.SystemClock;
@@ -39,6 +43,7 @@
 
 import com.android.camera.CaptureModuleUtil;
 import com.android.camera.Exif;
+import com.android.camera.Storage;
 import com.android.camera.app.MediaSaver.OnMediaSavedListener;
 import com.android.camera.debug.DebugPropertyHelper;
 import com.android.camera.debug.Log;
@@ -56,6 +61,7 @@
 import com.android.camera.util.Size;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -66,18 +72,41 @@
  * {@link OneCamera} implementation directly on top of the Camera2 API for
  * cameras without API 2 FULL support (limited or legacy).
  */
+@TargetApi(Build.VERSION_CODES.L)
 public class OneCameraImpl extends AbstractOneCamera {
 
     /** Captures that are requested but haven't completed yet. */
     private static class InFlightCapture {
         final PhotoCaptureParameters parameters;
         final CaptureSession session;
+        Image image;
+        TotalCaptureResult totalCaptureResult;
 
         public InFlightCapture(PhotoCaptureParameters parameters,
                 CaptureSession session) {
             this.parameters = parameters;
             this.session = session;
         }
+
+        /** Set the image once it's been received. */
+        public InFlightCapture setImage(Image capturedImage) {
+            image = capturedImage;
+            return this;
+        }
+
+        /** Set the total capture result once it's been received. */
+        public InFlightCapture setCaptureResult(TotalCaptureResult result) {
+            totalCaptureResult = result;
+            return this;
+        }
+
+        /**
+         * Returns whether the capture is complete (which is the case once the
+         * image and capture result are both present).
+         */
+        boolean isCaptureComplete() {
+            return image != null && totalCaptureResult != null;
+        }
     }
 
     private static final Tag TAG = new Tag("OneCameraImpl2");
@@ -92,15 +121,17 @@
 
     /**
      * Set to ImageFormat.JPEG, to use the hardware encoder, or
-     * ImageFormat.YUV_420_888 to use the software encoder. No other image
-     * formats are supported.
+     * ImageFormat.YUV_420_888 to use the software encoder. You can also try
+     * RAW_SENSOR experimentally.
      */
-    private static final int sCaptureImageFormat = ImageFormat.JPEG;
+    private static final int sCaptureImageFormat = DebugPropertyHelper.isCaptureDngEnabled() ?
+            ImageFormat.RAW_SENSOR : ImageFormat.JPEG;
 
     /** Duration to hold after manual focus tap. */
     private static final int FOCUS_HOLD_MILLIS = Settings3A.getFocusHoldMillis();
     /** Zero weight 3A region, to reset regions per API. */
-    MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();
+    private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper
+            .getZeroWeightRegion();
 
     /**
      * CaptureRequest tags.
@@ -118,6 +149,9 @@
         TAP_TO_FOCUS
     }
 
+    /** Directory to store raw DNG files in. */
+    private static final File RAW_DIRECTORY = new File(Storage.DIRECTORY, "DNG");
+
     /** Current CONTROL_AF_MODE request to Camera2 API. */
     private int mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
     /** Last OneCamera.AutoFocusState reported. */
@@ -154,12 +188,14 @@
     /**
      * Common listener for preview frame metadata.
      */
-    private final CameraCaptureSession.CaptureCallback mAutoFocusStateListener = new
-            CameraCaptureSession.CaptureCallback() {
+    private final CameraCaptureSession.CaptureCallback mCaptureCallback =
+            new CameraCaptureSession.CaptureCallback() {
                 @Override
-                public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
-                                             long timestamp, long frameNumber) {
-                    if (request.getTag() == RequestTag.CAPTURE && mLastPictureCallback != null) {
+                public void onCaptureStarted(CameraCaptureSession session,
+                        CaptureRequest request, long timestamp,
+                        long frameNumber) {
+                    if (request.getTag() == RequestTag.CAPTURE
+                            && mLastPictureCallback != null) {
                         mLastPictureCallback.onQuickExpose();
                     }
                 }
@@ -168,25 +204,40 @@
                 // onCaptureCompleted(), so we take advantage of that.
                 @Override
                 public void onCaptureProgressed(CameraCaptureSession session,
-                        CaptureRequest request,
-                        CaptureResult partialResult) {
+                        CaptureRequest request, CaptureResult partialResult) {
                     autofocusStateChangeDispatcher(partialResult);
                     super.onCaptureProgressed(session, request, partialResult);
                 }
 
                 @Override
                 public void onCaptureCompleted(CameraCaptureSession session,
-                        CaptureRequest request,
-                        TotalCaptureResult result) {
+                        CaptureRequest request, TotalCaptureResult result) {
                     autofocusStateChangeDispatcher(result);
-                    // This checks for a HAL implementation error where TotalCaptureResult
-                    // is missing CONTROL_AF_STATE.  This should not happen.
+                    // This checks for a HAL implementation error where
+                    // TotalCaptureResult
+                    // is missing CONTROL_AF_STATE. This should not happen.
                     if (result.get(CaptureResult.CONTROL_AF_STATE) == null) {
                         AutoFocusHelper.checkControlAfState(result);
                     }
                     if (DEBUG_FOCUS_LOG) {
                         AutoFocusHelper.logExtraFocusInfo(result);
                     }
+
+                    if (request.getTag() == RequestTag.CAPTURE) {
+                        // Add the capture result to the latest in-flight
+                        // capture. If all the data for that capture is
+                        // complete, store the image on disk.
+                        InFlightCapture capture = null;
+                        synchronized (mCaptureQueue) {
+                            if (mCaptureQueue.getFirst().setCaptureResult(result)
+                                    .isCaptureComplete()) {
+                                capture = mCaptureQueue.removeFirst();
+                            }
+                        }
+                        if (capture != null) {
+                            OneCameraImpl.this.onCaptureCompleted(capture);
+                        }
+                    }
                     super.onCaptureCompleted(session, request, result);
                 }
             };
@@ -225,16 +276,19 @@
             new ImageReader.OnImageAvailableListener() {
                 @Override
                 public void onImageAvailable(ImageReader reader) {
-                    InFlightCapture capture = mCaptureQueue.remove();
-
-                    // Since this is not an HDR+ session, we will just save the
-                    // result.
-                    capture.session.startEmpty();
-                    byte[] imageBytes = acquireJpegBytesAndClose(reader);
-                    // TODO: The savePicture call here seems to block UI thread.
-                    savePicture(imageBytes, capture.parameters, capture.session);
-                    broadcastReadyState(true);
-                    capture.parameters.callback.onPictureTaken(capture.session);
+                    // Add the image data to the latest in-flight capture.
+                    // If all the data for that capture is complete, store the
+                    // image data.
+                    InFlightCapture capture = null;
+                    synchronized (mCaptureQueue) {
+                        if (mCaptureQueue.getFirst().setImage(reader.acquireLatestImage())
+                                .isCaptureComplete()) {
+                            capture = mCaptureQueue.removeFirst();
+                        }
+                    }
+                    if (capture != null) {
+                        onCaptureCompleted(capture);
+                    }
                 }
             };
 
@@ -250,6 +304,16 @@
         mCharacteristics = characteristics;
         mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);
 
+        // Override pictureSize for RAW (our picture size settings don't include
+        // RAW), which typically only supports one size (sensor size). This also
+        // typically differs from the largest JPEG or YUV size.
+        // TODO: If we ever want to support RAW properly, it should be one entry
+        // in the picture quality list, which should then lead to the right
+        // pictureSize being passed into here.
+        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
+            pictureSize = getDefaultPictureSize();
+        }
+
         mCameraThread = new HandlerThread("OneCamera2");
         mCameraThread.start();
         mCameraHandler = new Handler(mCameraThread.getLooper());
@@ -310,7 +374,11 @@
             builder.setTag(RequestTag.CAPTURE);
             addBaselineCaptureKeysToRequest(builder);
 
-            if (sCaptureImageFormat == ImageFormat.JPEG) {
+            // Enable lens-shading correction for even better DNGs.
+            if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
+                builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
+                        CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
+            } else if (sCaptureImageFormat == ImageFormat.JPEG) {
                 builder.set(CaptureRequest.JPEG_QUALITY, JPEG_QUALITY);
                 builder.set(CaptureRequest.JPEG_ORIENTATION,
                         CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
@@ -328,14 +396,16 @@
                         "capture.txt"));
             }
 
-            mCaptureSession.capture(request, mAutoFocusStateListener, mCameraHandler);
+            mCaptureSession.capture(request, mCaptureCallback, mCameraHandler);
         } catch (CameraAccessException e) {
             Log.e(TAG, "Could not access camera for still image capture.");
             broadcastReadyState(true);
             params.callback.onPictureTakenFailed();
             return;
         }
-        mCaptureQueue.add(new InFlightCapture(params, session));
+        synchronized (mCaptureQueue) {
+            mCaptureQueue.add(new InFlightCapture(params, session));
+        }
     }
 
     @Override
@@ -366,7 +436,9 @@
             return;
         }
         try {
-            mCaptureSession.abortCaptures();
+            if (mCaptureSession != null) {
+                mCaptureSession.abortCaptures();
+            }
         } catch (CameraAccessException e) {
             Log.e(TAG, "Could not abort captures in progress.");
         }
@@ -377,10 +449,10 @@
     }
 
     @Override
-    public Size[] getSupportedSizes() {
+    public Size[] getSupportedPreviewSizes() {
         StreamConfigurationMap config = mCharacteristics
                 .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-        return Size.convert(config.getOutputSizes(sCaptureImageFormat));
+        return Size.convert(config.getOutputSizes(SurfaceTexture.class));
     }
 
     @Override
@@ -400,7 +472,7 @@
                 == CameraMetadata.LENS_FACING_BACK;
     }
 
-    private void savePicture(byte[] jpegData, final PhotoCaptureParameters captureParams,
+    private void saveJpegPicture(byte[] jpegData, final PhotoCaptureParameters captureParams,
             CaptureSession session) {
         int heading = captureParams.heading;
         int width = 0;
@@ -512,7 +584,8 @@
     }
 
     /**
-     * Adds current regions to CaptureRequest and base AF mode + AF_TRIGGER_IDLE.
+     * Adds current regions to CaptureRequest and base AF mode +
+     * AF_TRIGGER_IDLE.
      *
      * @param builder Build for the CaptureRequest
      */
@@ -542,7 +615,7 @@
             builder.addTarget(mPreviewSurface);
             builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
             addBaselineCaptureKeysToRequest(builder);
-            mCaptureSession.setRepeatingRequest(builder.build(), mAutoFocusStateListener,
+            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureCallback,
                     mCameraHandler);
             Log.v(TAG, String.format("Sent repeating Preview request, zoom = %.2f", mZoomValue));
             return true;
@@ -566,7 +639,7 @@
             addBaselineCaptureKeysToRequest(builder);
             builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
             builder.setTag(tag);
-            mCaptureSession.capture(builder.build(), mAutoFocusStateListener, mCameraHandler);
+            mCaptureSession.capture(builder.build(), mCaptureCallback, mCameraHandler);
 
             // Step 2: Call repeatingPreview to update mControlAFMode.
             repeatingPreview(tag);
@@ -624,9 +697,11 @@
     @Override
     public void triggerFocusAndMeterAtPoint(float nx, float ny) {
         int sensorOrientation = mCharacteristics.get(
-            CameraCharacteristics.SENSOR_ORIENTATION);
-        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
-        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
+                CameraCharacteristics.SENSOR_ORIENTATION);
+        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion,
+                sensorOrientation);
+        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion,
+                sensorOrientation);
 
         sendAutoFocusTriggerCaptureRequest(RequestTag.TAP_TO_FOCUS);
     }
@@ -645,9 +720,21 @@
 
     @Override
     public Size pickPreviewSize(Size pictureSize, Context context) {
+        if (pictureSize == null) {
+            // TODO The default should be selected by the caller, and
+            // pictureSize should never be null.
+            pictureSize = getDefaultPictureSize();
+        }
         float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
-        return CaptureModuleUtil.getOptimalPreviewSize(context, getSupportedSizes(),
-                pictureAspectRatio);
+        Size[] supportedSizes = getSupportedPreviewSizes();
+
+        // Since devices only have one raw resolution we need to be more
+        // flexible for selecting a matching preview resolution.
+        Double aspectRatioTolerance = sCaptureImageFormat == ImageFormat.RAW_SENSOR ? 10d : null;
+        Size size = CaptureModuleUtil.getOptimalPreviewSize(context, supportedSizes,
+                pictureAspectRatio, aspectRatioTolerance);
+        Log.d(TAG, "Selected preview size: " + size);
+        return size;
     }
 
     private Rect cropRegionForZoom(float zoom) {
@@ -664,34 +751,91 @@
     private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
         Rect activeArraySize =
                 characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-        return ((float)(activeArraySize.width())) / activeArraySize.height();
+        return ((float) (activeArraySize.width())) / activeArraySize.height();
+    }
+
+    /**
+     * Called when a capture that is in flight is completed.
+     *
+     * @param capture the in-flight capture which needs to contain the received
+     *            image and capture data
+     */
+    private void onCaptureCompleted(InFlightCapture capture) {
+
+        // Experimental support for writing RAW. We do not have a usable JPEG
+        // here, so we don't use the usual capture session mechanism and instead
+        // just store the RAW file in its own directory.
+     * TODO: If we make this a real feature, we should probably put the DNGs
+     * directly into the Camera directory.
+        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
+            if (!RAW_DIRECTORY.exists()) {
+                if (!RAW_DIRECTORY.mkdirs()) {
+                    throw new RuntimeException("Could not create RAW directory.");
+                }
+            }
+            File dngFile = new File(RAW_DIRECTORY, capture.session.getTitle() + ".dng");
+            writeDngBytesAndClose(capture.image, capture.totalCaptureResult,
+                    mCharacteristics, dngFile);
+        } else {
+            // Since this is not an HDR+ session, we will just save the
+            // result.
+            capture.session.startEmpty();
+            byte[] imageBytes = acquireJpegBytesAndClose(capture.image);
+            saveJpegPicture(imageBytes, capture.parameters, capture.session);
+        }
+        broadcastReadyState(true);
+        capture.parameters.callback.onPictureTaken(capture.session);
     }
 
     /**
-     * Given an image reader, extracts the JPEG image bytes and then closes the
-     * reader.
+     * Take the given RAW image and capture result, convert it to a DNG and
+     * write it to disk.
      *
-     * @param reader the reader to read the JPEG data from.
-     * @return The bytes of the JPEG image. Newly allocated.
+     * @param image the image containing the 16-bit RAW data (RAW_SENSOR)
+     * @param captureResult the capture result for the image
+     * @param characteristics the camera characteristics of the camera that took
+     *            the RAW image
+     * @param dngFile the destination to where the resulting DNG data is written
+     *            to
      */
-    private static byte[] acquireJpegBytesAndClose(ImageReader reader) {
-        Image img = reader.acquireLatestImage();
+    private static void writeDngBytesAndClose(Image image, TotalCaptureResult captureResult,
+            CameraCharacteristics characteristics, File dngFile) {
+        try (DngCreator dngCreator = new DngCreator(characteristics, captureResult);
+                FileOutputStream outputStream = new FileOutputStream(dngFile)) {
+            // TODO: Add DngCreator#setThumbnail and add the DNG to the normal
+            // filmstrip.
+            dngCreator.writeImage(outputStream, image);
+            outputStream.close();
+            image.close();
+        } catch (IOException e) {
+            Log.e(TAG, "Could not store DNG file", e);
+            return;
+        }
+        Log.i(TAG, "Successfully stored DNG file: " + dngFile.getAbsolutePath());
+    }
 
+    /**
+     * Given an image reader, this extracts the final image. If the image in the
+     * reader is JPEG, we extract and return it as is. If the image is YUV, we
+     * convert it to JPEG and return the result.
+     *
+     * @param image the image we got from the image reader.
+     * @return A valid JPEG image.
+     */
+    private static byte[] acquireJpegBytesAndClose(Image image) {
         ByteBuffer buffer;
-
-        if (img.getFormat() == ImageFormat.JPEG) {
-            Image.Plane plane0 = img.getPlanes()[0];
+        if (image.getFormat() == ImageFormat.JPEG) {
+            Image.Plane plane0 = image.getPlanes()[0];
             buffer = plane0.getBuffer();
-        } else if (img.getFormat() == ImageFormat.YUV_420_888) {
-            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
+        } else if (image.getFormat() == ImageFormat.YUV_420_888) {
+            buffer = ByteBuffer.allocateDirect(image.getWidth() * image.getHeight() * 3);
 
             Log.v(TAG, "Compressing JPEG with software encoder.");
-            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY);
+            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(image, buffer, JPEG_QUALITY);
 
             if (numBytes < 0) {
                 throw new RuntimeException("Error compressing jpeg.");
             }
-
             buffer.limit(numBytes);
         } else {
             throw new RuntimeException("Unsupported image format.");
@@ -700,7 +844,29 @@
         byte[] imageBytes = new byte[buffer.remaining()];
         buffer.get(imageBytes);
         buffer.rewind();
-        img.close();
+        image.close();
         return imageBytes;
     }
-}
+
+    /**
+     * @return The largest supported picture size.
+     */
+    public Size getDefaultPictureSize() {
+        StreamConfigurationMap configs =
+                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);
+
+        // Find the largest supported size.
+        android.util.Size largestSupportedSize = supportedSizes[0];
+        long largestSupportedSizePixels =
+                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
+        for (int i = 1; i < supportedSizes.length; i++) {
+            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
+            if (numPixels > largestSupportedSizePixels) {
+                largestSupportedSize = supportedSizes[i];
+                largestSupportedSizePixels = numPixels;
+            }
+        }
+        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
+    }
+}
\ No newline at end of file
diff --git a/src/com/android/camera/one/v2/OneCameraZslImpl.java b/src/com/android/camera/one/v2/OneCameraZslImpl.java
index e3d2444..ed9aa02 100644
--- a/src/com/android/camera/one/v2/OneCameraZslImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraZslImpl.java
@@ -584,7 +584,7 @@
     }
 
     @Override
-    public Size[] getSupportedSizes() {
+    public Size[] getSupportedPreviewSizes() {
         StreamConfigurationMap config =
                 mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         return Size.convert(config.getOutputSizes(sCaptureImageFormat));
@@ -1061,7 +1061,7 @@
             pictureSize = getDefaultPictureSize();
         }
         float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
-        return CaptureModuleUtil.getOptimalPreviewSize(context, getSupportedSizes(),
+        return CaptureModuleUtil.getOptimalPreviewSize(context, getSupportedPreviewSizes(),
                 pictureAspectRatio);
     }
 
diff --git a/src/com/android/camera/util/CameraUtil.java b/src/com/android/camera/util/CameraUtil.java
index d6237a9..ccd12e7 100644
--- a/src/com/android/camera/util/CameraUtil.java
+++ b/src/com/android/camera/util/CameraUtil.java
@@ -203,9 +203,9 @@
      * we round up the sample size to avoid OOM.
      */
     public static int computeSampleSize(BitmapFactory.Options options,
-    int minSideLength, int maxNumOfPixels) {
+            int minSideLength, int maxNumOfPixels) {
         int initialSize = computeInitialSampleSize(options, minSideLength,
-      maxNumOfPixels);
+                maxNumOfPixels);
 
         int roundedSize;
         if (initialSize <= 8) {
@@ -377,26 +377,25 @@
 
     /**
      * Given (nx, ny) \in [0, 1]^2, in the display's portrait coordinate system,
-     * returns normalized sensor coordinates \in [0, 1]^2 depending on how
-     * the sensor's orientation \in {0, 90, 180, 270}.
-     *
+     * returns normalized sensor coordinates \in [0, 1]^2 depending on how the
+     * sensor's orientation \in {0, 90, 180, 270}.
      * <p>
      * Returns null if sensorOrientation is not one of the above.
      * </p>
      */
     public static PointF normalizedSensorCoordsForNormalizedDisplayCoords(
-        float nx, float ny, int sensorOrientation) {
+            float nx, float ny, int sensorOrientation) {
         switch (sensorOrientation) {
-        case 0:
-            return new PointF(nx, ny);
-        case 90:
-            return new PointF(ny, 1.0f - nx);
-        case 180:
-            return new PointF(1.0f - nx, 1.0f - ny);
-        case 270:
-            return new PointF(1.0f - ny, nx);
-        default:
-            return null;
+            case 0:
+                return new PointF(nx, ny);
+            case 90:
+                return new PointF(ny, 1.0f - nx);
+            case 180:
+                return new PointF(1.0f - nx, 1.0f - ny);
+            case 270:
+                return new PointF(1.0f - ny, nx);
+            default:
+                return null;
         }
     }
 
@@ -510,22 +509,64 @@
         }
     }
 
+    /**
+     * Returns the index into 'sizes' that is most optimal given the current
+     * screen and target aspect ratio.
+     * <p>
+     * This uses a default aspect ratio tolerance. To supply a specific
+     * tolerance, call
+     * {@link #getOptimalPreviewSizeIndex(Context, List, double, Double)}.
+     *
+     * @param context used to get the screen dimensions. TODO: Refactor to take
+     *            in screen dimensions directly
+     * @param sizes the available preview sizes
+     * @param targetRatio the target aspect ratio, typically the aspect ratio of
+     *            the picture size
+     * @return The index into 'sizes' for the optimal size, or -1, if no
+     *         matching size was found.
+     */
     public static int getOptimalPreviewSizeIndex(Context context,
             List<Size> sizes, double targetRatio) {
         // Use a very small tolerance because we want an exact match.
-        final double ASPECT_TOLERANCE;
+        final double aspectRatioTolerance;
         // HTC 4:3 ratios is over .01 from true 4:3, targeted fix for those
         // devices here, see b/18241645
         if (ApiHelper.IS_HTC && targetRatio > 1.3433 && targetRatio < 1.35) {
             Log.w(TAG, "4:3 ratio out of normal tolerance, increasing tolerance to 0.02");
-            ASPECT_TOLERANCE = 0.02;
+            aspectRatioTolerance = 0.02;
         } else {
-            ASPECT_TOLERANCE = 0.01;
+            aspectRatioTolerance = 0.01;
         }
-        if (sizes == null) {
+        return getOptimalPreviewSizeIndex(context, sizes, targetRatio, aspectRatioTolerance);
+    }
+
+    /**
+     * Returns the index into 'sizes' that is most optimal given the current
+     * screen, target aspect ratio and tolerance.
+     *
+     * @param context used to get the screen dimensions. TODO: Refactor to take
+     *            in screen dimensions directly
+     * @param previewSizes the available preview sizes
+     * @param targetRatio the target aspect ratio, typically the aspect ratio of
+     *            the picture size
+     * @param aspectRatioTolerance the tolerance we allow between the selected
+     *            preview size's aspect ratio and the target ratio. If this is
+     *            set to 'null', the default value is used.
+     * @return The index into 'previewSizes' for the optimal size, or -1, if no
+     *         matching size was found.
+     */
+    public static int getOptimalPreviewSizeIndex(Context context,
+            List<Size> previewSizes, double targetRatio, Double aspectRatioTolerance) {
+        if (previewSizes == null) {
             return -1;
         }
 
+        // If no particular aspect ratio tolerance is set, use the default
+        // value.
+        if (aspectRatioTolerance == null) {
+            return getOptimalPreviewSizeIndex(context, previewSizes, targetRatio);
+        }
+
         int optimalSizeIndex = -1;
         double minDiff = Double.MAX_VALUE;
 
@@ -537,10 +578,10 @@
         Size defaultDisplaySize = getDefaultDisplaySize(context);
         int targetHeight = Math.min(defaultDisplaySize.getWidth(), defaultDisplaySize.getHeight());
         // Try to find an size match aspect ratio and size
-        for (int i = 0; i < sizes.size(); i++) {
-            Size size = sizes.get(i);
+        for (int i = 0; i < previewSizes.size(); i++) {
+            Size size = previewSizes.get(i);
             double ratio = (double) size.getWidth() / size.getHeight();
-            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) {
+            if (Math.abs(ratio - targetRatio) > aspectRatioTolerance) {
                 continue;
             }
 
@@ -560,10 +601,10 @@
         // Cannot find the one match the aspect ratio. This should not happen.
         // Ignore the requirement.
         if (optimalSizeIndex == -1) {
-            Log.w(TAG, "No preview size match the aspect ratio. available sizes: " + sizes);
+            Log.w(TAG, "No preview size match the aspect ratio. available sizes: " + previewSizes);
             minDiff = Double.MAX_VALUE;
-            for (int i = 0; i < sizes.size(); i++) {
-                Size size = sizes.get(i);
+            for (int i = 0; i < previewSizes.size(); i++) {
+                Size size = previewSizes.get(i);
                 if (Math.abs(size.getHeight() - targetHeight) < minDiff) {
                     optimalSizeIndex = i;
                     minDiff = Math.abs(size.getHeight() - targetHeight);
@@ -576,11 +617,11 @@
 
     /**
      * Returns the largest picture size which matches the given aspect ratio,
-     * except for the special WYSIWYG case where the picture size exactly matches
-     * the target size.
+     * except for the special WYSIWYG case where the picture size exactly
+     * matches the target size.
      *
-     * @param sizes        a list of candidate sizes, available for use
-     * @param targetWidth  the ideal width of the video snapshot
+     * @param sizes a list of candidate sizes, available for use
+     * @param targetWidth the ideal width of the video snapshot
      * @param targetHeight the ideal height of the video snapshot
      * @return the Optimal Video Snapshot Picture Size
      */
@@ -596,9 +637,9 @@
 
         com.android.ex.camera2.portability.Size optimalSize = null;
 
-        //  WYSIWYG Override
-        //  We assume that physical display constraints have already been
-        //  imposed on the variables sizes
+        // WYSIWYG Override
+        // We assume that physical display constraints have already been
+        // imposed on the variables sizes
         for (com.android.ex.camera2.portability.Size size : sizes) {
             if (size.height() == targetHeight && size.width() == targetWidth) {
                 return size;
@@ -1094,7 +1135,7 @@
 
     public static void playVideo(Activity activity, Uri uri, String title) {
         try {
-            CameraActivity cameraActivity = (CameraActivity)activity;
+            CameraActivity cameraActivity = (CameraActivity) activity;
             boolean isSecureCamera = cameraActivity.isSecureCamera();
             if (!isSecureCamera) {
                 Intent intent = IntentHelper.getVideoPlayerIntent(uri)