Merge "Move preference subscreens to new Activities from Dialogs" into ub-camera-glacier
diff --git a/res/values-in/strings.xml b/res/values-in/strings.xml
index b72fb78..96c65b2 100644
--- a/res/values-in/strings.xml
+++ b/res/values-in/strings.xml
@@ -210,7 +210,7 @@
     <string name="media_processing_content_description" msgid="8138587719107183754">"Media sedang diproses"</string>
     <string name="accessibility_mode_list_toggle" msgid="4784230103566918645">"Alihkan daftar mode"</string>
     <string name="btn_mode_list_toggle" msgid="3986242443098400164">"Daftar mode"</string>
-    <string name="accessibility_filmstrip_toggle" msgid="6966978483643396442">"Alihkan strip film"</string>
+    <string name="accessibility_filmstrip_toggle" msgid="6966978483643396442">"Alihkan setrip film"</string>
     <string name="btn_filmstrip_toggle" msgid="3666693972882351490">"Strip film"</string>
     <string name="capital_on" msgid="1118214824959797269">"NYALA"</string>
     <string name="capital_off" msgid="3020696135020167263">"MATI"</string>
@@ -276,7 +276,7 @@
     <string name="photo_editor" msgid="1521994560971367225">"Editor Foto"</string>
     <string name="crop_save" msgid="2841974981340098579">"Simpan"</string>
     <string name="cannot_load_image" msgid="4100136187076585580">"Tidak dapat memuat gambar!"</string>
-    <string name="switch_photo_filmstrip" msgid="1448511001008888767">"Tampilan strip film"</string>
+    <string name="switch_photo_filmstrip" msgid="1448511001008888767">"Tampilan setrip film"</string>
     <string name="setting_wallpaper" msgid="2397759659347872725">"Menyetel wallpaper"</string>
     <string name="mode_settings" msgid="2021937261522670921">"Setelan"</string>
     <string name="mode_camera" msgid="279763925715250603">"Kamera"</string>
diff --git a/res/values-km-rKH/strings.xml b/res/values-km-rKH/strings.xml
index f9b266b..766e070 100644
--- a/res/values-km-rKH/strings.xml
+++ b/res/values-km-rKH/strings.xml
@@ -22,7 +22,7 @@
     <string name="details_hms" msgid="4842276230698703554">"%1$d:%2$02d:%3$02d"</string>
     <string name="set_image" msgid="3969690281401045698">"កំណត់​រូបភាព​ជា"</string>
     <string name="delete" msgid="2714492172818940424">"លុប"</string>
-    <string name="share" msgid="8581089487762243115">"ចែក​រំលែក"</string>
+    <string name="share" msgid="8581089487762243115">"ចែក​រំលែក​"</string>
     <string name="share_panorama" msgid="3558466186935359444">"ចែករំលែក​ទេសភាព"</string>
     <string name="share_as_photo" msgid="4831213580709167218">"ចែករំលែក​ជា​រូបថត"</string>
     <string name="deleted" msgid="2036165115527228127">"បាន​លុប"</string>
@@ -195,7 +195,7 @@
     <string name="accessibility_review_ok" msgid="3486465319880320270">"បាន​ពិនិត្យ​រួចរាល់"</string>
     <string name="accessibility_review_retake" msgid="2547112860787022130">"ពិនិត្យ​ឡើងវិញ"</string>
     <string name="accessibility_mode_options" msgid="6376831760155403217">"ជម្រើស"</string>
-    <string name="accessibility_mode_list_hidden" msgid="3743267380450401866">"បាន​​បិទ​បញ្ជី​​របៀប"</string>
+    <string name="accessibility_mode_list_hidden" msgid="3743267380450401866">"បាន​​បិទ​បញ្ជី​​របៀប​"</string>
     <string name="accessibility_mode_list_shown" msgid="5284322142734069179">"បាន​បើក​បញ្ជី​របៀប"</string>
     <string name="media_accessibility_peek" msgid="234540330161031946">"ចាប់​យក"</string>
     <string name="photo_accessibility_peek" msgid="5934133371001677250">"បាន​ថតរូប"</string>
@@ -255,9 +255,9 @@
     <string name="pref_video_time_lapse_frame_interval_43200000" msgid="6426833030111269499">"១២ ម៉ោង"</string>
     <string name="pref_video_time_lapse_frame_interval_54000000" msgid="6688725497680331090">"១៥ ម៉ោង"</string>
     <string name="pref_video_time_lapse_frame_interval_86400000" msgid="5510320806095156153">"២៤ ម៉ោង"</string>
-    <string name="time_lapse_seconds" msgid="7319683099532506270">"វិនាទី"</string>
-    <string name="time_lapse_minutes" msgid="5325447383033224679">"នាទី"</string>
-    <string name="time_lapse_hours" msgid="5294001144133261436">"ម៉ោង"</string>
+    <string name="time_lapse_seconds" msgid="7319683099532506270">"វិនាទី​"</string>
+    <string name="time_lapse_minutes" msgid="5325447383033224679">"នាទី​"</string>
+    <string name="time_lapse_hours" msgid="5294001144133261436">"ម៉ោង​"</string>
     <string name="time_lapse_interval_set" msgid="2418594453248958440">"រួចរាល់"</string>
     <string name="set_time_interval" msgid="2531393962847535331">"កំណត់​ចន្លោះ​ពេល"</string>
     <string name="set_time_interval_help" msgid="64145154088021389">"បាន​បិទ​​លក្ខណៈ​ពេលវេលា​កន្លង​ទៅ។ បើក​វា​ ដើម្បី​កំណត់​ចន្លោះ​ពេល។"</string>
@@ -280,7 +280,7 @@
     <string name="setting_wallpaper" msgid="2397759659347872725">"​កំណត់​ផ្ទាំង​រូបភាព"</string>
     <string name="mode_settings" msgid="2021937261522670921">"ការ​កំណត់"</string>
     <string name="mode_camera" msgid="279763925715250603">"ម៉ាស៊ីន​ថត"</string>
-    <string name="mode_video" msgid="8633929034048169139">"វីដេអូ"</string>
+    <string name="mode_video" msgid="8633929034048169139">"វីដេអូ​"</string>
     <string name="mode_photosphere" msgid="5082338476237291833">"រូបថត​វិល​ជុំ"</string>
     <string name="mode_timelapse" msgid="1517168724627815453">"ពេលវេលា​កន្លងទៅ"</string>
     <string name="mode_wideangle" msgid="1099640345041696830">"មុំ​ពេញ"</string>
@@ -300,7 +300,7 @@
     <string name="flash_off_desc" msgid="7151559466286106797">"បិទ​ការ​បាញ់​ពន្លឺ"</string>
     <string name="flash_auto_desc" msgid="3009043125539675717">"បាញ់​ពន្លឺ​ស្វ័យ​ប្រវត្តិ"</string>
     <string name="flash_on_desc" msgid="930372145324854699">"បើក​ការ​បាញ់​ពន្លឺ"</string>
-    <string name="hdr_plus_flash_off_desc" msgid="5335888906983788789">"HDR+ បិទ​ពន្លឺ"</string>
+    <string name="hdr_plus_flash_off_desc" msgid="5335888906983788789">"HDR+ បិទ​ពន្លឺ​"</string>
     <string name="hdr_plus_flash_auto_desc" msgid="4812200236263011537">"HDR+ បាញ់​ពន្លឺ​ស្វ័យប្រវត្តិ"</string>
     <string name="hdr_plus_flash_on_desc" msgid="8323389161987561284">"HDR+ បើក​ពន្លឺ"</string>
     <string name="torch_on_desc" msgid="3069836196559213365">"បើក​ពិល"</string>
@@ -316,7 +316,7 @@
     <string name="cancel_button_description" msgid="3801167024006905033">"បោះបង់"</string>
     <string name="done_button_description" msgid="1334963435441544592">"រួចរាល់"</string>
     <string name="retake_button_description" msgid="4234613030674787714">"ថត​ឡើងវិញ"</string>
-    <string name="share_button_description" msgid="5108508790540832053">"ចែករំលែក"</string>
+    <string name="share_button_description" msgid="5108508790540832053">"ចែករំលែក​"</string>
     <string name="view_button_description" msgid="4985768837891362075">"មើល"</string>
     <string name="edit_button_description" msgid="98550816724901925">"កែសម្រួល"</string>
     <string name="delete_button_description" msgid="2251065309677200911">"លុប"</string>
diff --git a/src/com/android/camera/app/CameraApp.java b/src/com/android/camera/app/CameraApp.java
index 4bcf83c..6c35c53 100644
--- a/src/com/android/camera/app/CameraApp.java
+++ b/src/com/android/camera/app/CameraApp.java
@@ -19,6 +19,7 @@
 import android.app.Application;
 import android.app.NotificationManager;
 import android.content.Context;
+import android.os.Debug;
 
 import com.android.camera.MediaSaverImpl;
 import com.android.camera.debug.LogHelper;
@@ -40,6 +41,15 @@
  * to be used across modules.
  */
 public class CameraApp extends Application implements CameraServices {
+    /**
+     * This is for debugging only: if set to true, the application will not
+     * start until a debugger is attached.
+     * <p>
+     * Use this if you need to debug code that is executed while the app starts
+     * up and it would be too late to attach a debugger afterwards.
+     */
+    private static final boolean WAIT_FOR_DEBUGGER_ON_START = false;
+
     private MediaSaver mMediaSaver;
     private CaptureSessionManager mSessionManager;
     private SessionStorageManager mSessionStorageManager;
@@ -53,11 +63,15 @@
     public void onCreate() {
         super.onCreate();
 
+        if (WAIT_FOR_DEBUGGER_ON_START) {
+            Debug.waitForDebugger();
+        }
+
         Context context = getApplicationContext();
         LogHelper.initialize(context);
 
-        // It is important that this gets called early in execution before the app has had
-        // the opportunity to create any shared preferences.
+        // It is important that this gets called early in execution before the
+        // app has had the opportunity to create any shared preferences.
         UsageStatistics.instance().initialize(this);
         SessionStatsCollector.instance().initialize(this);
         CameraUtil.initialize(this);
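
For reference, a minimal sketch of the start-up guard added above, shown in isolation so the same pattern can be reused while debugging other components; the class name is hypothetical and the flag would only be flipped in a local build:

    // Sketch only: block app start-up until a Java debugger attaches.
    public class DebuggableApp extends android.app.Application {
        // Flip to true locally when debugging start-up code; never ship as true.
        private static final boolean WAIT_FOR_DEBUGGER_ON_START = true;

        @Override
        public void onCreate() {
            super.onCreate();
            if (WAIT_FOR_DEBUGGER_ON_START) {
                // Returns immediately if a debugger is already attached;
                // otherwise blocks until one connects (e.g. from Android Studio).
                android.os.Debug.waitForDebugger();
            }
        }
    }
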
diff --git a/src/com/android/camera/one/AbstractOneCamera.java b/src/com/android/camera/one/AbstractOneCamera.java
index c266f59..d21e9f4 100644
--- a/src/com/android/camera/one/AbstractOneCamera.java
+++ b/src/com/android/camera/one/AbstractOneCamera.java
@@ -16,6 +16,8 @@
 
 package com.android.camera.one;
 
+import java.io.File;
+
 /**
  * A common abstract {@link OneCamera} implementation that contains some utility
  * functions and plumbing we don't want every sub-class of {@link OneCamera} to
@@ -35,4 +37,31 @@
     public final void setFocusStateListener(FocusStateListener listener) {
         mFocusStateListener = listener;
     }
+
+    /**
+     * Create a directory we can use to store debugging information during Gcam
+     * captures.
+     *
+     * @param root the root into which we put a session-specific sub-directory.
+     * @param folderName the sub-folder within 'root' where the data should be
+     *            put.
+     * @return The session-specific directory (absolute path) into which to
+     *         store debug information.
+     */
+    protected static String makeDebugDir(File root, String folderName) {
+        if (root == null) {
+            return null;
+        }
+        if (!root.exists() || !root.isDirectory()) {
+            throw new RuntimeException("Gcam debug directory not valid or doesn't exist: "
+                    + root.getAbsolutePath());
+        }
+        File destFolder = (new File(new File(root, folderName),
+                String.valueOf(System.currentTimeMillis())));
+        if (!destFolder.mkdirs()) {
+            throw new RuntimeException("Could not create Gcam debug data folder.");
+        }
+        return destFolder.getAbsolutePath();
+    }
 }
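
For reference, a minimal sketch of how a OneCamera subclass might call the new makeDebugDir() helper; the root path below is an illustrative placeholder, not something this change defines:

    // Sketch only: obtain a per-capture debug directory under an assumed root.
    private static String exampleDebugDir() {
        File debugRoot = new File("/sdcard/gcam_debug"); // hypothetical root
        // Returns null for a null root; throws if the root does not exist.
        return makeDebugDir(debugRoot, "normal_capture_debug");
    }
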
diff --git a/src/com/android/camera/one/v2/OneCameraImpl.java b/src/com/android/camera/one/v2/OneCameraImpl.java
index 1d6753f..f353c5f 100644
--- a/src/com/android/camera/one/v2/OneCameraImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraImpl.java
@@ -50,10 +50,12 @@
 import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
 import com.android.camera.session.CaptureSession;
 import com.android.camera.util.CameraUtil;
+import com.android.camera.util.CaptureDataSerializer;
 import com.android.camera.util.JpegUtilNative;
 import com.android.camera.util.Size;
 import com.android.camera.util.SystemProperties;
 
+import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -79,6 +81,13 @@
 
     private static final Tag TAG = new Tag("OneCameraImpl2");
 
+    /**
+     * If set to true, will write data about each capture request to disk.
+     * <p>
+     * TODO: Port to a setprop.
+     */
+    private static final boolean DEBUG_WRITE_CAPTURE_DATA = false;
+
     /** System Properties switch to enable additional focus logging. */
     private static final String PROP_FOCUS_DEBUG_KEY = "persist.camera.focus_debug_log";
     private static final String PROP_FOCUS_DEBUG_OFF = "0";
@@ -90,8 +99,8 @@
 
     /**
      * Set to ImageFormat.JPEG, to use the hardware encoder, or
-     * ImageFormat.YUV_420_888 to use the software encoder.
-     * No other image formats are supported.
+     * ImageFormat.YUV_420_888 to use the software encoder. No other image
+     * formats are supported.
      */
     private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
 
@@ -131,28 +140,29 @@
     /** Last time takePicture() was called in uptimeMillis. */
     private long mTakePictureStartMillis;
     /** Runnable that returns to CONTROL_AF_MODE = AF_CONTINUOUS_PICTURE. */
-    private Runnable mReturnToContinuousAFRunnable = new Runnable() {
+    private final Runnable mReturnToContinuousAFRunnable = new Runnable() {
         @Override
         public void run() {
             repeatingPreviewWithReadyListener(null);
         }
     };
 
-    /** Current zoom value.  1.0 is no zoom. */
-    private float mZoomValue = 1f;
+    /** Current zoom value. 1.0 is no zoom. */
+    private final float mZoomValue = 1f;
     /** If partial results was OK, don't need to process total result. */
     private boolean mAutoFocusStateListenerPartialOK = false;
 
     /**
      * Common listener for preview frame metadata.
      */
-    private CameraCaptureSession.CaptureListener mAutoFocusStateListener = new
+    private final CameraCaptureSession.CaptureListener mAutoFocusStateListener = new
             CameraCaptureSession.CaptureListener() {
                 // AF state information is sometimes available 1 frame before
                 // onCaptureCompleted(), so we take advantage of that.
                 @Override
-                public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
-                                                CaptureResult partialResult) {
+                public void onCaptureProgressed(CameraCaptureSession session,
+                        CaptureRequest request,
+                        CaptureResult partialResult) {
 
                     if (partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
                         mAutoFocusStateListenerPartialOK = true;
@@ -165,9 +175,11 @@
                     }
                     super.onCaptureProgressed(session, request, partialResult);
                 }
+
                 @Override
-                public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
-                                               TotalCaptureResult result) {
+                public void onCaptureCompleted(CameraCaptureSession session,
+                        CaptureRequest request,
+                        TotalCaptureResult result) {
                     if (!mAutoFocusStateListenerPartialOK) {
                         autofocusStateChangeDispatcher(result);
                     }
@@ -211,7 +223,8 @@
                 public void onImageAvailable(ImageReader reader) {
                     InFlightCapture capture = mCaptureQueue.remove();
 
-                    // Since this is not an HDR+ session, we will just save the result.
+                    // Since this is not an HDR+ session, we will just save the
+                    // result.
                     capture.session.startEmpty();
                     byte[] imageBytes = acquireJpegBytesAndClose(reader);
                     // TODO: The savePicture call here seems to block UI thread.
@@ -236,7 +249,8 @@
         mCameraThread.start();
         mCameraHandler = new Handler(mCameraThread.getLooper());
 
-        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
+        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
+                pictureSize.getHeight(),
                 sCaptureImageFormat, 2);
         mCaptureImageReader.setOnImageAvailableListener(mCaptureImageListener, mCameraHandler);
         Log.d(TAG, "New Camera2 based OneCameraImpl created.");
@@ -276,7 +290,7 @@
     }
 
     /**
-     * Take picture immediately.  Parameters passed through from takePicture().
+     * Take picture immediately. Parameters passed through from takePicture().
      */
     public void takePictureNow(PhotoCaptureParameters params, CaptureSession session) {
         long dt = SystemClock.uptimeMillis() - mTakePictureStartMillis;
@@ -310,6 +324,15 @@
             builder.addTarget(mCaptureImageReader.getSurface());
             applyFlashMode(params.flashMode, builder);
             CaptureRequest request = builder.build();
+
+            if (DEBUG_WRITE_CAPTURE_DATA) {
+                final String debugDataDir = makeDebugDir(params.debugDataFolder,
+                        "normal_capture_debug");
+                Log.i(TAG, "Writing capture data to: " + debugDataDir);
+                CaptureDataSerializer.toFile("Normal Capture", request, new File(debugDataDir,
+                        "capture.txt"));
+            }
+
             mCaptureSession.capture(request, mAutoFocusStateListener, mCameraHandler);
         } catch (CameraAccessException e) {
             Log.e(TAG, "Could not access camera for still image capture.");
@@ -487,7 +510,7 @@
      * Request preview capture stream with AF_MODE_CONTINUOUS_PICTURE.
      *
      * @param readyListener called when the request was built and sent, or if
-     *                      setting up the request failed.
+     *            setting up the request failed.
      */
     private void repeatingPreviewWithReadyListener(CaptureReadyCallback readyListener) {
         try {
@@ -520,7 +543,7 @@
      * @param meteringRegions metering regions, for tap to focus/expose.
      */
     private void repeatingPreviewWithAFTrigger(MeteringRectangle[] focusRegions,
-                                               MeteringRectangle[] meteringRegions, Object tag) {
+            MeteringRectangle[] meteringRegions, Object tag) {
         try {
             CaptureRequest.Builder builder;
             builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -645,7 +668,9 @@
     @Override
     public void triggerFocusAndMeterAtPoint(float nx, float ny) {
         Log.v(TAG, "triggerFocusAndMeterAtPoint(" + nx + "," + ny + ")");
-        float points[] = new float[]{nx, ny};
+        float points[] = new float[] {
+                nx, ny
+        };
         // Make sure the points are in [0,1] range.
         points[0] = CameraUtil.clamp(points[0], 0f, 1f);
         points[1] = CameraUtil.clamp(points[1], 0f, 1f);
@@ -657,10 +682,12 @@
             zoomMatrix.mapPoints(points);
         }
 
-        // TODO: Make this work when preview aspect ratio != sensor aspect ratio.
+        // TODO: Make this work when preview aspect ratio != sensor aspect
+        // ratio.
         Rect sensor = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
         int edge = (int) (METERING_REGION_EDGE * Math.max(sensor.width(), sensor.height()));
-        // x0 and y0 in sensor coordinate system, rotated 90 degrees from portrait.
+        // x0 and y0 in sensor coordinate system, rotated 90 degrees from
+        // portrait.
         int x0 = (int) (sensor.width() * points[1]);
         int y0 = (int) (sensor.height() * (1f - points[0]));
         int x1 = x0 + edge;
@@ -675,8 +702,9 @@
                 + METERING_REGION_WEIGHT * MeteringRectangle.METERING_WEIGHT_MAX);
 
         Log.v(TAG, "sensor 3A @ x0=" + x0 + " y0=" + y0 + " dx=" + (x1 - x0) + " dy=" + (y1 - y0));
-        MeteringRectangle[] regions = new MeteringRectangle[]{
-                new MeteringRectangle(x0, y0, x1 - x0, y1 - y0, wt)};
+        MeteringRectangle[] regions = new MeteringRectangle[] {
+                new MeteringRectangle(x0, y0, x1 - x0, y1 - y0, wt)
+        };
         repeatingPreviewWithAFTrigger(regions, regions, null);
     }
 
@@ -801,7 +829,8 @@
         Object tag = result.getRequest().getTag();
         // Nexus 5 has a bug where CONTROL_AF_STATE is missing sometimes.
         if (result.get(CaptureResult.CONTROL_AF_STATE) == null) {
-            //throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
+            // throw new
+            // IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
             Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n ");
             return;
         }
diff --git a/src/com/android/camera/util/CaptureDataSerializer.java b/src/com/android/camera/util/CaptureDataSerializer.java
new file mode 100644
index 0000000..40fcfe9
--- /dev/null
+++ b/src/com/android/camera/util/CaptureDataSerializer.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.util;
+
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.params.TonemapCurve;
+import android.util.Rational;
+
+import com.android.camera.debug.Log;
+import com.android.camera.debug.Log.Tag;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Can be used for debugging to output details about Camera2 capture requests
+ * and responses.
+ */
+public class CaptureDataSerializer {
+    private static interface Writeable {
+        public void write(Writer writer) throws IOException;
+    }
+
+    private static final Tag TAG = new Tag("CaptureDataSerializer");
+
+    /**
+     * Generate a human-readable string of the given capture request and return
+     * it.
+     */
+    public static String toString(String title, CaptureRequest metadata) {
+        StringWriter writer = new StringWriter();
+        dumpMetadata(title, metadata, writer);
+        return writer.toString();
+    }
+
+    /**
+     * Generate a human-readable string of the given capture request or result
+     * and write it to the given file.
+     */
+    public static void toFile(String title, CameraMetadata<?> metadata, File file) {
+        try {
+            // Will append if the file already exists.
+            FileWriter writer = new FileWriter(file, true);
+            if (metadata instanceof CaptureRequest) {
+                dumpMetadata(title, (CaptureRequest) metadata, writer);
+            } else if (metadata instanceof CaptureResult) {
+                dumpMetadata(title, (CaptureResult) metadata, writer);
+            } else {
+                writer.close();
+                throw new IllegalArgumentException("Cannot generate debug data from type "
+                        + metadata.getClass().getName());
+            }
+            writer.close();
+        } catch (IOException ex) {
+            Log.e(TAG, "Could not write capture data to file.", ex);
+        }
+    }
+
+    /**
+     * Writes a human-readable dump of the given capture request to the given
+     * writer for offline debugging.
+     */
+    private static void dumpMetadata(final String title, final CaptureRequest metadata,
+            Writer writer) {
+        Writeable writeable = new Writeable() {
+            @Override
+            public void write(Writer writer) throws IOException {
+                List<CaptureRequest.Key<?>> keys = metadata.getKeys();
+                writer.write(title + '\n');
+
+                // TODO: move to CameraMetadata#toString ?
+                for (CaptureRequest.Key<?> key : keys) {
+                    writer.write(String.format("    %s\n", key.getName()));
+                    writer.write(String.format("        %s\n",
+                            metadataValueToString(metadata.get(key))));
+                }
+            }
+        };
+        dumpMetadata(writeable, new BufferedWriter(writer));
+    }
+
+    /**
+     * Writes a human-readable dump of the given capture result to the given
+     * writer for offline debugging.
+     */
+    private static void dumpMetadata(final String title, final CaptureResult metadata,
+            Writer writer) {
+        Writeable writeable = new Writeable() {
+            @Override
+            public void write(Writer writer) throws IOException {
+                List<CaptureResult.Key<?>> keys = metadata.getKeys();
+                writer.write(title + '\n');
+
+                // TODO: move to CameraMetadata#toString ?
+                for (CaptureResult.Key<?> key : keys) {
+                    writer.write(String.format("    %s\n", key.getName()));
+                    writer.write(String.format("        %s\n",
+                            metadataValueToString(metadata.get(key))));
+                }
+            }
+        };
+        dumpMetadata(writeable, new BufferedWriter(writer));
+    }
+
+    private static String metadataValueToString(Object object) {
+        if (object == null) {
+            return "<null>";
+        }
+        if (object.getClass().isArray()) {
+            StringBuilder builder = new StringBuilder();
+            builder.append("[");
+
+            int length = Array.getLength(object);
+            for (int i = 0; i < length; ++i) {
+                Object item = Array.get(object, i);
+                builder.append(metadataValueToString(item));
+
+                if (i != length - 1) {
+                    builder.append(", ");
+                }
+            }
+            builder.append(']');
+
+            return builder.toString();
+        } else {
+            // These classes don't have a toString() method yet
+            // See: http://b/16899576
+            if (object instanceof RggbChannelVector) {
+                return toString((RggbChannelVector) object);
+            } else if (object instanceof ColorSpaceTransform) {
+                return toString((ColorSpaceTransform) object);
+            } else if (object instanceof TonemapCurve) {
+                return toString((TonemapCurve) object);
+            }
+            return object.toString();
+        }
+    }
+
+    private static void dumpMetadata(Writeable metadata, Writer writer) {
+        /*
+         * Save metadata to the file, appending if other metadata is already
+         * present in that file.
+         */
+        try {
+            metadata.write(writer);
+        } catch (IOException e) {
+            Log.e(TAG, "dumpMetadata - Failed to dump metadata", e);
+        } finally {
+            try {
+                if (writer != null) {
+                    writer.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "dumpMetadata - Failed to close writer.", e);
+            }
+        }
+    }
+
+    private static String toString(RggbChannelVector vector) {
+        StringBuilder str = new StringBuilder();
+        str.append("RggbChannelVector:");
+        str.append(" R:");
+        str.append(vector.getRed());
+        str.append(" G(even):");
+        str.append(vector.getGreenEven());
+        str.append(" G(odd):");
+        str.append(vector.getGreenOdd());
+        str.append(" B:");
+        str.append(vector.getBlue());
+
+        return str.toString();
+    }
+
+    private static String toString(ColorSpaceTransform transform) {
+        StringBuilder str = new StringBuilder();
+        Rational[] rationals = new Rational[9];
+        transform.copyElements(rationals, 0);
+        str.append("ColorSpaceTransform: ");
+        str.append(Arrays.toString(rationals));
+        return str.toString();
+    }
+
+    private static String toString(TonemapCurve curve) {
+        StringBuilder str = new StringBuilder();
+        str.append("TonemapCurve:");
+
+        float[] reds = new float[curve.getPointCount(TonemapCurve.CHANNEL_RED)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_RED, reds, 0);
+        float[] greens = new float[curve.getPointCount(TonemapCurve.CHANNEL_GREEN)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_GREEN, greens, 0);
+        float[] blues = new float[curve.getPointCount(TonemapCurve.CHANNEL_BLUE)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_BLUE, blues, 0);
+
+        str.append("\n\nReds: ");
+        str.append(Arrays.toString(reds));
+        str.append("\n\nGreens: ");
+        str.append(Arrays.toString(greens));
+        str.append("\n\nBlues: ");
+        str.append(Arrays.toString(blues));
+
+        return str.toString();
+    }
+}
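
For reference, a minimal usage sketch of the new serializer, mirroring the call added in OneCameraImpl.takePictureNow(); the request and output directory parameters stand in for values supplied by the caller:

    // Sketch only: dump a CaptureRequest for offline inspection.
    static String dumpRequest(CaptureRequest request, File debugDataDir) {
        // toFile() opens the file in append mode, so repeated dumps accumulate.
        CaptureDataSerializer.toFile("Normal Capture", request,
                new File(debugDataDir, "capture.txt"));
        // An in-memory dump is also available, e.g. for logging.
        return CaptureDataSerializer.toString("Normal Capture", request);
    }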