Move code from GoogleCamera that dumps capture request metadata.
Bug: 16945820
Change-Id: Iff3f05b65687073a7272ed31644f34dc6187de21
diff --git a/src/com/android/camera/app/CameraApp.java b/src/com/android/camera/app/CameraApp.java
index 4bcf83c..6c35c53 100644
--- a/src/com/android/camera/app/CameraApp.java
+++ b/src/com/android/camera/app/CameraApp.java
@@ -19,6 +19,7 @@
import android.app.Application;
import android.app.NotificationManager;
import android.content.Context;
+import android.os.Debug;
import com.android.camera.MediaSaverImpl;
import com.android.camera.debug.LogHelper;
@@ -40,6 +41,15 @@
* to be used across modules.
*/
public class CameraApp extends Application implements CameraServices {
+ /**
+ * This is for debugging only: If set to true, application will not start
+ * until a debugger is attached.
+ * <p>
+ * Use this if you need to debug code that is executed while the app starts
+ * up and it would be too late to attach a debugger afterwards.
+ */
+ private static final boolean WAIT_FOR_DEBUGGER_ON_START = false;
+
private MediaSaver mMediaSaver;
private CaptureSessionManager mSessionManager;
private SessionStorageManager mSessionStorageManager;
@@ -53,11 +63,15 @@
public void onCreate() {
super.onCreate();
+ if (WAIT_FOR_DEBUGGER_ON_START) {
+ Debug.waitForDebugger();
+ }
+
Context context = getApplicationContext();
LogHelper.initialize(context);
- // It is important that this gets called early in execution before the app has had
- // the opportunity to create any shared preferences.
+ // It is important that this gets called early in execution before the
+ // app has had the opportunity to create any shared preferences.
UsageStatistics.instance().initialize(this);
SessionStatsCollector.instance().initialize(this);
CameraUtil.initialize(this);
diff --git a/src/com/android/camera/one/AbstractOneCamera.java b/src/com/android/camera/one/AbstractOneCamera.java
index c266f59..d21e9f4 100644
--- a/src/com/android/camera/one/AbstractOneCamera.java
+++ b/src/com/android/camera/one/AbstractOneCamera.java
@@ -16,6 +16,8 @@
package com.android.camera.one;
+import java.io.File;
+
/**
* A common abstract {@link OneCamera} implementation that contains some utility
* functions and plumbing we don't want every sub-class of {@link OneCamera} to
@@ -35,4 +37,31 @@
public final void setFocusStateListener(FocusStateListener listener) {
mFocusStateListener = listener;
}
+
+ /**
+ * Create a directory we can use to store debugging information during Gcam
+ * captures.
+ *
+ * @param root the root into which we put a session-specific sub-directory.
+ * @param folderName the sub-folder within 'root' where the data should be
+ * put.
+ * @return The session-specific directory (absolute path) into which to
+ * store debug information.
+ */
+    protected static String makeDebugDir(File root, String folderName) {
+        if (root == null) {
+            return null;
+        }
+        if (!root.exists() || !root.isDirectory()) {
+            throw new RuntimeException("Gcam debug directory not valid or doesn't exist: "
+                    + root.getAbsolutePath());
+        }
+        // Timestamped sub-folder so each capture session gets its own directory.
+        File destFolder = new File(new File(root, folderName),
+                String.valueOf(System.currentTimeMillis()));
+        if (!destFolder.mkdirs()) {
+            throw new RuntimeException("Could not create Gcam debug data folder.");
+        }
+        return destFolder.getAbsolutePath();
+    }
}
diff --git a/src/com/android/camera/one/v2/OneCameraImpl.java b/src/com/android/camera/one/v2/OneCameraImpl.java
index 1d6753f..f353c5f 100644
--- a/src/com/android/camera/one/v2/OneCameraImpl.java
+++ b/src/com/android/camera/one/v2/OneCameraImpl.java
@@ -50,10 +50,12 @@
import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
import com.android.camera.session.CaptureSession;
import com.android.camera.util.CameraUtil;
+import com.android.camera.util.CaptureDataSerializer;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.Size;
import com.android.camera.util.SystemProperties;
+import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -79,6 +81,13 @@
private static final Tag TAG = new Tag("OneCameraImpl2");
+ /**
+ * If set to true, will write data about each capture request to disk.
+ * <p>
+ * TODO: Port to a setprop.
+ */
+ private static final boolean DEBUG_WRITE_CAPTURE_DATA = false;
+
/** System Properties switch to enable additional focus logging. */
private static final String PROP_FOCUS_DEBUG_KEY = "persist.camera.focus_debug_log";
private static final String PROP_FOCUS_DEBUG_OFF = "0";
@@ -90,8 +99,8 @@
/**
* Set to ImageFormat.JPEG, to use the hardware encoder, or
- * ImageFormat.YUV_420_888 to use the software encoder.
- * No other image formats are supported.
+ * ImageFormat.YUV_420_888 to use the software encoder. No other image
+ * formats are supported.
*/
private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
@@ -131,28 +140,29 @@
/** Last time takePicture() was called in uptimeMillis. */
private long mTakePictureStartMillis;
/** Runnable that returns to CONTROL_AF_MODE = AF_CONTINUOUS_PICTURE. */
- private Runnable mReturnToContinuousAFRunnable = new Runnable() {
+ private final Runnable mReturnToContinuousAFRunnable = new Runnable() {
@Override
public void run() {
repeatingPreviewWithReadyListener(null);
}
};
- /** Current zoom value. 1.0 is no zoom. */
- private float mZoomValue = 1f;
+ /** Current zoom value. 1.0 is no zoom. */
+ private final float mZoomValue = 1f;
/** If partial results was OK, don't need to process total result. */
private boolean mAutoFocusStateListenerPartialOK = false;
/**
* Common listener for preview frame metadata.
*/
- private CameraCaptureSession.CaptureListener mAutoFocusStateListener = new
+ private final CameraCaptureSession.CaptureListener mAutoFocusStateListener = new
CameraCaptureSession.CaptureListener() {
// AF state information is sometimes available 1 frame before
// onCaptureCompleted(), so we take advantage of that.
@Override
- public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
- CaptureResult partialResult) {
+ public void onCaptureProgressed(CameraCaptureSession session,
+ CaptureRequest request,
+ CaptureResult partialResult) {
if (partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
mAutoFocusStateListenerPartialOK = true;
@@ -165,9 +175,11 @@
}
super.onCaptureProgressed(session, request, partialResult);
}
+
@Override
- public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
- TotalCaptureResult result) {
+ public void onCaptureCompleted(CameraCaptureSession session,
+ CaptureRequest request,
+ TotalCaptureResult result) {
if (!mAutoFocusStateListenerPartialOK) {
autofocusStateChangeDispatcher(result);
}
@@ -211,7 +223,8 @@
public void onImageAvailable(ImageReader reader) {
InFlightCapture capture = mCaptureQueue.remove();
- // Since this is not an HDR+ session, we will just save the result.
+ // Since this is not an HDR+ session, we will just save the
+ // result.
capture.session.startEmpty();
byte[] imageBytes = acquireJpegBytesAndClose(reader);
// TODO: The savePicture call here seems to block UI thread.
@@ -236,7 +249,8 @@
mCameraThread.start();
mCameraHandler = new Handler(mCameraThread.getLooper());
- mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
+ mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
+ pictureSize.getHeight(),
sCaptureImageFormat, 2);
mCaptureImageReader.setOnImageAvailableListener(mCaptureImageListener, mCameraHandler);
Log.d(TAG, "New Camera2 based OneCameraImpl created.");
@@ -276,7 +290,7 @@
}
/**
- * Take picture immediately. Parameters passed through from takePicture().
+ * Take picture immediately. Parameters passed through from takePicture().
*/
public void takePictureNow(PhotoCaptureParameters params, CaptureSession session) {
long dt = SystemClock.uptimeMillis() - mTakePictureStartMillis;
@@ -310,6 +324,15 @@
builder.addTarget(mCaptureImageReader.getSurface());
applyFlashMode(params.flashMode, builder);
CaptureRequest request = builder.build();
+
+ if (DEBUG_WRITE_CAPTURE_DATA) {
+ final String debugDataDir = makeDebugDir(params.debugDataFolder,
+ "normal_capture_debug");
+ Log.i(TAG, "Writing capture data to: " + debugDataDir);
+ CaptureDataSerializer.toFile("Normal Capture", request, new File(debugDataDir,
+ "capture.txt"));
+ }
+
mCaptureSession.capture(request, mAutoFocusStateListener, mCameraHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "Could not access camera for still image capture.");
@@ -487,7 +510,7 @@
* Request preview capture stream with AF_MODE_CONTINUOUS_PICTURE.
*
* @param readyListener called when request was build and sent, or if
- * setting up the request failed.
+ * setting up the request failed.
*/
private void repeatingPreviewWithReadyListener(CaptureReadyCallback readyListener) {
try {
@@ -520,7 +543,7 @@
* @param meteringRegions metering regions, for tap to focus/expose.
*/
private void repeatingPreviewWithAFTrigger(MeteringRectangle[] focusRegions,
- MeteringRectangle[] meteringRegions, Object tag) {
+ MeteringRectangle[] meteringRegions, Object tag) {
try {
CaptureRequest.Builder builder;
builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -645,7 +668,9 @@
@Override
public void triggerFocusAndMeterAtPoint(float nx, float ny) {
Log.v(TAG, "triggerFocusAndMeterAtPoint(" + nx + "," + ny + ")");
- float points[] = new float[]{nx, ny};
+ float points[] = new float[] {
+ nx, ny
+ };
// Make sure the points are in [0,1] range.
points[0] = CameraUtil.clamp(points[0], 0f, 1f);
points[1] = CameraUtil.clamp(points[1], 0f, 1f);
@@ -657,10 +682,12 @@
zoomMatrix.mapPoints(points);
}
- // TODO: Make this work when preview aspect ratio != sensor aspect ratio.
+ // TODO: Make this work when preview aspect ratio != sensor aspect
+ // ratio.
Rect sensor = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
int edge = (int) (METERING_REGION_EDGE * Math.max(sensor.width(), sensor.height()));
- // x0 and y0 in sensor coordinate system, rotated 90 degrees from portrait.
+ // x0 and y0 in sensor coordinate system, rotated 90 degrees from
+ // portrait.
int x0 = (int) (sensor.width() * points[1]);
int y0 = (int) (sensor.height() * (1f - points[0]));
int x1 = x0 + edge;
@@ -675,8 +702,9 @@
+ METERING_REGION_WEIGHT * MeteringRectangle.METERING_WEIGHT_MAX);
Log.v(TAG, "sensor 3A @ x0=" + x0 + " y0=" + y0 + " dx=" + (x1 - x0) + " dy=" + (y1 - y0));
- MeteringRectangle[] regions = new MeteringRectangle[]{
- new MeteringRectangle(x0, y0, x1 - x0, y1 - y0, wt)};
+ MeteringRectangle[] regions = new MeteringRectangle[] {
+ new MeteringRectangle(x0, y0, x1 - x0, y1 - y0, wt)
+ };
repeatingPreviewWithAFTrigger(regions, regions, null);
}
@@ -801,7 +829,8 @@
Object tag = result.getRequest().getTag();
// Nexus 5 has a bug where CONTROL_AF_STATE is missing sometimes.
if (result.get(CaptureResult.CONTROL_AF_STATE) == null) {
- //throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
+ // throw new
+ // IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n ");
return;
}
diff --git a/src/com/android/camera/util/CaptureDataSerializer.java b/src/com/android/camera/util/CaptureDataSerializer.java
new file mode 100644
index 0000000..40fcfe9
--- /dev/null
+++ b/src/com/android/camera/util/CaptureDataSerializer.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.util;
+
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.params.TonemapCurve;
+import android.util.Rational;
+
+import com.android.camera.debug.Log;
+import com.android.camera.debug.Log.Tag;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Can be used for debugging to output details about Camera2 capture request and
+ * responses.
+ */
+public class CaptureDataSerializer {
+    private interface Writeable {
+        void write(Writer writer) throws IOException;
+    }
+
+    private static final Tag TAG = new Tag("CaptureDataSerializer");
+
+    /**
+     * Generate a human-readable string of the given capture request and return
+     * it.
+     */
+    public static String toString(String title, CaptureRequest metadata) {
+        StringWriter writer = new StringWriter();
+        dumpMetadata(title, metadata, writer);
+        return writer.toString();
+    }
+
+    /**
+     * Generate a human-readable string of the given capture request or result
+     * and write it to the given file.
+     */
+    public static void toFile(String title, CameraMetadata<?> metadata, File file) {
+        try {
+            // Will append if the file already exists.
+            FileWriter writer = new FileWriter(file, true);
+            if (metadata instanceof CaptureRequest) {
+                dumpMetadata(title, (CaptureRequest) metadata, writer);
+            } else if (metadata instanceof CaptureResult) {
+                dumpMetadata(title, (CaptureResult) metadata, writer);
+            } else {
+                writer.close();
+                throw new IllegalArgumentException("Cannot generate debug data from type "
+                        + metadata.getClass().getName());
+            }
+            writer.close();
+        } catch (IOException ex) {
+            Log.e(TAG, "Could not write capture data to file.", ex);
+        }
+    }
+
+    /**
+     * Writes a human-readable dump of the given capture request's keys and
+     * values to the given writer.
+     */
+    private static void dumpMetadata(final String title, final CaptureRequest metadata,
+            Writer writer) {
+        Writeable writeable = new Writeable() {
+            @Override
+            public void write(Writer writer) throws IOException {
+                List<CaptureRequest.Key<?>> keys = metadata.getKeys();
+                writer.write(title + '\n');
+
+                // TODO: move to CameraMetadata#toString ?
+                for (CaptureRequest.Key<?> key : keys) {
+                    writer.write(String.format(" %s\n", key.getName()));
+                    writer.write(String.format(" %s\n",
+                            metadataValueToString(metadata.get(key))));
+                }
+            }
+        };
+        dumpMetadata(writeable, new BufferedWriter(writer));
+    }
+
+    /**
+     * Writes a human-readable dump of the given capture result's keys and
+     * values to the given writer.
+     */
+    private static void dumpMetadata(final String title, final CaptureResult metadata,
+            Writer writer) {
+        Writeable writeable = new Writeable() {
+            @Override
+            public void write(Writer writer) throws IOException {
+                List<CaptureResult.Key<?>> keys = metadata.getKeys();
+                writer.write(title + '\n'); // Not String.format(title): a '%' in title would throw.
+
+                // TODO: move to CameraMetadata#toString ?
+                for (CaptureResult.Key<?> key : keys) {
+                    writer.write(String.format(" %s\n", key.getName()));
+                    writer.write(String.format(" %s\n",
+                            metadataValueToString(metadata.get(key))));
+                }
+            }
+        };
+        dumpMetadata(writeable, new BufferedWriter(writer));
+    }
+
+    private static String metadataValueToString(Object object) {
+        if (object == null) {
+            return "<null>";
+        }
+        if (object.getClass().isArray()) {
+            StringBuilder builder = new StringBuilder();
+            builder.append("[");
+
+            int length = Array.getLength(object);
+            for (int i = 0; i < length; ++i) {
+                Object item = Array.get(object, i);
+                builder.append(metadataValueToString(item));
+
+                if (i != length - 1) {
+                    builder.append(", ");
+                }
+            }
+            builder.append(']');
+
+            return builder.toString();
+        } else {
+            // These classes don't have a toString() method yet
+            // See: http://b/16899576
+            if (object instanceof RggbChannelVector) {
+                return toString((RggbChannelVector) object);
+            } else if (object instanceof ColorSpaceTransform) {
+                return toString((ColorSpaceTransform) object);
+            } else if (object instanceof TonemapCurve) {
+                return toString((TonemapCurve) object);
+            }
+            return object.toString();
+        }
+    }
+
+    private static void dumpMetadata(Writeable metadata, Writer writer) {
+        /*
+         * Write the metadata and always close the writer, even if the dump
+         * fails part-way through.
+         */
+        try {
+            metadata.write(writer);
+        } catch (IOException e) {
+            Log.e(TAG, "dumpMetadata - Failed to dump metadata", e);
+        } finally {
+            try {
+                if (writer != null) {
+                    writer.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "dumpMetadata - Failed to close writer.", e);
+            }
+        }
+    }
+
+    private static String toString(RggbChannelVector vector) {
+        StringBuilder str = new StringBuilder();
+        str.append("RggbChannelVector:");
+        str.append(" R:");
+        str.append(vector.getRed());
+        str.append(" G(even):");
+        str.append(vector.getGreenEven());
+        str.append(" G(odd):");
+        str.append(vector.getGreenOdd());
+        str.append(" B:");
+        str.append(vector.getBlue());
+
+        return str.toString();
+    }
+
+    private static String toString(ColorSpaceTransform transform) {
+        StringBuilder str = new StringBuilder();
+        Rational[] rationals = new Rational[9];
+        transform.copyElements(rationals, 0);
+        str.append("ColorSpaceTransform: ");
+        str.append(Arrays.toString(rationals));
+        return str.toString();
+    }
+
+    private static String toString(TonemapCurve curve) {
+        StringBuilder str = new StringBuilder();
+        str.append("TonemapCurve:");
+
+        float[] reds = new float[curve.getPointCount(TonemapCurve.CHANNEL_RED)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_RED, reds, 0);
+        float[] greens = new float[curve.getPointCount(TonemapCurve.CHANNEL_GREEN)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_GREEN, greens, 0);
+        float[] blues = new float[curve.getPointCount(TonemapCurve.CHANNEL_BLUE)
+                * TonemapCurve.POINT_SIZE];
+        curve.copyColorCurve(TonemapCurve.CHANNEL_BLUE, blues, 0);
+
+        str.append("\n\nReds: ");
+        str.append(Arrays.toString(reds));
+        str.append("\n\nGreens: ");
+        str.append(Arrays.toString(greens));
+        str.append("\n\nBlues: ");
+        str.append(Arrays.toString(blues));
+
+        return str.toString();
+    }
+}