Merge "camera2: Use StreamConfigurationMap for format/size/duration lookup"
diff --git a/api/current.txt b/api/current.txt
index 2358291..b52b2e1 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -11859,16 +11859,8 @@
     field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_MAX_NUM_OUTPUT_STREAMS;
     field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_PARTIAL_RESULT_COUNT;
     field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_PIPELINE_MAX_DEPTH;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_FORMATS;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_MIN_DURATIONS;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_SIZES;
     field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_SIZES;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STALL_DURATIONS;
-    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+    field public static final android.hardware.camera2.CameraMetadata.Key SCALER_STREAM_CONFIGURATION_MAP;
     field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_AVAILABLE_TEST_PATTERN_MODES;
     field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BASE_GAIN_FACTOR;
     field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BLACK_LEVEL_PATTERN;
@@ -12078,8 +12070,6 @@
     field public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5; // 0x5
     field public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 2; // 0x2
     field public static final int REQUEST_AVAILABLE_CAPABILITIES_ZSL = 4; // 0x4
-    field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1; // 0x1
-    field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0; // 0x0
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3; // 0x3
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2; // 0x2
     field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1; // 0x1
@@ -12360,6 +12350,22 @@
     method public final int getWidth();
   }
 
+  public final class TonemapCurve {
+    method public void copyColorCurve(int, float[], int);
+    method public android.graphics.PointF getPoint(int, int);
+    method public int getPointCount(int);
+    field public static final int CHANNEL_BLUE = 2; // 0x2
+    field public static final int CHANNEL_GREEN = 1; // 0x1
+    field public static final int CHANNEL_RED = 0; // 0x0
+    field public static final float LEVEL_BLACK = 0.0f;
+    field public static final float LEVEL_WHITE = 1.0f;
+    field public static final int POINT_SIZE = 2; // 0x2
+  }
+
+}
+
+package android.hardware.camera2.params {
+
   public final class StreamConfigurationMap {
     method public final int[] getOutputFormats();
     method public long getOutputMinFrameDuration(int, android.util.Size);
@@ -12373,18 +12379,6 @@
     method public boolean isOutputSupportedFor(android.view.Surface);
   }
 
-  public final class TonemapCurve {
-    method public void copyColorCurve(int, float[], int);
-    method public android.graphics.PointF getPoint(int, int);
-    method public int getPointCount(int);
-    field public static final int CHANNEL_BLUE = 2; // 0x2
-    field public static final int CHANNEL_GREEN = 1; // 0x1
-    field public static final int CHANNEL_RED = 0; // 0x0
-    field public static final float LEVEL_BLACK = 0.0f;
-    field public static final float LEVEL_WHITE = 1.0f;
-    field public static final int POINT_SIZE = 2; // 0x2
-  }
-
 }
 
 package android.hardware.display {
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 5f2af8cf..b1c1005 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -307,16 +307,14 @@
      * <li>The sizes will be sorted by increasing pixel area (width x height).
      * If several resolutions have the same area, they will be sorted by increasing width.</li>
      * <li>The aspect ratio of the largest thumbnail size will be same as the
-     * aspect ratio of largest JPEG output size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
+     * aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
      * The largest size is defined as the size that has the largest pixel area
      * in a given size list.</li>
-     * <li>Each output JPEG size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} will have at least
+     * <li>Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
      * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
      * and vice versa.</li>
      * <li>All non (0, 0) sizes will have non-zero widths and heights.</li>
      * </ul>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
      */
     public static final Key<android.hardware.camera2.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
             new Key<android.hardware.camera2.Size[]>("android.jpeg.availableThumbnailSizes", android.hardware.camera2.Size[].class);
@@ -445,8 +443,10 @@
      * working at that point; DO NOT USE without careful
      * consideration of future support.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     * @deprecated
      * @hide
      */
+    @Deprecated
     public static final Key<Byte> QUIRKS_USE_PARTIAL_RESULT =
             new Key<Byte>("android.quirks.usePartialResult", byte.class);
 
@@ -461,8 +461,8 @@
      * <p>This lists the upper bound of the number of output streams supported by
      * the camera device. Using more streams simultaneously may require more hardware and
      * CPU resources that will consume more power. The image format for a output stream can
-     * be any supported format provided by {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
-     * The formats defined in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} can be catergorized
+     * be any supported format provided by android.scaler.availableStreamConfigurations.
+     * The formats defined in android.scaler.availableStreamConfigurations can be categorized
      * into the 3 stream types as below:</p>
      * <ul>
      * <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
@@ -471,8 +471,6 @@
      * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
      * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
      * </ul>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
      */
     public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
             new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
@@ -483,14 +481,12 @@
      * <p>When set to 0, it means no input stream is supported.</p>
      * <p>The image format for a input stream can be any supported
      * format provided by
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}. When using an
+     * android.scaler.availableInputOutputFormatsMap. When using an
      * input stream, there must be at least one output stream
      * configured to to receive the reprocessed images.</p>
      * <p>For example, for Zero Shutter Lag (ZSL) still capture use case, the input
      * stream image format will be RAW_OPAQUE, the associated output stream image format
      * should be JPEG.</p>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
      */
     public static final Key<Integer> REQUEST_MAX_NUM_INPUT_STREAMS =
             new Key<Integer>("android.request.maxNumInputStreams", int.class);
@@ -629,22 +625,26 @@
      * camera device for output streams.</p>
      * <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
      * <p>When set to YUV_420_888, application can access the YUV420 data directly.</p>
+     * @deprecated
+     * @hide
      */
+    @Deprecated
     public static final Key<int[]> SCALER_AVAILABLE_FORMATS =
             new Key<int[]>("android.scaler.availableFormats", int[].class);
 
     /**
      * <p>The minimum frame duration that is supported
-     * for each resolution in {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES android.scaler.availableJpegSizes}.</p>
+     * for each resolution in android.scaler.availableJpegSizes.</p>
      * <p>This corresponds to the minimum steady-state frame duration when only
      * that JPEG stream is active and captured in a burst, with all
      * processing (typically in android.*.mode) set to FAST.</p>
      * <p>When multiple streams are configured, the minimum
      * frame duration will be &gt;= max(individual stream min
      * durations)</p>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES
+     * @deprecated
+     * @hide
      */
+    @Deprecated
     public static final Key<long[]> SCALER_AVAILABLE_JPEG_MIN_DURATIONS =
             new Key<long[]>("android.scaler.availableJpegMinDurations", long[].class);
 
@@ -654,7 +654,10 @@
      * sensor maximum resolution (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}).</p>
      *
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @deprecated
+     * @hide
      */
+    @Deprecated
     public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_JPEG_SIZES =
             new Key<android.hardware.camera2.Size[]>("android.scaler.availableJpegSizes", android.hardware.camera2.Size[].class);
 
@@ -669,16 +672,17 @@
 
     /**
      * <p>For each available processed output size (defined in
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES android.scaler.availableProcessedSizes}), this property lists the
+     * android.scaler.availableProcessedSizes), this property lists the
      * minimum supportable frame duration for that size.</p>
      * <p>This should correspond to the frame duration when only that processed
      * stream is active, with all processing (typically in android.*.mode)
      * set to FAST.</p>
      * <p>When multiple streams are configured, the minimum frame duration will
      * be &gt;= max(individual stream min durations).</p>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES
+     * @deprecated
+     * @hide
      */
+    @Deprecated
     public static final Key<long[]> SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS =
             new Key<long[]>("android.scaler.availableProcessedMinDurations", long[].class);
 
@@ -696,7 +700,10 @@
      * can provide.</p>
      * <p>Please reference the documentation for the image data destination to
      * check if it limits the maximum size for image data.</p>
+     * @deprecated
+     * @hide
      */
+    @Deprecated
     public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_PROCESSED_SIZES =
             new Key<android.hardware.camera2.Size[]>("android.scaler.availableProcessedSizes", android.hardware.camera2.Size[].class);
 
@@ -746,13 +753,14 @@
      * </table>
      * <p>For ZSL-capable camera devices, using the RAW_OPAQUE format
      * as either input or output will never hurt maximum frame rate (i.e.
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} will not have RAW_OPAQUE).</p>
+     * StreamConfigurationMap#getOutputStallDuration(int,Size)
+     * for a <code>format =</code> RAW_OPAQUE is always 0).</p>
      * <p>Attempting to configure an input stream with output streams not
      * listed as available in this map is not valid.</p>
-     * <p>TODO: Add java type mapping for this property.</p>
+     * <p>TODO: typedef to ReprocessFormatMap</p>
      *
      * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+     * @hide
      */
     public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
             new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);
@@ -775,7 +783,7 @@
      * check if it limits the maximum size for image data.</p>
      * <p>Not all output formats may be supported in a configuration with
      * an input stream of a particular format. For more details, see
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}.</p>
+     * android.scaler.availableInputOutputFormatsMap.</p>
      * <p>The following table describes the minimum required output stream
      * configurations based on the hardware level
      * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
@@ -844,13 +852,11 @@
      *
      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
-     * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
-     * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
-     * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT
+     * @hide
      */
-    public static final Key<int[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
-            new Key<int[]>("android.scaler.availableStreamConfigurations", int[].class);
+    public static final Key<android.hardware.camera2.params.StreamConfiguration[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
+            new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.scaler.availableStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
 
     /**
      * <p>This lists the minimum frame duration for each
@@ -863,14 +869,16 @@
      * <p>The minimum frame duration of a stream (of a particular format, size)
      * is the same regardless of whether the stream is input or output.</p>
      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} for more details about
+     * android.scaler.availableStallDurations for more details about
      * calculating the max frame rate.</p>
+     * <p>(Keep in sync with
+     * StreamConfigurationMap#getOutputMinFrameDuration)</p>
      *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
      * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @hide
      */
-    public static final Key<long[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
-            new Key<long[]>("android.scaler.availableMinFrameDurations", long[].class);
+    public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
+            new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
 
     /**
      * <p>This lists the maximum stall duration for each
@@ -929,12 +937,105 @@
      * for more details.</p>
      * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
      * calculating the max frame rate (absent stalls).</p>
+     * <p>(Keep up to date with
+     * StreamConfigurationMap#getOutputStallDuration(int, Size))</p>
      *
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
      * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @hide
      */
-    public static final Key<long[]> SCALER_AVAILABLE_STALL_DURATIONS =
-            new Key<long[]>("android.scaler.availableStallDurations", long[].class);
+    public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_STALL_DURATIONS =
+            new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+    /**
+     * <p>The available stream configurations that this
+     * camera device supports; also includes the minimum frame durations
+     * and the stall durations for each format/size combination.</p>
+     * <p>All camera devices will support sensor maximum resolution (defined by
+     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
+     * <p>For a given use case, the actual maximum supported resolution
+     * may be lower than what is listed here, depending on the destination
+     * Surface for the image data. For example, for recording video,
+     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+     * can provide.</p>
+     * <p>Please reference the documentation for the image data destination to
+     * check if it limits the maximum size for image data.</p>
+     * <p>Not all output formats may be supported in a configuration with
+     * an input stream of a particular format. For more details, see
+     * android.scaler.availableInputOutputFormatsMap.</p>
+     * <p>The following table describes the minimum required output stream
+     * configurations based on the hardware level
+     * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
+     * <table>
+     * <thead>
+     * <tr>
+     * <th align="center">Format</th>
+     * <th align="center">Size</th>
+     * <th align="center">Hardware Level</th>
+     * <th align="center">Notes</th>
+     * </tr>
+     * </thead>
+     * <tbody>
+     * <tr>
+     * <td align="center">JPEG</td>
+     * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
+     * <td align="center">Any</td>
+     * <td align="center"></td>
+     * </tr>
+     * <tr>
+     * <td align="center">JPEG</td>
+     * <td align="center">1920x1080 (1080p)</td>
+     * <td align="center">Any</td>
+     * <td align="center">if 1080p &lt;= activeArraySize</td>
+     * </tr>
+     * <tr>
+     * <td align="center">JPEG</td>
+     * <td align="center">1280x720 (720p)</td>
+     * <td align="center">Any</td>
+     * <td align="center">if 720p &lt;= activeArraySize</td>
+     * </tr>
+     * <tr>
+     * <td align="center">JPEG</td>
+     * <td align="center">640x480 (480p)</td>
+     * <td align="center">Any</td>
+     * <td align="center">if 480p &lt;= activeArraySize</td>
+     * </tr>
+     * <tr>
+     * <td align="center">JPEG</td>
+     * <td align="center">320x240 (240p)</td>
+     * <td align="center">Any</td>
+     * <td align="center">if 240p &lt;= activeArraySize</td>
+     * </tr>
+     * <tr>
+     * <td align="center">YUV_420_888</td>
+     * <td align="center">all output sizes available for JPEG</td>
+     * <td align="center">FULL</td>
+     * <td align="center"></td>
+     * </tr>
+     * <tr>
+     * <td align="center">YUV_420_888</td>
+     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+     * <td align="center">LIMITED</td>
+     * <td align="center"></td>
+     * </tr>
+     * <tr>
+     * <td align="center">IMPLEMENTATION_DEFINED</td>
+     * <td align="center">same as YUV_420_888</td>
+     * <td align="center">Any</td>
+     * <td align="center"></td>
+     * </tr>
+     * </tbody>
+     * </table>
+     * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
+     * mandatory stream configurations on a per-capability basis.</p>
+     *
+     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     */
+    public static final Key<android.hardware.camera2.params.StreamConfigurationMap> SCALER_STREAM_CONFIGURATION_MAP =
+            new Key<android.hardware.camera2.params.StreamConfigurationMap>("android.scaler.streamConfigurationMap", android.hardware.camera2.params.StreamConfigurationMap.class);
 
     /**
      * <p>Area of raw data which corresponds to only
@@ -982,13 +1083,9 @@
      * being clipped to the maximum. See that control
      * for a full definition of frame durations.</p>
      * <p>Refer to
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations},
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations}, and
-     * android.scaler.availableRawMinDurations for the minimum
-     * frame duration values.</p>
+     * StreamConfigurationMap#getOutputMinFrameDuration(int,Size)
+     * for the minimum frame duration values.</p>
      *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
-     * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
      * @see CaptureRequest#SENSOR_FRAME_DURATION
      */
     public static final Key<Long> SENSOR_INFO_MAX_FRAME_DURATION =
@@ -1007,9 +1104,7 @@
      * including black calibration pixels.</p>
      * <p>Maximum output resolution for raw format must
      * match this in
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.</p>
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+     * android.scaler.availableStreamConfigurations.</p>
      */
     public static final Key<android.hardware.camera2.Size> SENSOR_INFO_PIXEL_ARRAY_SIZE =
             new Key<android.hardware.camera2.Size>("android.sensor.info.pixelArraySize", android.hardware.camera2.Size.class);
@@ -1420,4 +1515,13 @@
     /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
      * End generated code
      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+
+
+
+
+
+
+
+
 }
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index 9d0e0e1..ca03dae 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -16,6 +16,8 @@
 
 package android.hardware.camera2;
 
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.graphics.ImageFormat;
 import android.os.Handler;
 import android.view.Surface;
 
@@ -147,7 +149,7 @@
      *   the size of the Surface with
      *   {@link android.view.SurfaceHolder#setFixedSize} to be one of the
      *   supported
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+     *   {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
      *   before calling {@link android.view.SurfaceHolder#getSurface}.</li>
      *
      * <li>For accessing through an OpenGL texture via a
@@ -155,14 +157,14 @@
      *   the SurfaceTexture with
      *   {@link android.graphics.SurfaceTexture#setDefaultBufferSize} to be one
      *   of the supported
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+     *   {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
      *   before creating a Surface from the SurfaceTexture with
      *   {@link Surface#Surface}.</li>
      *
      * <li>For recording with {@link android.media.MediaCodec}: Call
      *   {@link android.media.MediaCodec#createInputSurface} after configuring
      *   the media codec to use one of the
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+     *   {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
      *   </li>
      *
      * <li>For recording with {@link android.media.MediaRecorder}: TODO</li>
@@ -171,18 +173,15 @@
      *   Create a RenderScript
      *   {@link android.renderscript.Allocation Allocation} with a supported YUV
      *   type, the IO_INPUT flag, and one of the supported
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}. Then
+     *   {@link StreamConfigurationMap#getOutputSizes(int) processed sizes}. Then
      *   obtain the Surface with
      *   {@link android.renderscript.Allocation#getSurface}.</li>
      *
-     * <li>For access to uncompressed or JPEG data in the application: Create a
-     *   {@link android.media.ImageReader} object with the desired
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_FORMATS image format}, and a
-     *   size from the matching
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed},
-     *   {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES jpeg}. Then obtain
-     *   a Surface from it.</li>
-     *
+     * <li>For access to uncompressed or {@link ImageFormat#JPEG JPEG} data in the application:
+     * Create a {@link android.media.ImageReader} object with the desired
+     * {@link StreamConfigurationMap#getOutputFormats() image format}, and a size from the matching
+     * {@link StreamConfigurationMap#getOutputSizes(int) processed size} and {@code format}.
+     * Then obtain a {@link Surface} from it.</li>
      * </ul>
      *
      * </p>
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 6659278..a11390d 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -446,20 +446,6 @@
     public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5;
 
     //
-    // Enumeration values for CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
-    //
-
-    /**
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
-     */
-    public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0;
-
-    /**
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
-     */
-    public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1;
-
-    //
     // Enumeration values for CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
     //
 
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 0ca9161..8ae21f3 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -1185,7 +1185,8 @@
      * cannot process more than 1 capture at a time.</li>
      * </ul>
      * <p>The necessary information for the application, given the model above,
-     * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
+     * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field
+     * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size).
      * These are used to determine the maximum frame rate / minimum frame
      * duration that is possible for a given stream configuration.</p>
      * <p>Specifically, the application can use the following rules to
@@ -1195,7 +1196,8 @@
      * <li>Let the set of currently configured input/output streams
      * be called <code>S</code>.</li>
      * <li>Find the minimum frame durations for each stream in <code>S</code>, by
-     * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+     * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using
+     * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with
      * its respective size/format). Let this set of frame durations be called
      * <code>F</code>.</li>
      * <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -1203,7 +1205,8 @@
      * used in <code>R</code> be called <code>S_r</code>.</li>
      * </ol>
      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+     * StreamConfigurationMap#getOutputStallDuration(int,Size) using its
+     * respective size/format), then the frame duration in
      * <code>F</code> determines the steady state frame rate that the application will
      * get if it uses <code>R</code> as a repeating request. Let this special kind
      * of request be called <code>Rsimple</code>.</p>
@@ -1214,10 +1217,9 @@
      * if all buffers from the previous <code>Rstall</code> have already been
      * delivered.</p>
      * <p>For more details about stalling, see
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
+     * StreamConfigurationMap#getOutputStallDuration(int,Size).</p>
      *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
      */
     public static final Key<Long> SENSOR_FRAME_DURATION =
             new Key<Long>("android.sensor.frameDuration", long.class);
@@ -1516,4 +1518,12 @@
     /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
      * End generated code
      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+
+
+
+
+
+
+
 }
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 42a3de8..0160622 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -216,18 +216,6 @@
             new Key<float[]>("android.colorCorrection.gains", float[].class);
 
     /**
-     * <p>The ID sent with the latest
-     * CAMERA2_TRIGGER_PRECAPTURE_METERING call</p>
-     * <p>Must be 0 if no
-     * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
-     * by HAL. Always updated even if AE algorithm ignores the
-     * trigger</p>
-     * @hide
-     */
-    public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
-            new Key<Integer>("android.control.aePrecaptureId", int.class);
-
-    /**
      * <p>The desired setting for the camera device's auto-exposure
      * algorithm's antibanding compensation.</p>
      * <p>Some kinds of lighting fixtures, such as some fluorescent
@@ -1068,17 +1056,6 @@
             new Key<Integer>("android.control.afState", int.class);
 
     /**
-     * <p>The ID sent with the latest
-     * CAMERA2_TRIGGER_AUTOFOCUS call</p>
-     * <p>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
-     * received yet by HAL. Always updated even if AF algorithm
-     * ignores the trigger</p>
-     * @hide
-     */
-    public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
-            new Key<Integer>("android.control.afTriggerId", int.class);
-
-    /**
      * <p>Whether AWB is currently locked to its
      * latest calculated values.</p>
      * <p>Note that AWB lock is only meaningful for AUTO
@@ -1713,8 +1690,10 @@
      * capture must arrive before the FINAL buffer for that capture. This entry may
      * only be used by the camera device if quirks.usePartialResult is set to 1.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     * @deprecated
      * @hide
      */
+    @Deprecated
     public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
             new Key<Boolean>("android.quirks.partialResult", boolean.class);
 
@@ -1834,7 +1813,8 @@
      * cannot process more than 1 capture at a time.</li>
      * </ul>
      * <p>The necessary information for the application, given the model above,
-     * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
+     * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field
+     * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size).
      * These are used to determine the maximum frame rate / minimum frame
      * duration that is possible for a given stream configuration.</p>
      * <p>Specifically, the application can use the following rules to
@@ -1844,7 +1824,8 @@
      * <li>Let the set of currently configured input/output streams
      * be called <code>S</code>.</li>
      * <li>Find the minimum frame durations for each stream in <code>S</code>, by
-     * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+     * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using
+     * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with
      * its respective size/format). Let this set of frame durations be called
      * <code>F</code>.</li>
      * <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -1852,7 +1833,8 @@
      * used in <code>R</code> be called <code>S_r</code>.</li>
      * </ol>
      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+     * StreamConfigurationMap#getOutputStallDuration(int,Size) using its
+     * respective size/format), then the frame duration in
      * <code>F</code> determines the steady state frame rate that the application will
      * get if it uses <code>R</code> as a repeating request. Let this special kind
      * of request be called <code>Rsimple</code>.</p>
@@ -1863,10 +1845,9 @@
      * if all buffers from the previous <code>Rstall</code> have already been
      * delivered.</p>
      * <p>For more details about stalling, see
-     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
+     * StreamConfigurationMap#getOutputStallDuration(int,Size).</p>
      *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
      */
     public static final Key<Long> SENSOR_FRAME_DURATION =
             new Key<Long>("android.sensor.frameDuration", long.class);
@@ -2141,8 +2122,10 @@
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      *
      * @see CaptureRequest#COLOR_CORRECTION_GAINS
+     * @deprecated
      * @hide
      */
+    @Deprecated
     public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
             new Key<float[]>("android.statistics.predictedColorGains", float[].class);
 
@@ -2163,8 +2146,10 @@
      * <p>This value should always be calculated by the AWB block,
      * regardless of the android.control.* current values.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     * @deprecated
      * @hide
      */
+    @Deprecated
     public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
             new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
 
@@ -2441,6 +2426,14 @@
      * End generated code
      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
 
+
+
+
+
+
+
+
+
     /**
      * <p>
      * List of the {@link Face Faces} detected through camera face detection
diff --git a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
index d77f3d1..0815170 100644
--- a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -20,7 +20,7 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.camera2.CaptureRequest;
 
-import android.hardware.camera2.LongParcelable;
+import android.hardware.camera2.utils.LongParcelable;
 
 /** @hide */
 interface ICameraDeviceUser
diff --git a/core/java/android/hardware/camera2/StreamConfigurationMap.java b/core/java/android/hardware/camera2/StreamConfigurationMap.java
deleted file mode 100644
index 5ddd7d6..0000000
--- a/core/java/android/hardware/camera2/StreamConfigurationMap.java
+++ /dev/null
@@ -1,508 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2;
-
-import android.graphics.ImageFormat;
-import android.graphics.PixelFormat;
-import android.hardware.camera2.utils.HashCodeHelpers;
-import android.view.Surface;
-import android.util.Size;
-
-import java.util.Arrays;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Immutable class to store the available stream
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS configurations} to be used
- * when configuring streams with {@link CameraDevice#configureOutputs}.
- * <!-- TODO: link to input stream configuration -->
- *
- * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
- * for that format) that are supported by a camera device.</p>
- *
- * <p>This also contains the minimum frame durations and stall durations for each format/size
- * combination that can be used to calculate effective frame rate when submitting multiple captures.
- * </p>
- *
- * <p>An instance of this object is available from {@link CameraCharacteristics} using
- * the {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS} key and the
- * {@link CameraCharacteristics#get} method.</p.
- *
- * <pre>{@code
- * CameraCharacteristics characteristics = ...;
- * StreamConfigurationMap configs = characteristics.get(
- *         CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- * }</pre>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- * @see CameraDevice#configureOutputs
- */
-public final class StreamConfigurationMap {
-
-    /**
-     * Create a new {@link StreamConfigurationMap}.
-     *
-     * <p>The array parameters ownership is passed to this object after creation; do not
-     * write to them after this constructor is invoked.</p>
-     *
-     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
-     * @param durations a non-{@code null} array of {@link StreamConfigurationDuration}
-     *
-     * @throws NullPointerException if any of the arguments or subelements were {@code null}
-     *
-     * @hide
-     */
-    public StreamConfigurationMap(
-            StreamConfiguration[] configurations,
-            StreamConfigurationDuration[] durations) {
-        // TODO: format check against ImageFormat/PixelFormat ?
-
-        mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
-        mDurations = checkArrayElementsNotNull(durations, "durations");
-
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the image {@code format} output formats in this stream configuration.
-     *
-     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
-     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
-     *
-     * <p>Formats listed in this array are guaranteed to return true if queried with
-     * {@link #isOutputSupportedFor(int).</p>
-     *
-     * @return an array of integer format
-     *
-     * @see ImageFormat
-     * @see PixelFormat
-     */
-    public final int[] getOutputFormats() {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the image {@code format} input formats in this stream configuration.
-     *
-     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
-     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
-     *
-     * @return an array of integer format
-     *
-     * @see ImageFormat
-     * @see PixelFormat
-     *
-     * @hide
-     */
-    public final int[] getInputFormats() {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the supported input sizes for this input format.
-     *
-     * <p>The format must have come from {@link #getInputFormats}; otherwise
-     * {@code null} is returned.</p>
-     *
-     * @param format a format from {@link #getInputFormats}
-     * @return a non-empty array of sizes, or {@code null} if the format was not available.
-     *
-     * @hide
-     */
-    public Size[] getInputSizes(final int format) {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Determine whether or not output streams can be
-     * {@link CameraDevice#configureOutputs configured} with a particular user-defined format.
-     *
-     * <p>This method determines that the output {@code format} is supported by the camera device;
-     * each output {@code surface} target may or may not itself support that {@code format}.
-     * Refer to the class which provides the surface for additional documentation.</p>
-     *
-     * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
-     * returned by {@link #getOutputSizes}.</p>
-     *
-     * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
-     * @return
-     *          {@code true} iff using a {@code surface} with this {@code format} will be
-     *          supported with {@link CameraDevice#configureOutputs}
-     *
-     * @throws IllegalArgumentException
-     *          if the image format was not a defined named constant
-     *          from either {@link ImageFormat} or {@link PixelFormat}
-     *
-     * @see ImageFormat
-     * @see PixelFormat
-     * @see CameraDevice#configureOutputs
-     */
-    public boolean isOutputSupportedFor(int format) {
-        checkArgumentFormat(format);
-
-        final int[] formats = getOutputFormats();
-        for (int i = 0; i < formats.length; ++i) {
-            if (format == formats[i]) {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-    /**
-     * Determine whether or not output streams can be configured with a particular class
-     * as a consumer.
-     *
-     * <p>The following list is generally usable for outputs:
-     * <ul>
-     * <li>{@link android.media.ImageReader} -
-     * Recommended for image processing or streaming to external resources (such as a file or
-     * network)
-     * <li>{@link android.media.MediaRecorder} -
-     * Recommended for recording video (simple to use)
-     * <li>{@link android.media.MediaCodec} -
-     * Recommended for recording video (more complicated to use, with more flexibility)
-     * <li>{@link android.renderscript.Allocation} -
-     * Recommended for image processing with {@link android.renderscript RenderScript}
-     * <li>{@link android.view.SurfaceHolder} -
-     * Recommended for low-power camera preview with {@link android.view.SurfaceView}
-     * <li>{@link android.graphics.SurfaceTexture} -
-     * Recommended for OpenGL-accelerated preview processing or compositing with
-     * {@link android.view.TextureView}
-     * </ul>
-     * </p>
-     *
-     * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
-     * provide a producer endpoint that is suitable to be used with
-     * {@link CameraDevice#configureOutputs}.</p>
-     *
-     * <p>Since not all of the above classes support output of all format and size combinations,
-     * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
-     *
-     * @param klass a non-{@code null} {@link Class} object reference
-     * @return {@code true} if this class is supported as an output, {@code false} otherwise
-     *
-     * @throws NullPointerException if {@code klass} was {@code null}
-     *
-     * @see CameraDevice#configureOutputs
-     * @see #isOutputSupportedFor(Surface)
-     */
-    public static <T> boolean isOutputSupportedFor(final Class<T> klass) {
-        checkNotNull(klass, "klass must not be null");
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Determine whether or not the {@code surface} in its current state is suitable to be
-     * {@link CameraDevice#configureOutputs configured} as an output.
-     *
-     * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
-     * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
-     * compatible with the {@link CameraDevice} in general
-     * (see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the
-     * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
-     *
-     * <p>Reasons for a {@code surface} being specifically incompatible might be:
-     * <ul>
-     * <li>Using a format that's not listed by {@link #getOutputFormats}
-     * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
-     * <li>The {@code surface} itself is not in a state where it can service a new producer.</p>
-     * </li>
-     * </ul>
-     *
-     * This is not an exhaustive list; see the particular class's documentation for further
-     * possible reasons of incompatibility.</p>
-     *
-     * @param surface a non-{@code null} {@link Surface} object reference
-     * @return {@code true} if this is supported, {@code false} otherwise
-     *
-     * @throws NullPointerException if {@code surface} was {@code null}
-     *
-     * @see CameraDevice#configureOutputs
-     * @see #isOutputSupportedFor(Class)
-     */
-    public boolean isOutputSupportedFor(final Surface surface) {
-        checkNotNull(surface, "surface must not be null");
-
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get a list of sizes compatible with {@code klass} to use as an output.
-     *
-     * <p>Since some of the supported classes may support additional formats beyond
-     * an opaque/implementation-defined (under-the-hood) format; this function only returns
-     * sizes for the implementation-defined format.</p>
-     *
-     * <p>Some classes such as {@link android.media.ImageReader} may only support user-defined
-     * formats; in particular {@link #isOutputSupportedFor(Class)} will return {@code true} for
-     * that class and this method will return an empty array (but not {@code null}).</p>
-     *
-     * <p>If a well-defined format such as {@code NV21} is required, use
-     * {@link #getOutputSizes(int)} instead.</p>
-     *
-     * <p>The {@code klass} should be a supported output, that querying
-     * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
-     *
-     * @param klass
-     *          a non-{@code null} {@link Class} object reference
-     * @return
-     *          an array of supported sizes for implementation-defined formats,
-     *          or {@code null} iff the {@code klass} is not a supported output
-     *
-     * @throws NullPointerException if {@code klass} was {@code null}
-     *
-     * @see #isOutputSupportedFor(Class)
-     */
-    public <T> Size[] getOutputSizes(final Class<T> klass) {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get a list of sizes compatible with the requested image {@code format}.
-     *
-     * <p>The {@code format} should be a supported format (one of the formats returned by
-     * {@link #getOutputFormats}).</p>
-     *
-     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
-     * @return
-     *          an array of supported sizes,
-     *          or {@code null} if the {@code format} is not a supported output
-     *
-     * @see ImageFormat
-     * @see PixelFormat
-     * @see #getOutputFormats
-     */
-    public Size[] getOutputSizes(final int format) {
-        try {
-            checkArgumentFormatSupported(format, /*output*/true);
-        } catch (IllegalArgumentException e) {
-            return null;
-        }
-
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
-     * for the format/size combination (in nanoseconds).
-     *
-     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
-     * <p>{@code size} should be one of the ones returned by
-     * {@link #getOutputSizes(int)}.</p>
-     *
-     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
-     * @param size an output-compatible size
-     * @return a minimum frame duration {@code >=} 0 in nanoseconds
-     *
-     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
-     * @throws NullPointerException if {@code size} was {@code null}
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
-     * @see CaptureRequest#SENSOR_FRAME_DURATION
-     * @see ImageFormat
-     * @see PixelFormat
-     */
-    public long getOutputMinFrameDuration(final int format, final Size size) {
-        checkArgumentFormatSupported(format, /*output*/true);
-
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
-     * for the class/size combination (in nanoseconds).
-     *
-     * <p>This assumes a the {@code klass} is set up to use an implementation-defined format.
-     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
-     *
-     * <p>{@code klass} should be one of the ones which is supported by
-     * {@link #isOutputSupportedFor(Class)}.</p>
-     *
-     * <p>{@code size} should be one of the ones returned by
-     * {@link #getOutputSizes(int)}.</p>
-     *
-     * @param klass
-     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
-     *          non-empty array returned by {@link #getOutputSizes(Class)}
-     * @param size an output-compatible size
-     * @return a minimum frame duration {@code >=} 0 in nanoseconds
-     *
-     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
-     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
-     * @see CaptureRequest#SENSOR_FRAME_DURATION
-     * @see ImageFormat
-     * @see PixelFormat
-     */
-    public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS stall duration}
-     * for the format/size combination (in nanoseconds).
-     *
-     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
-     * <p>{@code size} should be one of the ones returned by
-     * {@link #getOutputSizes(int)}.</p>
-     *
-     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
-     * @param size an output-compatible size
-     * @return a stall duration {@code >=} 0 in nanoseconds
-     *
-     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
-     * @throws NullPointerException if {@code size} was {@code null}
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
-     * @see ImageFormat
-     * @see PixelFormat
-     */
-    public long getOutputStallDuration(final int format, final Size size) {
-        checkArgumentFormatSupported(format, /*output*/true);
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Get the {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS stall duration}
-     * for the class/size combination (in nanoseconds).
-     *
-     * <p>This assumes a the {@code klass} is set up to use an implementation-defined format.
-     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
-     *
-     * <p>{@code klass} should be one of the ones with a non-empty array returned by
-     * {@link #getOutputSizes(Class)}.</p>
-     *
-     * <p>{@code size} should be one of the ones returned by
-     * {@link #getOutputSizes(Class)}.</p>
-     *
-     * @param klass
-     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
-     *          non-empty array returned by {@link #getOutputSizes(Class)}
-     * @param size an output-compatible size
-     * @return a minimum frame duration {@code >=} 0 in nanoseconds
-     *
-     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
-     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
-     *
-     * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
-     * @see CaptureRequest#SENSOR_FRAME_DURATION
-     * @see ImageFormat
-     * @see PixelFormat
-     */
-    public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
-        throw new UnsupportedOperationException("Not implemented yet");
-    }
-
-    /**
-     * Check if this {@link StreamConfigurationMap} is equal to another
-     * {@link StreamConfigurationMap}.
-     *
-     * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
-     *
-     * @return {@code true} if the objects were equal, {@code false} otherwise
-     */
-    @Override
-    public boolean equals(final Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (this == obj) {
-            return true;
-        }
-        if (obj instanceof StreamConfigurationMap) {
-            final StreamConfigurationMap other = (StreamConfigurationMap) obj;
-            // TODO: do we care about order?
-            return Arrays.equals(mConfigurations, other.mConfigurations) &&
-                    Arrays.equals(mDurations, other.mDurations);
-        }
-        return false;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public int hashCode() {
-        // TODO: do we care about order?
-        return HashCodeHelpers.hashCode(mConfigurations) ^ HashCodeHelpers.hashCode(mDurations);
-    }
-
-    // Check that the argument is supported by #getOutputFormats or #getInputFormats
-    private int checkArgumentFormatSupported(int format, boolean output) {
-        checkArgumentFormat(format);
-
-        int[] formats = output ? getOutputFormats() : getInputFormats();
-        for (int i = 0; i < formats.length; ++i) {
-            if (format == formats[i]) {
-                return format;
-            }
-        }
-
-        throw new IllegalArgumentException(String.format(
-                "format %x is not supported by this stream configuration map", format));
-    }
-
-    /**
-     * Ensures that the format is either user-defined or implementation defined.
-     *
-     * <p>Any invalid/undefined formats will raise an exception.</p>
-     *
-     * @param format image format
-     * @return the format
-     *
-     * @throws IllegalArgumentException if the format was invalid
-     */
-    static int checkArgumentFormatInternal(int format) {
-        if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-            return format;
-        }
-
-        return checkArgumentFormat(format);
-    }
-
-    /**
-     * Ensures that the format is user-defined in either ImageFormat or PixelFormat.
-     *
-     * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
-     * </p>
-     *
-     * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
-     *
-     * @param format image format
-     * @return the format
-     *
-     * @throws IllegalArgumentException if the format was not user-defined
-     */
-    static int checkArgumentFormat(int format) {
-        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
-            throw new IllegalArgumentException(String.format(
-                    "format %x was not defined in either ImageFormat or PixelFormat", format));
-        }
-
-        return format;
-    }
-
-    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
-
-    private final StreamConfiguration[] mConfigurations;
-    private final StreamConfigurationDuration[] mDurations;
-
-}
diff --git a/core/java/android/hardware/camera2/impl/CameraDevice.java b/core/java/android/hardware/camera2/impl/CameraDevice.java
index 988f8f9..628d1c3 100644
--- a/core/java/android/hardware/camera2/impl/CameraDevice.java
+++ b/core/java/android/hardware/camera2/impl/CameraDevice.java
@@ -24,9 +24,9 @@
 import android.hardware.camera2.CaptureResultExtras;
 import android.hardware.camera2.ICameraDeviceCallbacks;
 import android.hardware.camera2.ICameraDeviceUser;
-import android.hardware.camera2.LongParcelable;
 import android.hardware.camera2.utils.CameraBinderDecorator;
 import android.hardware.camera2.utils.CameraRuntimeException;
+import android.hardware.camera2.utils.LongParcelable;
 import android.os.Handler;
 import android.os.IBinder;
 import android.os.Looper;
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index 9a06e97..d28f7bd 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -43,6 +43,9 @@
 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration;
 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
 import android.hardware.camera2.marshal.impl.MarshalQueryableString;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.params.StreamConfigurationMap;
 import android.os.Parcelable;
 import android.os.Parcel;
 import android.util.Log;
@@ -207,10 +210,8 @@
             return (T) getFaces();
         } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
             return (T) getFaceRectangles();
-        } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS)) {
-            return (T) getAvailableStreamConfigurations();
-        } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS)) {
-            return (T) getAvailableMinFrameDurations();
+        } else if (key.equals(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)) {
+            return (T) getStreamConfigurationMap();
         }
 
         // For other keys, get() falls back to getBase()
@@ -231,50 +232,6 @@
         return availableFormats;
     }
 
-    private int[] getAvailableStreamConfigurations() {
-        final int NUM_ELEMENTS_IN_CONFIG = 4;
-        int[] availableConfigs =
-                getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-        if (availableConfigs != null) {
-            if (availableConfigs.length % NUM_ELEMENTS_IN_CONFIG != 0) {
-                Log.w(TAG, "availableStreamConfigurations is malformed, length must be multiple"
-                        + " of " + NUM_ELEMENTS_IN_CONFIG);
-                return availableConfigs;
-            }
-
-            for (int i = 0; i < availableConfigs.length; i += NUM_ELEMENTS_IN_CONFIG) {
-                // JPEG has different value between native and managed side, need override.
-                if (availableConfigs[i] == NATIVE_JPEG_FORMAT) {
-                    availableConfigs[i] = ImageFormat.JPEG;
-                }
-            }
-        }
-
-        return availableConfigs;
-    }
-
-    private long[] getAvailableMinFrameDurations() {
-        final int NUM_ELEMENTS_IN_DURATION = 4;
-        long[] availableMinDurations =
-                getBase(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
-        if (availableMinDurations != null) {
-            if (availableMinDurations.length % NUM_ELEMENTS_IN_DURATION != 0) {
-                Log.w(TAG, "availableStreamConfigurations is malformed, length must be multiple"
-                        + " of " + NUM_ELEMENTS_IN_DURATION);
-                return availableMinDurations;
-            }
-
-            for (int i = 0; i < availableMinDurations.length; i += NUM_ELEMENTS_IN_DURATION) {
-                // JPEG has different value between native and managed side, need override.
-                if (availableMinDurations[i] == NATIVE_JPEG_FORMAT) {
-                    availableMinDurations[i] = ImageFormat.JPEG;
-                }
-            }
-        }
-
-        return availableMinDurations;
-    }
-
     private Face[] getFaces() {
         final int FACE_LANDMARK_SIZE = 6;
 
@@ -374,6 +331,17 @@
         return fixedFaceRectangles;
     }
 
+    private StreamConfigurationMap getStreamConfigurationMap() {
+        StreamConfiguration[] configurations = getBase(
+                CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+        StreamConfigurationDuration[] minFrameDurations = getBase(
+                CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+        StreamConfigurationDuration[] stallDurations = getBase(
+                CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
+
+        return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations);
+    }
+
     private <T> void setBase(Key<T> key, T value) {
         int tag = key.getTag();
 
@@ -401,56 +369,12 @@
             return setAvailableFormats((int[]) value);
         } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
             return setFaceRectangles((Rect[]) value);
-        } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS)) {
-            return setAvailableStreamConfigurations((int[])value);
-        } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS)) {
-            return setAvailableMinFrameDurations((long[])value);
         }
 
         // For other keys, set() falls back to setBase().
         return false;
     }
 
-    private boolean setAvailableStreamConfigurations(int[] value) {
-        final int NUM_ELEMENTS_IN_CONFIG = 4;
-        int[] availableConfigs = value;
-        if (value == null) {
-            // Let setBase() to handle the null value case.
-            return false;
-        }
-
-        int[] newValues = new int[availableConfigs.length];
-        for (int i = 0; i < availableConfigs.length; i++) {
-            newValues[i] = availableConfigs[i];
-            if (i % NUM_ELEMENTS_IN_CONFIG == 0 && availableConfigs[i] == ImageFormat.JPEG) {
-                newValues[i] = NATIVE_JPEG_FORMAT;
-            }
-        }
-
-        setBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS, newValues);
-        return true;
-    }
-
-    private boolean setAvailableMinFrameDurations(long[] value) {
-        final int NUM_ELEMENTS_IN_DURATION = 4;
-        long[] availableDurations = value;
-        if (value == null) {
-            // Let setBase() to handle the null value case.
-            return false;
-        }
-
-        long[] newValues = new long[availableDurations.length];
-        for (int i = 0; i < availableDurations.length; i++) {
-            newValues[i] = availableDurations[i];
-            if (i % NUM_ELEMENTS_IN_DURATION == 0 && availableDurations[i] == ImageFormat.JPEG) {
-                newValues[i] = NATIVE_JPEG_FORMAT;
-            }
-        }
-
-        setBase(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS, newValues);
-        return true;
-    }
-
     private boolean setAvailableFormats(int[] value) {
         int[] availableFormat = value;
         if (value == null) {
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
index 3025cb4..98a7ad7 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
@@ -15,9 +15,10 @@
  */
 package android.hardware.camera2.marshal.impl;
 
-import android.hardware.camera2.ReprocessFormatsMap;
 import android.hardware.camera2.marshal.Marshaler;
 import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfigurationMap;
 import android.hardware.camera2.utils.TypeReference;
 
 import static android.hardware.camera2.impl.CameraMetadataNative.*;
@@ -50,12 +51,13 @@
              *   INPUT_FORMAT, OUTPUT_FORMAT_COUNT, [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
              * };
              */
-            int[] inputs = value.getInputs();
+            int[] inputs = StreamConfigurationMap.imageFormatToInternal(value.getInputs());
             for (int input : inputs) {
                 // INPUT_FORMAT
                 buffer.putInt(input);
 
-                int[] outputs = value.getOutputs(input);
+                int[] outputs =
+                        StreamConfigurationMap.imageFormatToInternal(value.getOutputs(input));
                 // OUTPUT_FORMAT_COUNT
                 buffer.putInt(outputs.length);
 
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
index 6a4e821..62ace31 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
@@ -15,9 +15,9 @@
  */
 package android.hardware.camera2.marshal.impl;
 
-import android.hardware.camera2.StreamConfiguration;
 import android.hardware.camera2.marshal.Marshaler;
 import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfiguration;
 import android.hardware.camera2.utils.TypeReference;
 
 import static android.hardware.camera2.impl.CameraMetadataNative.*;
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
index c3d564e..fd3dfac 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
@@ -15,9 +15,9 @@
  */
 package android.hardware.camera2.marshal.impl;
 
-import android.hardware.camera2.StreamConfigurationDuration;
 import android.hardware.camera2.marshal.Marshaler;
 import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfigurationDuration;
 import android.hardware.camera2.utils.TypeReference;
 
 import static android.hardware.camera2.impl.CameraMetadataNative.*;
diff --git a/core/java/android/hardware/camera2/ReprocessFormatsMap.java b/core/java/android/hardware/camera2/params/ReprocessFormatsMap.java
similarity index 90%
rename from core/java/android/hardware/camera2/ReprocessFormatsMap.java
rename to core/java/android/hardware/camera2/params/ReprocessFormatsMap.java
index 894a499..d3f5bc3 100644
--- a/core/java/android/hardware/camera2/ReprocessFormatsMap.java
+++ b/core/java/android/hardware/camera2/params/ReprocessFormatsMap.java
@@ -14,10 +14,11 @@
  * limitations under the License.
  */
 
-package android.hardware.camera2;
+package android.hardware.camera2.params;
 
 import static com.android.internal.util.Preconditions.*;
 
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.utils.HashCodeHelpers;
 
 import java.util.Arrays;
@@ -61,9 +62,12 @@
      * @throws IllegalArgumentException
      *              if the data was poorly formatted
      *              (missing output format length or too few output formats)
+     *              or if any of the input/formats were not valid
      * @throws NullPointerException
      *              if entry was null
      *
+     * @see StreamConfigurationMap#checkArgumentFormatInternal
+     *
      * @hide
      */
     public ReprocessFormatsMap(final int[] entry) {
@@ -72,26 +76,31 @@
         int numInputs = 0;
         int left = entry.length;
         for (int i = 0; i < entry.length; ) {
-            final int format = entry[i];
+            int inputFormat = StreamConfigurationMap.checkArgumentFormatInternal(entry[i]);
 
             left--;
             i++;
 
             if (left < 1) {
                 throw new IllegalArgumentException(
-                        String.format("Input %x had no output format length listed", format));
+                        String.format("Input %x had no output format length listed", inputFormat));
             }
 
             final int length = entry[i];
             left--;
             i++;
 
+            for (int j = 0; j < length; ++j) {
+                int outputFormat = entry[i + j];
+                StreamConfigurationMap.checkArgumentFormatInternal(outputFormat);
+            }
+
             if (length > 0) {
                 if (left < length) {
                     throw new IllegalArgumentException(
                             String.format(
                                     "Input %x had too few output formats listed (actual: %d, " +
-                                    "expected: %d)", format, left, length));
+                                    "expected: %d)", inputFormat, left, length));
                 }
 
                 i += length;
@@ -131,7 +140,6 @@
                 throw new AssertionError(
                         String.format("Input %x had no output format length listed", format));
             }
-            // TODO: check format is a valid input format
 
             final int length = mEntry[i];
             left--;
@@ -149,12 +157,10 @@
                 left -= length;
             }
 
-            // TODO: check output format is a valid output format
-
             inputs[j] = format;
         }
 
-        return inputs;
+        return StreamConfigurationMap.imageFormatToPublic(inputs);
     }
 
     /**
@@ -204,7 +210,7 @@
                     outputs[k] = mEntry[i + k];
                 }
 
-                return outputs;
+                return StreamConfigurationMap.imageFormatToPublic(outputs);
             }
 
             i += length;
diff --git a/core/java/android/hardware/camera2/StreamConfiguration.java b/core/java/android/hardware/camera2/params/StreamConfiguration.java
similarity index 92%
rename from core/java/android/hardware/camera2/StreamConfiguration.java
rename to core/java/android/hardware/camera2/params/StreamConfiguration.java
index a514034..1c6b6e9 100644
--- a/core/java/android/hardware/camera2/StreamConfiguration.java
+++ b/core/java/android/hardware/camera2/params/StreamConfiguration.java
@@ -14,13 +14,16 @@
  * limitations under the License.
  */
 
-package android.hardware.camera2;
+package android.hardware.camera2.params;
 
 import static com.android.internal.util.Preconditions.*;
-import static android.hardware.camera2.StreamConfigurationMap.checkArgumentFormatInternal;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
 
 import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
 import android.util.Size;
 
 /**
@@ -62,11 +65,12 @@
     }
 
     /**
-     * Get the image {@code format} in this stream configuration.
+     * Get the internal image {@code format} in this stream configuration.
      *
      * @return an integer format
      *
      * @see ImageFormat
+     * @see PixelFormat
      */
     public final int getFormat() {
         return mFormat;
diff --git a/core/java/android/hardware/camera2/StreamConfigurationDuration.java b/core/java/android/hardware/camera2/params/StreamConfigurationDuration.java
similarity index 92%
rename from core/java/android/hardware/camera2/StreamConfigurationDuration.java
rename to core/java/android/hardware/camera2/params/StreamConfigurationDuration.java
index 6a31156..217059d 100644
--- a/core/java/android/hardware/camera2/StreamConfigurationDuration.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationDuration.java
@@ -14,13 +14,15 @@
  * limitations under the License.
  */
 
-package android.hardware.camera2;
+package android.hardware.camera2.params;
 
 import static com.android.internal.util.Preconditions.*;
-import static android.hardware.camera2.StreamConfigurationMap.checkArgumentFormatInternal;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
 
 import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
 import android.util.Size;
 
 /**
@@ -59,11 +61,12 @@
     }
 
     /**
-     * Get the image {@code format} in this stream configuration duration
+     * Get the internal image {@code format} in this stream configuration duration
      *
      * @return an integer format
      *
      * @see ImageFormat
+     * @see PixelFormat
      */
     public final int getFormat() {
         return mFormat;
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
new file mode 100644
index 0000000..4cd6d15
--- /dev/null
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -0,0 +1,949 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.view.Surface;
+import android.util.Log;
+import android.util.Size;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Objects;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Immutable class to store the available stream
+ * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to be used
+ * when configuring streams with {@link CameraDevice#configureOutputs}.
+ * <!-- TODO: link to input stream configuration -->
+ *
+ * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
+ * for that format) that are supported by a camera device.</p>
+ *
+ * <p>This also contains the minimum frame durations and stall durations for each format/size
+ * combination that can be used to calculate effective frame rate when submitting multiple captures.
+ * </p>
+ *
+ * <p>An instance of this object is available from {@link CameraCharacteristics} using
+ * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
+ * {@link CameraCharacteristics#get} method.</p>
+ *
+ * <pre><code>{@code
+ * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
+ * StreamConfigurationMap configs = characteristics.get(
+ *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ * }</code></pre>
+ *
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraDevice#configureOutputs
+ */
+public final class StreamConfigurationMap {
+
+    private static final String TAG = "StreamConfigurationMap";
+    /**
+     * Create a new {@link StreamConfigurationMap}.
+     *
+     * <p>The array parameters ownership is passed to this object after creation; do not
+     * write to them after this constructor is invoked.</p>
+     *
+     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
+     * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+     * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+     *
+     * @throws NullPointerException if any of the arguments or subelements were {@code null}
+     *
+     * @hide
+     */
+    public StreamConfigurationMap(
+            StreamConfiguration[] configurations,
+            StreamConfigurationDuration[] minFrameDurations,
+            StreamConfigurationDuration[] stallDurations) {
+
+        mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
+        mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
+        mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
+
+        // For each format, track how many sizes there are available to configure
+        for (StreamConfiguration config : configurations) {
+            HashMap<Integer, Integer> map = config.isOutput() ? mOutputFormats : mInputFormats;
+
+            Integer count = map.get(config.getFormat());
+
+            if (count == null) {
+                count = 0;
+            }
+            count = count + 1;
+
+            map.put(config.getFormat(), count);
+        }
+
+        if (!mOutputFormats.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+            throw new AssertionError(
+                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
+        }
+    }
+
+    /**
+     * Get the image {@code format} output formats in this stream configuration.
+     *
+     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+     *
+     * <p>Formats listed in this array are guaranteed to return true if queried with
+     * {@link #isOutputSupportedFor(int)}.</p>
+     *
+     * @return an array of integer format
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     */
+    public final int[] getOutputFormats() {
+        return getPublicFormats(/*output*/true);
+    }
+
+    /**
+     * Get the image {@code format} input formats in this stream configuration.
+     *
+     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+     *
+     * @return an array of integer format
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     *
+     * @hide
+     */
+    public final int[] getInputFormats() {
+        return getPublicFormats(/*output*/false);
+    }
+
+    /**
+     * Get the supported input sizes for this input format.
+     *
+     * <p>The format must have come from {@link #getInputFormats}; otherwise
+     * {@code null} is returned.</p>
+     *
+     * @param format a format from {@link #getInputFormats}
+     * @return a non-empty array of sizes, or {@code null} if the format was not available.
+     *
+     * @hide
+     */
+    public Size[] getInputSizes(final int format) {
+        return getPublicFormatSizes(format, /*output*/false);
+    }
+
+    /**
+     * Determine whether or not output streams can be
+     * {@link CameraDevice#configureOutputs configured} with a particular user-defined format.
+     *
+     * <p>This method determines that the output {@code format} is supported by the camera device;
+     * each output {@code surface} target may or may not itself support that {@code format}.
+     * Refer to the class which provides the surface for additional documentation.</p>
+     *
+     * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
+     * returned by {@link #getOutputSizes}.</p>
+     *
+     * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
+     * @return
+     *          {@code true} iff using a {@code surface} with this {@code format} will be
+     *          supported with {@link CameraDevice#configureOutputs}
+     *
+     * @throws IllegalArgumentException
+     *          if the image format was not a defined named constant
+     *          from either {@link ImageFormat} or {@link PixelFormat}
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     * @see CameraDevice#configureOutputs
+     */
+    public boolean isOutputSupportedFor(int format) {
+        checkArgumentFormat(format);
+
+        format = imageFormatToInternal(format);
+        return getFormatsMap(/*output*/true).containsKey(format);
+    }
+
+    /**
+     * Determine whether or not output streams can be configured with a particular class
+     * as a consumer.
+     *
+     * <p>The following list is generally usable for outputs:
+     * <ul>
+     * <li>{@link android.media.ImageReader} -
+     * Recommended for image processing or streaming to external resources (such as a file or
+     * network)
+     * <li>{@link android.media.MediaRecorder} -
+     * Recommended for recording video (simple to use)
+     * <li>{@link android.media.MediaCodec} -
+     * Recommended for recording video (more complicated to use, with more flexibility)
+     * <li>{@link android.renderscript.Allocation} -
+     * Recommended for image processing with {@link android.renderscript RenderScript}
+     * <li>{@link android.view.SurfaceHolder} -
+     * Recommended for low-power camera preview with {@link android.view.SurfaceView}
+     * <li>{@link android.graphics.SurfaceTexture} -
+     * Recommended for OpenGL-accelerated preview processing or compositing with
+     * {@link android.view.TextureView}
+     * </ul>
+     * </p>
+     *
+     * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
+     * provide a producer endpoint that is suitable to be used with
+     * {@link CameraDevice#configureOutputs}.</p>
+     *
+     * <p>Since not all of the above classes support output of all format and size combinations,
+     * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
+     *
+     * @param klass a non-{@code null} {@link Class} object reference
+     * @return {@code true} if this class is supported as an output, {@code false} otherwise
+     *
+     * @throws NullPointerException if {@code klass} was {@code null}
+     *
+     * @see CameraDevice#configureOutputs
+     * @see #isOutputSupportedFor(Surface)
+     */
+    public static <T> boolean isOutputSupportedFor(Class<T> klass) {
+        checkNotNull(klass, "klass must not be null");
+
+        if (klass == android.media.ImageReader.class) {
+            return true;
+        } else if (klass == android.media.MediaRecorder.class) {
+            return true;
+        } else if (klass == android.media.MediaCodec.class) {
+            return true;
+        } else if (klass == android.renderscript.Allocation.class) {
+            return true;
+        } else if (klass == android.view.SurfaceHolder.class) {
+            return true;
+        } else if (klass == android.graphics.SurfaceTexture.class) {
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Determine whether or not the {@code surface} in its current state is suitable to be
+     * {@link CameraDevice#configureOutputs configured} as an output.
+     *
+     * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
+     * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
+     * compatible with the {@link CameraDevice} in general
+     * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
+     * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
+     *
+     * <p>Reasons for a {@code surface} being specifically incompatible might be:
+     * <ul>
+     * <li>Using a format that's not listed by {@link #getOutputFormats}
+     * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
+     * <li>The {@code surface} itself is not in a state where it can service a new producer.
+     * </li>
+     * </ul>
+     *
+     * <p>This is not an exhaustive list; see the particular class's documentation for further
+     * possible reasons of incompatibility.</p>
+     *
+     * @param surface a non-{@code null} {@link Surface} object reference
+     * @return {@code true} if this is supported, {@code false} otherwise
+     *
+     * @throws NullPointerException if {@code surface} was {@code null}
+     *
+     * @see CameraDevice#configureOutputs
+     * @see #isOutputSupportedFor(Class)
+     */
+    public boolean isOutputSupportedFor(Surface surface) {
+        checkNotNull(surface, "surface must not be null");
+
+        throw new UnsupportedOperationException("Not implemented yet");
+
+        // TODO: JNI function that checks the Surface's IGraphicBufferProducer state
+    }
+
+    /**
+     * Get a list of sizes compatible with {@code klass} to use as an output.
+     *
+     * <p>Since some of the supported classes may support additional formats beyond
+     * an opaque/implementation-defined (under-the-hood) format; this function only returns
+     * sizes for the implementation-defined format.</p>
+     *
+     * <p>Some classes such as {@link android.media.ImageReader} may only support user-defined
+     * formats; in particular {@link #isOutputSupportedFor(Class)} will return {@code true} for
+     * that class and this method will return an empty array (but not {@code null}).</p>
+     *
+     * <p>If a well-defined format such as {@code NV21} is required, use
+     * {@link #getOutputSizes(int)} instead.</p>
+     *
+     * <p>The {@code klass} should be a supported output, i.e. querying
+     * {@link #isOutputSupportedFor(Class)} for it returns {@code true}.</p>
+     *
+     * @param klass
+     *          a non-{@code null} {@link Class} object reference
+     * @return
+     *          an array of supported sizes for implementation-defined formats,
+     *          or {@code null} iff the {@code klass} is not a supported output
+     *
+     * @throws NullPointerException if {@code klass} was {@code null}
+     *
+     * @see #isOutputSupportedFor(Class)
+     */
+    public <T> Size[] getOutputSizes(Class<T> klass) {
+        if (isOutputSupportedFor(klass) == false) {
+            return null;
+        }
+
+        return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /*output*/true);
+    }
+
+    /**
+     * Get a list of sizes compatible with the requested image {@code format}.
+     *
+     * <p>The {@code format} should be a supported format (one of the formats returned by
+     * {@link #getOutputFormats}).</p>
+     *
+     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return
+     *          an array of supported sizes,
+     *          or {@code null} if the {@code format} is not a supported output
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     * @see #getOutputFormats
+     */
+    public Size[] getOutputSizes(int format) {
+        return getPublicFormatSizes(format, /*output*/true);
+    }
+
+    /**
+     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+     * for the format/size combination (in nanoseconds).
+     *
+     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+     * <p>{@code size} should be one of the ones returned by
+     * {@link #getOutputSizes(int)}.</p>
+     *
+     * <p>This should correspond to the frame duration when only that stream is active, with all
+     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+     * </p>
+     *
+     * <p>When multiple streams are used in a request, the minimum frame duration will be
+     * {@code max(individual stream min durations)}.</p>
+     *
+     * <!--
+     * TODO: uncomment after adding input stream support
+     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+     * regardless of whether the stream is input or output.</p>
+     * -->
+     *
+     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+     * @param size an output-compatible size
+     * @return a minimum frame duration {@code >=} 0 in nanoseconds
+     *
+     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+     * @throws NullPointerException if {@code size} was {@code null}
+     *
+     * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @see #getOutputStallDuration(int, Size)
+     * @see ImageFormat
+     * @see PixelFormat
+     */
+    public long getOutputMinFrameDuration(int format, Size size) {
+        checkNotNull(size, "size must not be null");
+        checkArgumentFormatSupported(format, /*output*/true);
+
+        return getInternalFormatDuration(imageFormatToInternal(format), size, DURATION_MIN_FRAME);
+    }
+
+    /**
+     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+     * for the class/size combination (in nanoseconds).
+     *
+     * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
+     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
+     *
+     * <p>{@code klass} should be one of the ones which is supported by
+     * {@link #isOutputSupportedFor(Class)}.</p>
+     *
+     * <p>{@code size} should be one of the ones returned by
+     * {@link #getOutputSizes(int)}.</p>
+     *
+     * <p>This should correspond to the frame duration when only that stream is active, with all
+     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+     * </p>
+     *
+     * <p>When multiple streams are used in a request, the minimum frame duration will be
+     * {@code max(individual stream min durations)}.</p>
+     *
+     * <!--
+     * TODO: uncomment after adding input stream support
+     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+     * regardless of whether the stream is input or output.</p>
+     * -->
+     *
+     * @param klass
+     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+     *          non-empty array returned by {@link #getOutputSizes(Class)}
+     * @param size an output-compatible size
+     * @return a minimum frame duration {@code >=} 0 in nanoseconds
+     *
+     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+     *
+     * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @see ImageFormat
+     * @see PixelFormat
+     */
+    public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
+        if (!isOutputSupportedFor(klass)) {
+            throw new IllegalArgumentException("klass was not supported");
+        }
+
+        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+                size, DURATION_MIN_FRAME);
+    }
+
+    /**
+     * Get the stall duration for the format/size combination (in nanoseconds).
+     *
+     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+     * <p>{@code size} should be one of the ones returned by
+     * {@link #getOutputSizes(int)}.</p>
+     *
+     * <p>
+     * A stall duration is how much extra time would get added to the normal minimum frame duration
+     * for a repeating request that has streams with non-zero stall.</p>
+     *
+     * <p>For example, consider JPEG captures which have the following characteristics:
+     *
+     * <ul>
+     * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
+     * in requests in which they are directly referenced, they act as JPEG streams.
+     * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
+     * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
+     * requests that actually reference a JPEG stream.
+     * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
+     * process more than 1 capture at a time.
+     * </ul>
+     *
+     * <p>In other words, using a repeating YUV request would result in a steady frame rate
+     * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
+     * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
+     * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
+     * 30 FPS.</p>
+     *
+     * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
+     * frame rate drop unless there are still outstanding buffers for that stream from previous
+     * requests.</p>
+     *
+     * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
+     * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
+     * added with the maximum stall duration for {@code S}.</p>
+     *
+     * <p>If interleaving requests with and without a stall duration, a request will stall by the
+     * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
+     *
+     * <p>This means that a stalling request will not have an exposure start until the stall has
+     * completed.</p>
+     *
+     * <p>This should correspond to the stall duration when only that stream is active, with all
+     * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
+     * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
+     * indeterminate stall duration for all streams in a request (the regular stall calculation
+     * rules are ignored).</p>
+     *
+     * <p>The following formats may always have a stall duration:
+     * <ul>
+     * <li>{@link ImageFormat#JPEG JPEG}
+     * <li>{@link ImageFormat#RAW_SENSOR RAW16}
+     * </ul>
+     * </p>
+     *
+     * <p>The following formats will never have a stall duration:
+     * <ul>
+     * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
+     * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
+     * </ul></p>
+     *
+     * <p>
+     * All other formats may or may not have an allowed stall duration on a per-capability basis;
+     * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+     * android.request.availableCapabilities} for more details.</p>
+     *
+     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
+     * for more information about calculating the max frame rate (absent stalls).</p>
+     *
+     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+     * @param size an output-compatible size
+     * @return a stall duration {@code >=} 0 in nanoseconds
+     *
+     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+     * @throws NullPointerException if {@code size} was {@code null}
+     *
+     * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @see ImageFormat
+     * @see PixelFormat
+     */
+    public long getOutputStallDuration(int format, Size size) {
+        checkArgumentFormatSupported(format, /*output*/true);
+
+        // Public formats (e.g. JPEG) are converted to internal ones (e.g. BLOB) before lookup
+        return getInternalFormatDuration(imageFormatToInternal(format),
+                size, DURATION_STALL);
+    }
+
+    /**
+     * Get the stall duration for the class/size combination (in nanoseconds).
+     *
+     * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
+     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
+     *
+     * <p>{@code klass} should be one of the ones with a non-empty array returned by
+     * {@link #getOutputSizes(Class)}.</p>
+     *
+     * <p>{@code size} should be one of the ones returned by
+     * {@link #getOutputSizes(Class)}.</p>
+     *
+     * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
+     * <em>stall duration</em>.</p>
+     *
+     * @param klass
+     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+     *          non-empty array returned by {@link #getOutputSizes(Class)}
+     * @param size an output-compatible size
+     * @return a stall duration {@code >=} 0 in nanoseconds
+     *
+     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+     *
+     * @see CaptureRequest#SENSOR_FRAME_DURATION
+     * @see ImageFormat
+     * @see PixelFormat
+     */
+    public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
+        if (!isOutputSupportedFor(klass)) {
+            throw new IllegalArgumentException("klass was not supported");
+        }
+
+        // Class-based outputs are always looked up via the implementation-defined internal format
+        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+                size, DURATION_STALL);
+    }
+
+    /**
+     * Check if this {@link StreamConfigurationMap} is equal to another
+     * {@link StreamConfigurationMap}.
+     *
+     * <p>Two maps are equal if and only if their underlying configuration, minimum frame
+     * duration, and stall duration arrays are all element-wise equal.</p>
+     *
+     * @return {@code true} if the objects were equal, {@code false} otherwise
+     */
+    @Override
+    public boolean equals(final Object obj) {
+        if (obj == null) {
+            return false;
+        }
+        if (this == obj) {
+            return true;
+        }
+        if (obj instanceof StreamConfigurationMap) {
+            final StreamConfigurationMap other = (StreamConfigurationMap) obj;
+            // XX: do we care about order?
+            return Arrays.equals(mConfigurations, other.mConfigurations) &&
+                    Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
+                    Arrays.equals(mStallDurations, other.mStallDurations);
+        }
+        return false;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public int hashCode() {
+        // XX: do we care about order?
+        // Hashes the same three arrays that equals() compares, keeping the contract consistent
+        return HashCodeHelpers.hashCode(mConfigurations, mMinFrameDurations, mStallDurations);
+    }
+
+    // Check that the argument is supported by #getOutputFormats or #getInputFormats
+    // (returns the format unchanged on success; throws IllegalArgumentException otherwise)
+    private int checkArgumentFormatSupported(int format, boolean output) {
+        checkArgumentFormat(format);
+
+        // Linear scan is acceptable; the public format lists are small
+        int[] formats = output ? getOutputFormats() : getInputFormats();
+        for (int i = 0; i < formats.length; ++i) {
+            if (format == formats[i]) {
+                return format;
+            }
+        }
+
+        throw new IllegalArgumentException(String.format(
+                "format %x is not supported by this stream configuration map", format));
+    }
+
+    /**
+     * Ensures that the format is either user-defined or implementation defined.
+     *
+     * <p>If a format has a different internal representation than the public representation,
+     * passing in the public representation here will fail.</p>
+     *
+     * <p>For example if trying to use {@link ImageFormat#JPEG}:
+     * it has a different public representation than the internal representation
+     * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
+     *
+     * <p>Any invalid/undefined formats will raise an exception.</p>
+     *
+     * @param format image format
+     * @return the format
+     *
+     * @throws IllegalArgumentException if the format was invalid
+     */
+    static int checkArgumentFormatInternal(int format) {
+        switch (format) {
+            // Internal-only constants pass through untouched
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            case HAL_PIXEL_FORMAT_BLOB:
+                return format;
+            // JPEG's internal representation is BLOB; the public constant is illegal here
+            case ImageFormat.JPEG:
+                throw new IllegalArgumentException(
+                        "ImageFormat.JPEG is an unknown internal format");
+            default:
+                return checkArgumentFormat(format);
+        }
+    }
+
+    /**
+     * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
+     *
+     * <p>If a format has a different public representation than the internal representation,
+     * passing in the internal representation here will fail.</p>
+     *
+     * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
+     * it has a different internal representation than the public representation
+     * {@link ImageFormat#JPEG}, this check will fail.</p>
+     *
+     * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
+     * </p>
+     *
+     * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
+     *
+     * @param format image format
+     * @return the format
+     *
+     * @throws IllegalArgumentException if the format was not user-defined
+     */
+    static int checkArgumentFormat(int format) {
+        // TODO: remove this hack, CTS shouldn't have been using internal constants
+        if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
+            Log.w(TAG, "RAW_OPAQUE is not yet a published format; allowing it anyway");
+            return format;
+        }
+
+        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+            throw new IllegalArgumentException(String.format(
+                    "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
+        }
+
+        return format;
+    }
+
+    /**
+     * Convert an internal format compatible with {@code graphics.h} into a
+     * public-visible {@code ImageFormat}/{@code PixelFormat} constant.
+     *
+     * <p>In particular these formats are converted:
+     * <ul>
+     * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG
+     * </ul>
+     * </p>
+     *
+     * <p>Passing in an implementation-defined format which has no public equivalent will fail;
+     * as will passing in a public format which has a different internal format equivalent.
+     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+     *
+     * <p>All other formats are returned as-is, no further invalid check is performed.</p>
+     *
+     * <p>This function is the dual of {@link #imageFormatToInternal}.</p>
+     *
+     * @param format an internal format compatible with {@code graphics.h}
+     * @return the converted public image format
+     *
+     * @throws IllegalArgumentException
+     *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
+     *          {@link ImageFormat#JPEG}
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     * @see #checkArgumentFormat
+     */
+    static int imageFormatToPublic(int format) {
+        switch (format) {
+            case HAL_PIXEL_FORMAT_BLOB:
+                return ImageFormat.JPEG;
+            case ImageFormat.JPEG:
+                throw new IllegalArgumentException(
+                        "ImageFormat.JPEG is an unknown internal format");
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                throw new IllegalArgumentException(
+                        "IMPLEMENTATION_DEFINED must not leak to public API");
+            default:
+                return format;
+        }
+    }
+
+    /**
+     * Convert image formats from internal to public formats (in-place).
+     *
+     * <p>A {@code null} array is passed through unchanged.</p>
+     *
+     * @param formats an array of image formats; mutated in-place
+     * @return {@code formats}
+     *
+     * @see #imageFormatToPublic
+     */
+    static int[] imageFormatToPublic(int[] formats) {
+        if (formats == null) {
+            return null;
+        }
+
+        for (int i = 0; i < formats.length; ++i) {
+            formats[i] = imageFormatToPublic(formats[i]);
+        }
+
+        return formats;
+    }
+
+    /**
+     * Convert a public format compatible with {@code ImageFormat} to an internal format
+     * from {@code graphics.h}.
+     *
+     * <p>In particular these formats are converted:
+     * <ul>
+     * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
+     * </ul>
+     * </p>
+     *
+     * <p>Passing in an implementation-defined format here will fail (it's not a public format);
+     * as will passing in an internal format which has a different public format equivalent.
+     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+     *
+     * <p>All other formats are returned as-is, no invalid check is performed.</p>
+     *
+     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
+     *
+     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return the converted internal image format
+     *
+     * @see ImageFormat
+     * @see PixelFormat
+     *
+     * @throws IllegalArgumentException
+     *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
+     */
+    static int imageFormatToInternal(int format) {
+        switch (format) {
+            case ImageFormat.JPEG:
+                return HAL_PIXEL_FORMAT_BLOB;
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                throw new IllegalArgumentException(
+                        "IMPLEMENTATION_DEFINED is not allowed via public API");
+            default:
+                return format;
+        }
+    }
+
+    /**
+     * Convert image formats from public to internal formats (in-place).
+     *
+     * <p>A {@code null} array is passed through unchanged.</p>
+     *
+     * @param formats an array of image formats; mutated in-place
+     * @return {@code formats}
+     *
+     * @see #imageFormatToInternal
+     *
+     * @hide
+     */
+    public static int[] imageFormatToInternal(int[] formats) {
+        if (formats == null) {
+            return null;
+        }
+
+        for (int i = 0; i < formats.length; ++i) {
+            formats[i] = imageFormatToInternal(formats[i]);
+        }
+
+        return formats;
+    }
+
+    /** Get the sizes for a public format, or {@code null} if the format is unsupported. */
+    private Size[] getPublicFormatSizes(int format, boolean output) {
+        try {
+            checkArgumentFormatSupported(format, output);
+        } catch (IllegalArgumentException e) {
+            // Contract: unsupported public formats yield null rather than throwing
+            return null;
+        }
+
+        format = imageFormatToInternal(format);
+
+        return getInternalFormatSizes(format, output);
+    }
+
+    /** Collect all sizes for an internal format in the requested (output/input) direction. */
+    private Size[] getInternalFormatSizes(int format, boolean output) {
+        HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
+
+        // Map value is the number of configurations with this format in this direction
+        Integer sizesCount = formatsMap.get(format);
+        if (sizesCount == null) {
+            throw new IllegalArgumentException("format not available");
+        }
+
+        int len = sizesCount;
+        Size[] sizes = new Size[len];
+        int sizeIndex = 0;
+
+        for (StreamConfiguration config : mConfigurations) {
+            if (config.getFormat() == format && config.isOutput() == output) {
+                sizes[sizeIndex++] = config.getSize();
+            }
+        }
+
+        // Invariant check: the counts map must agree with the configuration list
+        if (sizeIndex != len) {
+            throw new AssertionError(
+                    "Too few sizes (expected " + len + ", actual " + sizeIndex + ")");
+        }
+
+        return sizes;
+    }
+
+    /** Get the list of publicly visible output formats; does not include IMPL_DEFINED */
+    private int[] getPublicFormats(boolean output) {
+        int[] formats = new int[getPublicFormatCount(output)];
+
+        int i = 0;
+
+        for (int format : getFormatsMap(output).keySet()) {
+            if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+                formats[i++] = format;
+            }
+        }
+
+        // Invariant check: getPublicFormatCount must agree with the key set minus IMPL_DEFINED
+        if (formats.length != i) {
+            throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
+        }
+
+        return imageFormatToPublic(formats);
+    }
+
+    /** Get the (format -> size count) map for either the output or the input direction */
+    private HashMap<Integer, Integer> getFormatsMap(boolean output) {
+        return output ? mOutputFormats : mInputFormats;
+    }
+
+    /** Look up a duration (min-frame or stall) for an internal format/size combination. */
+    private long getInternalFormatDuration(int format, Size size, int duration) {
+        // assume format is already checked, since it's internal
+        // (callers convert public formats via imageFormatToInternal before reaching here)
+
+        if (!arrayContains(getInternalFormatSizes(format, /*output*/true), size)) {
+            throw new IllegalArgumentException("size was not supported");
+        }
+
+        StreamConfigurationDuration[] durations = getDurations(duration);
+
+        for (StreamConfigurationDuration configurationDuration : durations) {
+            if (configurationDuration.getFormat() == format &&
+                    configurationDuration.getWidth() == size.getWidth() &&
+                    configurationDuration.getHeight() == size.getHeight()) {
+                return configurationDuration.getDuration();
+            }
+        }
+
+        // No explicit entry; fall back to the per-kind default
+        return getDurationDefault(duration);
+    }
+
+    /**
+     * Get the durations array for the kind of duration.
+     *
+     * @param duration one of {@link #DURATION_MIN_FRAME} or {@link #DURATION_STALL}
+     * @return the backing array of durations for that kind
+     * @throws IllegalArgumentException if {@code duration} is not one of the above
+     *
+     * @see #DURATION_MIN_FRAME
+     * @see #DURATION_STALL
+     */
+    private StreamConfigurationDuration[] getDurations(int duration) {
+        switch (duration) {
+            case DURATION_MIN_FRAME:
+                return mMinFrameDurations;
+            case DURATION_STALL:
+                return mStallDurations;
+            default:
+                throw new IllegalArgumentException("duration was invalid");
+        }
+    }
+
+    /** Default value when no duration entry exists for a format/size combination. */
+    private long getDurationDefault(int duration) {
+        switch (duration) {
+            case DURATION_MIN_FRAME:
+                throw new AssertionError("Minimum frame durations are required to be listed");
+            case DURATION_STALL:
+                return 0L; // OK. A lack of a stall duration implies a 0 stall duration
+            default:
+                throw new IllegalArgumentException("duration was invalid");
+        }
+    }
+
+    /** Count the number of publicly-visible formats for the given (output/input) direction */
+    private int getPublicFormatCount(boolean output) {
+        HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
+
+        // IMPLEMENTATION_DEFINED is internal-only and never exposed publicly
+        int size = formatsMap.size();
+        if (formatsMap.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+            size -= 1;
+        }
+        return size;
+    }
+
+    /** Null-safe linear search; compares elements with {@link Objects#equals}. */
+    private static <T> boolean arrayContains(T[] array, T element) {
+        if (array == null) {
+            return false;
+        }
+
+        for (T el : array) {
+            if (Objects.equals(el, element)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    // from system/core/include/system/graphics.h
+    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
+    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
+
+    /**
+     * Selectors for the kind of duration being looked up.
+     *
+     * @see #getDurations(int)
+     * @see #getDurationDefault(int)
+     */
+    private static final int DURATION_MIN_FRAME = 0;
+    private static final int DURATION_STALL = 1;
+
+    // Final with no initializer: assigned exactly once in the constructor (outside this chunk)
+    private final StreamConfiguration[] mConfigurations;
+    private final StreamConfigurationDuration[] mMinFrameDurations;
+    private final StreamConfigurationDuration[] mStallDurations;
+
+    /** ImageFormat -> num output sizes mapping */
+    private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mOutputFormats =
+            new HashMap<Integer, Integer>();
+    /** ImageFormat -> num input sizes mapping */
+    private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mInputFormats =
+            new HashMap<Integer, Integer>();
+
+}
diff --git a/core/java/android/hardware/camera2/LongParcelable.aidl b/core/java/android/hardware/camera2/utils/LongParcelable.aidl
similarity index 93%
rename from core/java/android/hardware/camera2/LongParcelable.aidl
rename to core/java/android/hardware/camera2/utils/LongParcelable.aidl
index 7d7e51b..98ad1b2 100644
--- a/core/java/android/hardware/camera2/LongParcelable.aidl
+++ b/core/java/android/hardware/camera2/utils/LongParcelable.aidl
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package android.hardware.camera2;
+package android.hardware.camera2.utils;
 
 /** @hide */
-parcelable LongParcelable;
\ No newline at end of file
+parcelable LongParcelable;
diff --git a/core/java/android/hardware/camera2/LongParcelable.java b/core/java/android/hardware/camera2/utils/LongParcelable.java
similarity index 97%
rename from core/java/android/hardware/camera2/LongParcelable.java
rename to core/java/android/hardware/camera2/utils/LongParcelable.java
index 97b0631..c89b339 100644
--- a/core/java/android/hardware/camera2/LongParcelable.java
+++ b/core/java/android/hardware/camera2/utils/LongParcelable.java
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package android.hardware.camera2;
+package android.hardware.camera2.utils;
 
 import android.os.Parcel;
 import android.os.Parcelable;
diff --git a/graphics/java/android/graphics/ImageFormat.java b/graphics/java/android/graphics/ImageFormat.java
index 062acaf..fe53a17 100644
--- a/graphics/java/android/graphics/ImageFormat.java
+++ b/graphics/java/android/graphics/ImageFormat.java
@@ -272,6 +272,7 @@
             case NV16:
             case YUY2:
             case YV12:
+            case JPEG:
             case NV21:
             case YUV_420_888:
             case RAW_SENSOR:
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
index b28733a..5ab586f 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
@@ -23,19 +23,21 @@
 import android.graphics.Point;
 import android.graphics.PointF;
 import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.ColorSpaceTransform;
 import android.hardware.camera2.Face;
 import android.hardware.camera2.MeteringRectangle;
 import android.hardware.camera2.Rational;
-import android.hardware.camera2.ReprocessFormatsMap;
 import android.hardware.camera2.RggbChannelVector;
 import android.hardware.camera2.Size;
-import android.hardware.camera2.StreamConfiguration;
-import android.hardware.camera2.StreamConfigurationDuration;
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.params.StreamConfigurationMap;
 import android.hardware.camera2.utils.TypeReference;
 
 import static android.hardware.camera2.impl.CameraMetadataNative.*;
@@ -72,6 +74,9 @@
     static final int ANDROID_CONTROL_AE_ANTIBANDING_MODE = ANDROID_CONTROL_START;
     static final int ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION = ANDROID_CONTROL_START + 1;
 
+    // From graphics.h
+    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+
     @Override
     public void setUp() {
         mMetadata = new CameraMetadataNative();
@@ -293,6 +298,28 @@
         }
     }
 
+    /** Assert that {@code array} (which may be a primitive array) contains {@code needle}. */
+    private static <T, T2> void assertArrayContains(T needle, T2 array) {
+        // Uses reflection (java.lang.reflect.Array) so primitive arrays work too
+        if (!array.getClass().isArray()) {
+            throw new IllegalArgumentException("actual must be array");
+        }
+
+        int len = Array.getLength(array);
+        for (int i = 0; i < len; ++i) {
+
+            Object actualElement = Array.get(array, i);
+
+            if (needle.equals(actualElement)) {
+                return;
+            }
+        }
+
+        fail(String.format(
+                "could not find element in array (needle %s). "
+                        + "Array was: %s.",
+                        needle,
+                        formatArray(array, len)));
+    }
+
     private <T> void checkKeyGetAndSet(String keyStr, TypeReference<T> typeToken, T expected,
             boolean reuse) {
         Key<T> key = new Key<T>(keyStr, typeToken);
@@ -804,18 +831,48 @@
     @SmallTest
     public void testReadWriteReprocessFormatsMap() {
 
-        final int RAW_OPAQUE = 0x24;
+        // final int RAW_OPAQUE = 0x24; // TODO: add RAW_OPAQUE to ImageFormat
         final int RAW16 = ImageFormat.RAW_SENSOR;
         final int YUV_420_888 = ImageFormat.YUV_420_888;
         final int BLOB = 0x21;
 
+        // TODO: also test HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as an output
         int[] contents = new int[] {
-                RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
+                YUV_420_888, 3, YUV_420_888, ImageFormat.NV21, BLOB,
                 RAW16, 2, YUV_420_888, BLOB,
+
         };
 
         // int32 x n
-        checkKeyMarshal("android.scaler.availableInputOutputFormatsMap",
+        Key<ReprocessFormatsMap> key = new Key<ReprocessFormatsMap>(
+                "android.scaler.availableInputOutputFormatsMap", ReprocessFormatsMap.class);
+        mMetadata.writeValues(key.getTag(), toByteArray(contents));
+
+        ReprocessFormatsMap map = mMetadata.get(key);
+
+        /*
+         * Make sure the inputs/outputs were what we expected.
+         * - Use public image format constants here.
+         */
+
+        int[] expectedInputs = new int[] {
+                YUV_420_888, RAW16
+        };
+        assertArrayEquals(expectedInputs, map.getInputs());
+
+        int[] expectedYuvOutputs = new int[] {
+                YUV_420_888, ImageFormat.NV21, ImageFormat.JPEG,
+        };
+        assertArrayEquals(expectedYuvOutputs, map.getOutputs(ImageFormat.YUV_420_888));
+
+        int[] expectedRaw16Outputs = new int[] {
+                YUV_420_888, ImageFormat.JPEG,
+        };
+        assertArrayEquals(expectedRaw16Outputs, map.getOutputs(ImageFormat.RAW_SENSOR));
+
+        // Finally, do a round-trip check as a sanity
+        checkKeyMarshal(
+                "android.scaler.availableInputOutputFormatsMap",
                 new ReprocessFormatsMap(contents),
                 toByteArray(contents)
         );
@@ -889,68 +946,6 @@
                 expectedIntValues, availableFormatTag);
 
         //
-        // android.scaler.availableStreamConfigurations (int x n x 4 array)
-        //
-        final int OUTPUT = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
-        int[] availableStreamConfigs = new int[] {
-                0x20, 3280, 2464, OUTPUT, // RAW16
-                0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
-                0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
-                0x100, 3264, 2448, OUTPUT, // ImageFormat.JPEG
-                0x100, 3200, 2400, OUTPUT, // ImageFormat.JPEG
-                0x100, 2592, 1944, OUTPUT, // ImageFormat.JPEG
-                0x100, 2048, 1536, OUTPUT, // ImageFormat.JPEG
-                0x100, 1920, 1080, OUTPUT  // ImageFormat.JPEG
-        };
-        int[] expectedAvailableStreamConfigs = new int[] {
-                0x20, 3280, 2464, OUTPUT, // RAW16
-                0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
-                0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
-                0x21, 3264, 2448, OUTPUT, // BLOB
-                0x21, 3200, 2400, OUTPUT, // BLOB
-                0x21, 2592, 1944, OUTPUT, // BLOB
-                0x21, 2048, 1536, OUTPUT, // BLOB
-                0x21, 1920, 1080, OUTPUT  // BLOB
-        };
-        int availableStreamConfigTag =
-                CameraMetadataNative.getTag("android.scaler.availableStreamConfigurations");
-
-        Key<int[]> configKey = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
-        validateArrayMetadataReadWriteOverride(configKey, availableStreamConfigs,
-                expectedAvailableStreamConfigs, availableStreamConfigTag);
-
-        //
-        // android.scaler.availableMinFrameDurations (int x n x 4 array)
-
-        //
-        long[] availableMinDurations = new long[] {
-                0x20, 3280, 2464, 33333336, // RAW16
-                0x23, 3264, 2448, 33333336, // YCbCr_420_888
-                0x23, 3200, 2400, 33333336, // YCbCr_420_888
-                0x100, 3264, 2448, 33333336, // ImageFormat.JPEG
-                0x100, 3200, 2400, 33333336, // ImageFormat.JPEG
-                0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
-                0x100, 2048, 1536, 33333336, // ImageFormat.JPEG
-                0x100, 1920, 1080, 33333336  // ImageFormat.JPEG
-        };
-        long[] expectedAvailableMinDurations = new long[] {
-                0x20, 3280, 2464, 33333336, // RAW16
-                0x23, 3264, 2448, 33333336, // YCbCr_420_888
-                0x23, 3200, 2400, 33333336, // YCbCr_420_888
-                0x21, 3264, 2448, 33333336, // BLOB
-                0x21, 3200, 2400, 33333336, // BLOB
-                0x21, 2592, 1944, 33333336, // BLOB
-                0x21, 2048, 1536, 33333336, // BLOB
-                0x21, 1920, 1080, 33333336  // BLOB
-        };
-        int availableMinDurationsTag =
-                CameraMetadataNative.getTag("android.scaler.availableMinFrameDurations");
-
-        Key<long[]> durationKey = CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
-        validateArrayMetadataReadWriteOverride(durationKey, availableMinDurations,
-                expectedAvailableMinDurations, availableMinDurationsTag);
-
-        //
         // android.statistics.faces (Face x n array)
         //
         int[] expectedFaceIds = new int[] {1, 2, 3, 4, 5};
@@ -1015,14 +1010,238 @@
     }
 
     /**
+     * Set the raw native value of the available stream configurations; ensure that
+     * the read-out managed value is consistent with what we write in.
+     */
+    @SmallTest
+    public void testOverrideStreamConfigurationMap() {
+
+        /*
+         * First, write all the raw values:
+         * - availableStreamConfigurations
+         * - availableMinFrameDurations
+         * - availableStallDurations
+         *
+         * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
+         *
+         * Finally, validate that the map was unmarshaled correctly
+         * and is converting the internal formats to public formats properly.
+         */
+
+        //
+        // android.scaler.availableStreamConfigurations (int x n x 4 array)
+        //
+        final int OUTPUT = 0;
+        final int INPUT = 1;
+        int[] rawAvailableStreamConfigs = new int[] {
+                0x20, 3280, 2464, OUTPUT, // RAW16
+                0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
+                0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
+                0x21, 3264, 2448, OUTPUT, // BLOB
+                0x21, 3200, 2400, OUTPUT, // BLOB
+                0x21, 2592, 1944, OUTPUT, // BLOB
+                0x21, 2048, 1536, OUTPUT, // BLOB
+                0x21, 1920, 1080, OUTPUT, // BLOB
+                0x22, 640, 480, OUTPUT,   // IMPLEMENTATION_DEFINED
+                0x20, 320, 240, INPUT,   // RAW16
+        };
+        Key<StreamConfiguration[]> configKey =
+                CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+        mMetadata.writeValues(configKey.getTag(),
+                toByteArray(rawAvailableStreamConfigs));
+
+        //
+        // android.scaler.availableMinFrameDurations (int64 x n x 4 array)
+        //
+        long[] expectedAvailableMinDurations = new long[] {
+                0x20, 3280, 2464, 33333331, // RAW16
+                0x23, 3264, 2448, 33333332, // YCbCr_420_888
+                0x23, 3200, 2400, 33333333, // YCbCr_420_888
+                0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
+                0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
+                0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
+                0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
+                0x100, 1920, 1080, 33333338  // ImageFormat.JPEG
+        };
+        long[] rawAvailableMinDurations = new long[] {
+                0x20, 3280, 2464, 33333331, // RAW16
+                0x23, 3264, 2448, 33333332, // YCbCr_420_888
+                0x23, 3200, 2400, 33333333, // YCbCr_420_888
+                0x21, 3264, 2448, 33333334, // BLOB
+                0x21, 3200, 2400, 33333335, // BLOB
+                0x21, 2592, 1944, 33333336, // BLOB
+                0x21, 2048, 1536, 33333337, // BLOB
+                0x21, 1920, 1080, 33333338  // BLOB
+        };
+        Key<StreamConfigurationDuration[]> durationKey =
+                CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+        mMetadata.writeValues(durationKey.getTag(),
+                toByteArray(rawAvailableMinDurations));
+
+        //
+        // android.scaler.availableStallDurations (int64 x n x 4 array)
+        //
+        long[] expectedAvailableStallDurations = new long[] {
+                0x20, 3280, 2464, 0,        // RAW16
+                0x23, 3264, 2448, 0,        // YCbCr_420_888
+                0x23, 3200, 2400, 0,        // YCbCr_420_888
+                0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
+                0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
+                0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
+                0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
+                0x100, 1920, 1080, 33333338  // ImageFormat.JPEG
+        };
+        // Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
+        long[] rawAvailableStallDurations = new long[] {
+                0x21, 3264, 2448, 33333334, // BLOB
+                0x21, 3200, 2400, 33333335, // BLOB
+                0x21, 2592, 1944, 33333336, // BLOB
+                0x21, 2048, 1536, 33333337, // BLOB
+                0x21, 1920, 1080, 33333338  // BLOB
+        };
+        Key<StreamConfigurationDuration[]> stallDurationKey =
+                CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS;
+        mMetadata.writeValues(stallDurationKey.getTag(),
+                toByteArray(rawAvailableStallDurations));
+
+        //
+        // android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
+        //
+        StreamConfigurationMap streamConfigMap = mMetadata.get(
+                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+        // Inputs
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/false);
+
+        // Outputs
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/true);
+        checkStreamConfigurationMapByFormatSize(
+                streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/true);
+
+        // Min Frame Durations
+
+        final int DURATION_TUPLE_SIZE = 4;
+        for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
+            checkStreamConfigurationMapDurationByFormatSize(
+                    streamConfigMap,
+                    (int)expectedAvailableMinDurations[i],
+                    (int)expectedAvailableMinDurations[i+1],
+                    (int)expectedAvailableMinDurations[i+2],
+                    Duration.MinFrame,
+                    expectedAvailableMinDurations[i+3]);
+        }
+
+        // Stall Frame Durations
+
+        for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
+            checkStreamConfigurationMapDurationByFormatSize(
+                    streamConfigMap,
+                    (int)expectedAvailableStallDurations[i],
+                    (int)expectedAvailableStallDurations[i+1],
+                    (int)expectedAvailableStallDurations[i+2],
+                    Duration.Stall,
+                    expectedAvailableStallDurations[i+3]);
+        }
+    }
+
+    private static void checkStreamConfigurationMapByFormatSize(StreamConfigurationMap configMap,
+            int format, int width, int height,
+            boolean output) {
+
+        /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
+        final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;
+
+        android.util.Size[] sizes;
+        int[] formats;
+
+        if (output) {
+            if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+                sizes = configMap.getOutputSizes(IMPLEMENTATION_DEFINED_OUTPUT_CLASS);
+                // in this case the 'is output format supported' is vacuously true
+                formats = new int[] { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED };
+            } else {
+                sizes = configMap.getOutputSizes(format);
+                formats = configMap.getOutputFormats();
+                assertTrue("Format must be supported by stream configuration map",
+                        configMap.isOutputSupportedFor(format));
+            }
+        } else {
+            // NOTE: getInputSizes has no Class<?> overload, so IMPLEMENTATION_DEFINED inputs would fail here
+            sizes = configMap.getInputSizes(format);
+            formats = configMap.getInputFormats();
+        }
+
+        android.util.Size expectedSize = new android.util.Size(width, height);
+
+        assertArrayContains(format, formats);
+        assertArrayContains(expectedSize, sizes);
+    }
+
+    private enum Duration {
+        MinFrame,
+        Stall
+    }
+
+    private static void checkStreamConfigurationMapDurationByFormatSize(
+            StreamConfigurationMap configMap,
+            int format, int width, int height, Duration durationKind, long expectedDuration) {
+
+        /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
+        final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;
+
+        long actualDuration;
+
+        android.util.Size size = new android.util.Size(width, height);
+        switch (durationKind) {
+            case MinFrame:
+                if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+                    actualDuration = configMap.getOutputMinFrameDuration(
+                            IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
+                } else {
+                    actualDuration = configMap.getOutputMinFrameDuration(format, size);
+                }
+
+                break;
+            case Stall:
+                if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+                    actualDuration = configMap.getOutputStallDuration(
+                            IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
+                } else {
+                    actualDuration = configMap.getOutputStallDuration(format, size);
+                }
+
+                break;
+            default:
+                throw new AssertionError();
+        }
+
+        assertEquals("Expected " + durationKind + " to match actual value", expectedDuration,
+                actualDuration);
+    }
+
+    /**
      * Validate metadata array tag read/write override.
      *
      * <p>Only support long and int array for now, can be easily extend to support other
      * primitive arrays.</p>
      */
-    private <T> void validateArrayMetadataReadWriteOverride(Key<T> key, T writeValues,
-            T readValues, int tag) {
-        Class<?> type = writeValues.getClass();
+    private <T> void validateArrayMetadataReadWriteOverride(Key<T> key, T expectedWriteValues,
+            T expectedReadValues, int tag) {
+        Class<?> type = expectedWriteValues.getClass();
         if (!type.isArray()) {
             throw new IllegalArgumentException("This function expects an key with array type");
         } else if (type != int[].class && type != long[].class) {
@@ -1030,13 +1249,13 @@
         }
 
         // Write
-        mMetadata.set(key, writeValues);
+        mMetadata.set(key, expectedWriteValues);
 
         byte[] readOutValues = mMetadata.readValues(tag);
 
         ByteBuffer bf = ByteBuffer.wrap(readOutValues).order(ByteOrder.nativeOrder());
 
-        int readValuesLength = Array.getLength(readValues);
+        int readValuesLength = Array.getLength(expectedReadValues);
         int readValuesNumBytes = readValuesLength * 4;
         if (type == long[].class) {
             readValuesNumBytes = readValuesLength * 8;
@@ -1045,9 +1264,9 @@
         assertEquals(readValuesNumBytes, readOutValues.length);
         for (int i = 0; i < readValuesLength; ++i) {
             if (type == int[].class) {
-                assertEquals(Array.getInt(readValues, i), bf.getInt());
+                assertEquals(Array.getInt(expectedReadValues, i), bf.getInt());
             } else if (type == long[].class) {
-                assertEquals(Array.getLong(readValues, i), bf.getLong());
+                assertEquals(Array.getLong(expectedReadValues, i), bf.getLong());
             }
         }
 
@@ -1057,16 +1276,16 @@
                 ByteBuffer.wrap(readOutValuesAsByteArray).order(ByteOrder.nativeOrder());
         for (int i = 0; i < readValuesLength; ++i) {
             if (type == int[].class) {
-                readOutValuesByteBuffer.putInt(Array.getInt(readValues, i));
+                readOutValuesByteBuffer.putInt(Array.getInt(expectedReadValues, i));
             } else if (type == long[].class) {
-                readOutValuesByteBuffer.putLong(Array.getLong(readValues, i));
+                readOutValuesByteBuffer.putLong(Array.getLong(expectedReadValues, i));
             }
         }
         mMetadata.writeValues(tag, readOutValuesAsByteArray);
 
         T result = mMetadata.get(key);
         assertNotNull(key.getName() + " result shouldn't be null", result);
-        assertArrayEquals(writeValues, result);
+        assertArrayEquals(expectedWriteValues, result);
     }
 
     // TODO: move somewhere else