Merge changes Ia0f54124,I1e085f5e,Ic0a3ff0a,I141847df,I8ae4efa2 into lmp-preview-dev

* changes:
  camera2: Update native/managed key mappings.
  Camera2: switch API interface to Java classes
  Camera2: Use ColorSpaceTransform and RggbChannelVector, replace enum byte[]
  camera2: Add new metadata keys, change types for existing range keys
  Camera2: Replace int[] by MeteringRectangle
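
For reviewers, a minimal sketch of consuming one of the retyped enum-list keys after this
merge; the class and variable names here are placeholders, not part of the change:

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraManager;
    import android.hardware.camera2.CameraMetadata;

    class AeModeCheck {
        // The available-mode lists are now int[] of CameraMetadata enum values
        // instead of byte[].
        static boolean supportsAeOn(CameraManager manager, String cameraId)
                throws CameraAccessException {
            CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
            int[] aeModes = chars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
            for (int mode : aeModes) {
                if (mode == CameraMetadata.CONTROL_AE_MODE_ON) {
                    return true;
                }
            }
            return false;
        }
    }
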
diff --git a/api/current.txt b/api/current.txt
index 5c931f9..cddbbf8 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -12153,7 +12153,9 @@
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_SCENE_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AWB_AVAILABLE_MODES;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AE;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AF;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AWB;
     field public static final android.hardware.camera2.CameraCharacteristics.Key EDGE_AVAILABLE_EDGE_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key FLASH_INFO_AVAILABLE;
     field public static final android.hardware.camera2.CameraCharacteristics.Key HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES;
@@ -12167,11 +12169,12 @@
     field public static final android.hardware.camera2.CameraCharacteristics.Key LENS_INFO_FOCUS_DISTANCE_CALIBRATION;
     field public static final android.hardware.camera2.CameraCharacteristics.Key LENS_INFO_HYPERFOCAL_DISTANCE;
     field public static final android.hardware.camera2.CameraCharacteristics.Key LENS_INFO_MINIMUM_FOCUS_DISTANCE;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key LENS_INFO_SHADING_MAP_SIZE;
     field public static final android.hardware.camera2.CameraCharacteristics.Key NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_AVAILABLE_CAPABILITIES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_INPUT_STREAMS;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_STREAMS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC_STALLING;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_RAW;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PARTIAL_RESULT_COUNT;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PIPELINE_MAX_DEPTH;
     field public static final android.hardware.camera2.CameraCharacteristics.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
@@ -12484,9 +12487,7 @@
     field public static final android.hardware.camera2.CaptureRequest.Key EDGE_MODE;
     field public static final android.hardware.camera2.CaptureRequest.Key FLASH_MODE;
     field public static final android.hardware.camera2.CaptureRequest.Key HOT_PIXEL_MODE;
-    field public static final android.hardware.camera2.CaptureRequest.Key JPEG_GPS_COORDINATES;
-    field public static final android.hardware.camera2.CaptureRequest.Key JPEG_GPS_PROCESSING_METHOD;
-    field public static final android.hardware.camera2.CaptureRequest.Key JPEG_GPS_TIMESTAMP;
+    field public static final android.hardware.camera2.CaptureRequest.Key JPEG_GPS_LOCATION;
     field public static final android.hardware.camera2.CaptureRequest.Key JPEG_ORIENTATION;
     field public static final android.hardware.camera2.CaptureRequest.Key JPEG_QUALITY;
     field public static final android.hardware.camera2.CaptureRequest.Key JPEG_THUMBNAIL_QUALITY;
@@ -12507,9 +12508,7 @@
     field public static final android.hardware.camera2.CaptureRequest.Key STATISTICS_FACE_DETECT_MODE;
     field public static final android.hardware.camera2.CaptureRequest.Key STATISTICS_HOT_PIXEL_MAP_MODE;
     field public static final android.hardware.camera2.CaptureRequest.Key STATISTICS_LENS_SHADING_MAP_MODE;
-    field public static final android.hardware.camera2.CaptureRequest.Key TONEMAP_CURVE_BLUE;
-    field public static final android.hardware.camera2.CaptureRequest.Key TONEMAP_CURVE_GREEN;
-    field public static final android.hardware.camera2.CaptureRequest.Key TONEMAP_CURVE_RED;
+    field public static final android.hardware.camera2.CaptureRequest.Key TONEMAP_CURVE;
     field public static final android.hardware.camera2.CaptureRequest.Key TONEMAP_MODE;
   }
 
@@ -12562,9 +12561,7 @@
     field public static final android.hardware.camera2.CaptureResult.Key FLASH_MODE;
     field public static final android.hardware.camera2.CaptureResult.Key FLASH_STATE;
     field public static final android.hardware.camera2.CaptureResult.Key HOT_PIXEL_MODE;
-    field public static final android.hardware.camera2.CaptureResult.Key JPEG_GPS_COORDINATES;
-    field public static final android.hardware.camera2.CaptureResult.Key JPEG_GPS_PROCESSING_METHOD;
-    field public static final android.hardware.camera2.CaptureResult.Key JPEG_GPS_TIMESTAMP;
+    field public static final android.hardware.camera2.CaptureResult.Key JPEG_GPS_LOCATION;
     field public static final android.hardware.camera2.CaptureResult.Key JPEG_ORIENTATION;
     field public static final android.hardware.camera2.CaptureResult.Key JPEG_QUALITY;
     field public static final android.hardware.camera2.CaptureResult.Key JPEG_THUMBNAIL_QUALITY;
@@ -12593,12 +12590,10 @@
     field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_FACE_DETECT_MODE;
     field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_HOT_PIXEL_MAP;
     field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_HOT_PIXEL_MAP_MODE;
-    field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_LENS_SHADING_MAP;
+    field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_LENS_SHADING_CORRECTION_MAP;
     field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_LENS_SHADING_MAP_MODE;
     field public static final android.hardware.camera2.CaptureResult.Key STATISTICS_SCENE_FLICKER;
-    field public static final android.hardware.camera2.CaptureResult.Key TONEMAP_CURVE_BLUE;
-    field public static final android.hardware.camera2.CaptureResult.Key TONEMAP_CURVE_GREEN;
-    field public static final android.hardware.camera2.CaptureResult.Key TONEMAP_CURVE_RED;
+    field public static final android.hardware.camera2.CaptureResult.Key TONEMAP_CURVE;
     field public static final android.hardware.camera2.CaptureResult.Key TONEMAP_MODE;
   }
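
A minimal sketch of reading the split-out keys listed above; the CameraCharacteristics
instance is assumed to come from CameraManager#getCameraCharacteristics elsewhere, and the
class/method names are placeholders:

    import android.hardware.camera2.CameraCharacteristics;
    import android.util.Log;

    class StreamLimits {
        // The packed CONTROL_MAX_REGIONS and REQUEST_MAX_NUM_OUTPUT_STREAMS arrays are
        // replaced by individual Integer-valued keys, one per use case.
        static void logLimits(CameraCharacteristics chars) {
            Integer maxAeRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
            Integer maxAfRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
            Integer maxAwbRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
            Integer maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
            Integer maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
            Integer maxStalling = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
            Log.d("StreamLimits", "AE/AF/AWB regions: " + maxAeRegions + "/" + maxAfRegions
                    + "/" + maxAwbRegions + "; RAW/proc/stalling streams: "
                    + maxRaw + "/" + maxProc + "/" + maxStalling);
        }
    }
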
 
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index c08424a..eb8d402 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -296,8 +296,8 @@
      * valid anti-banding modes that the application may request
      * for this camera device; they must include AUTO.</p>
      */
-    public static final Key<byte[]> CONTROL_AE_AVAILABLE_ANTIBANDING_MODES =
-            new Key<byte[]>("android.control.aeAvailableAntibandingModes", byte[].class);
+    public static final Key<int[]> CONTROL_AE_AVAILABLE_ANTIBANDING_MODES =
+            new Key<int[]>("android.control.aeAvailableAntibandingModes", int[].class);
 
     /**
      * <p>The set of auto-exposure modes that are supported by this
@@ -315,15 +315,15 @@
      *
      * @see CaptureRequest#CONTROL_AE_MODE
      */
-    public static final Key<byte[]> CONTROL_AE_AVAILABLE_MODES =
-            new Key<byte[]>("android.control.aeAvailableModes", byte[].class);
+    public static final Key<int[]> CONTROL_AE_AVAILABLE_MODES =
+            new Key<int[]>("android.control.aeAvailableModes", int[].class);
 
     /**
      * <p>List of frame rate ranges supported by the
      * AE algorithm/hardware</p>
      */
-    public static final Key<int[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
-            new Key<int[]>("android.control.aeAvailableTargetFpsRanges", int[].class);
+    public static final Key<android.util.Range<Integer>[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
+            new Key<android.util.Range<Integer>[]>("android.control.aeAvailableTargetFpsRanges", new TypeReference<android.util.Range<Integer>[]>() {{ }});
 
     /**
      * <p>Maximum and minimum exposure compensation
@@ -332,8 +332,8 @@
      *
      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
      */
-    public static final Key<int[]> CONTROL_AE_COMPENSATION_RANGE =
-            new Key<int[]>("android.control.aeCompensationRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_COMPENSATION_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeCompensationRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Smallest step by which exposure compensation
@@ -355,8 +355,8 @@
      * @see CaptureRequest#CONTROL_AF_MODE
      * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
      */
-    public static final Key<byte[]> CONTROL_AF_AVAILABLE_MODES =
-            new Key<byte[]>("android.control.afAvailableModes", byte[].class);
+    public static final Key<int[]> CONTROL_AF_AVAILABLE_MODES =
+            new Key<int[]>("android.control.afAvailableModes", int[].class);
 
     /**
      * <p>List containing the subset of color effects
@@ -374,8 +374,8 @@
      * @see CaptureRequest#CONTROL_EFFECT_MODE
      * @see CaptureRequest#CONTROL_MODE
      */
-    public static final Key<byte[]> CONTROL_AVAILABLE_EFFECTS =
-            new Key<byte[]>("android.control.availableEffects", byte[].class);
+    public static final Key<int[]> CONTROL_AVAILABLE_EFFECTS =
+            new Key<int[]>("android.control.availableEffects", int[].class);
 
     /**
      * <p>List containing a subset of scene modes
@@ -388,15 +388,15 @@
      *
      * @see CaptureRequest#CONTROL_SCENE_MODE
      */
-    public static final Key<byte[]> CONTROL_AVAILABLE_SCENE_MODES =
-            new Key<byte[]>("android.control.availableSceneModes", byte[].class);
+    public static final Key<int[]> CONTROL_AVAILABLE_SCENE_MODES =
+            new Key<int[]>("android.control.availableSceneModes", int[].class);
 
     /**
      * <p>List of video stabilization modes that can
      * be supported</p>
      */
-    public static final Key<byte[]> CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES =
-            new Key<byte[]>("android.control.availableVideoStabilizationModes", byte[].class);
+    public static final Key<int[]> CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES =
+            new Key<int[]>("android.control.availableVideoStabilizationModes", int[].class);
 
     /**
      * <p>The set of auto-white-balance modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode})
@@ -414,8 +414,8 @@
      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
      * @see CaptureRequest#CONTROL_AWB_MODE
      */
-    public static final Key<byte[]> CONTROL_AWB_AVAILABLE_MODES =
-            new Key<byte[]>("android.control.awbAvailableModes", byte[].class);
+    public static final Key<int[]> CONTROL_AWB_AVAILABLE_MODES =
+            new Key<int[]>("android.control.awbAvailableModes", int[].class);
 
     /**
      * <p>List of the maximum number of regions that can be used for metering in
@@ -427,19 +427,53 @@
      * @see CaptureRequest#CONTROL_AE_REGIONS
      * @see CaptureRequest#CONTROL_AF_REGIONS
      * @see CaptureRequest#CONTROL_AWB_REGIONS
+     * @hide
      */
     public static final Key<int[]> CONTROL_MAX_REGIONS =
             new Key<int[]>("android.control.maxRegions", int[].class);
 
     /**
+     * <p>The maximum number of regions that can be used for metering in
+     * auto-exposure (AE);
+     * this corresponds to the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AE_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AE =
+            new Key<Integer>("android.control.maxRegionsAe", int.class);
+
+    /**
+     * <p>The maximum number of regions that can be used for metering in
+     * auto-white balance (AWB);
+     * this corresponds to the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AWB_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AWB =
+            new Key<Integer>("android.control.maxRegionsAwb", int.class);
+
+    /**
+     * <p>The maximum number of regions that can be used for metering in
+     * auto-focus (AF);
+     * this corresponds to the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AF_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AF =
+            new Key<Integer>("android.control.maxRegionsAf", int.class);
+
+    /**
      * <p>The set of edge enhancement modes supported by this camera device.</p>
      * <p>This tag lists the valid modes for {@link CaptureRequest#EDGE_MODE android.edge.mode}.</p>
      * <p>Full-capability camera devices must always support OFF and FAST.</p>
      *
      * @see CaptureRequest#EDGE_MODE
      */
-    public static final Key<byte[]> EDGE_AVAILABLE_EDGE_MODES =
-            new Key<byte[]>("android.edge.availableEdgeModes", byte[].class);
+    public static final Key<int[]> EDGE_AVAILABLE_EDGE_MODES =
+            new Key<int[]>("android.edge.availableEdgeModes", int[].class);
 
     /**
      * <p>Whether this camera device has a
@@ -458,8 +492,8 @@
      *
      * @see CaptureRequest#HOT_PIXEL_MODE
      */
-    public static final Key<byte[]> HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES =
-            new Key<byte[]>("android.hotPixel.availableHotPixelModes", byte[].class);
+    public static final Key<int[]> HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES =
+            new Key<int[]>("android.hotPixel.availableHotPixelModes", int[].class);
 
     /**
      * <p>Supported resolutions for the JPEG thumbnail</p>
@@ -526,8 +560,8 @@
      *
      * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
      */
-    public static final Key<byte[]> LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION =
-            new Key<byte[]>("android.lens.info.availableOpticalStabilization", byte[].class);
+    public static final Key<int[]> LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION =
+            new Key<int[]>("android.lens.info.availableOpticalStabilization", int[].class);
 
     /**
      * <p>Optional. Hyperfocal distance for this lens.</p>
@@ -553,6 +587,7 @@
      * <p>Dimensions of lens shading map.</p>
      * <p>The map should be on the order of 30-40 rows and columns, and
      * must be smaller than 64x64.</p>
+     * @hide
      */
     public static final Key<android.util.Size> LENS_INFO_SHADING_MAP_SIZE =
             new Key<android.util.Size>("android.lens.info.shadingMapSize", android.util.Size.class);
@@ -591,8 +626,8 @@
      *
      * @see CaptureRequest#NOISE_REDUCTION_MODE
      */
-    public static final Key<byte[]> NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES =
-            new Key<byte[]>("android.noiseReduction.availableNoiseReductionModes", byte[].class);
+    public static final Key<int[]> NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES =
+            new Key<int[]>("android.noiseReduction.availableNoiseReductionModes", int[].class);
 
     /**
      * <p>If set to 1, the HAL will always split result
@@ -621,7 +656,7 @@
      * number is 3, and max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
      * <p>This lists the upper bound of the number of output streams supported by
      * the camera device. Using more streams simultaneously may require more hardware and
-     * CPU resources that will consume more power. The image format for a output stream can
+     * CPU resources that will consume more power. The image format for an output stream can
      * be any supported format provided by android.scaler.availableStreamConfigurations.
      * The formats defined in android.scaler.availableStreamConfigurations can be categorized
      * into the 3 stream types as below:</p>
@@ -632,11 +667,79 @@
      * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
      * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
      * </ul>
+     * @hide
      */
     public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
             new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
 
     /**
+     * <p>The maximum number of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any <code>RAW</code> formats.</p>
+     * <p>This value contains the max number of simultaneous output
+     * streams from the raw sensor.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of output stream can
+     * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>In particular, a <code>RAW</code> format is typically one of:</p>
+     * <ul>
+     * <li>ImageFormat#RAW_SENSOR</li>
+     * <li>Opaque <code>RAW</code></li>
+     * </ul>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_RAW =
+            new Key<Integer>("android.request.maxNumOutputRaw", int.class);
+
+    /**
+     * <p>The maximum number of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (but not-stalling) formats.</p>
+     * <p>This value contains the max number of simultaneous output
+     * streams for any processed (but not-stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+     * Typically:</p>
+     * <ul>
+     * <li>ImageFormat#YUV_420_888</li>
+     * <li>ImageFormat#NV21</li>
+     * <li>ImageFormat#YV12</li>
+     * <li>Implementation-defined formats, i.e. StreamConfigurationMap#isOutputSupportedFor(Class)</li>
+     * </ul>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return 0 for a non-stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC =
+            new Key<Integer>("android.request.maxNumOutputProc", int.class);
+
+    /**
+     * <p>The maximum number of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (and stalling) formats.</p>
+     * <p>This value contains the max number of simultaneous output
+     * streams for any processed (and stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>A processed and stalling format is defined as any non-RAW format with a stall duration &gt; 0.
+     * Typically this is only the <code>JPEG</code> format (ImageFormat#JPEG).</p>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return a non-0 value for a stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC_STALLING =
+            new Key<Integer>("android.request.maxNumOutputProcStalling", int.class);
+
+    /**
      * <p>The maximum numbers of any type of input streams
      * that can be configured and used simultaneously by a camera device.</p>
      * <p>When set to 0, it means no input stream is supported.</p>
@@ -750,7 +853,7 @@
      * value.</p>
      * <p>The following keys may return <code>null</code> unless they are enabled:</p>
      * <ul>
-     * <li>{@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} (non-null iff {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON)</li>
+     * <li>android.statistics.lensShadingMap (non-null iff {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON)</li>
      * </ul>
      * <p>(Those sometimes-null keys should nevertheless be listed here
      * if they are available.)</p>
@@ -761,7 +864,6 @@
      * <p>TODO: This should be used by #getAvailableCaptureResultKeys.</p>
      *
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
      * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
      * @hide
      */
@@ -1229,8 +1331,8 @@
     /**
      * <p>Range of valid sensitivities</p>
      */
-    public static final Key<int[]> SENSOR_INFO_SENSITIVITY_RANGE =
-            new Key<int[]>("android.sensor.info.sensitivityRange", int[].class);
+    public static final Key<android.util.Range<Integer>> SENSOR_INFO_SENSITIVITY_RANGE =
+            new Key<android.util.Range<Integer>>("android.sensor.info.sensitivityRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Arrangement of color filters on sensor;
@@ -1251,8 +1353,8 @@
      *
      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
      */
-    public static final Key<long[]> SENSOR_INFO_EXPOSURE_TIME_RANGE =
-            new Key<long[]>("android.sensor.info.exposureTimeRange", long[].class);
+    public static final Key<android.util.Range<Long>> SENSOR_INFO_EXPOSURE_TIME_RANGE =
+            new Key<android.util.Range<Long>>("android.sensor.info.exposureTimeRange", new TypeReference<android.util.Range<Long>>() {{ }});
 
     /**
      * <p>Maximum possible frame duration (minimum frame
@@ -1276,8 +1378,8 @@
      * array</p>
      * <p>Needed for FOV calculation for old API</p>
      */
-    public static final Key<float[]> SENSOR_INFO_PHYSICAL_SIZE =
-            new Key<float[]>("android.sensor.info.physicalSize", float[].class);
+    public static final Key<android.util.SizeF> SENSOR_INFO_PHYSICAL_SIZE =
+            new Key<android.util.SizeF>("android.sensor.info.physicalSize", android.util.SizeF.class);
 
     /**
      * <p>Dimensions of full pixel array, possibly
@@ -1383,8 +1485,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
      */
-    public static final Key<Rational[]> SENSOR_CALIBRATION_TRANSFORM1 =
-            new Key<Rational[]>("android.sensor.calibrationTransform1", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM1 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A per-device calibration transform matrix that maps from the
@@ -1404,8 +1506,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
      */
-    public static final Key<Rational[]> SENSOR_CALIBRATION_TRANSFORM2 =
-            new Key<Rational[]>("android.sensor.calibrationTransform2", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM2 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A matrix that transforms color values from CIE XYZ color space to
@@ -1426,8 +1528,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
      */
-    public static final Key<Rational[]> SENSOR_COLOR_TRANSFORM1 =
-            new Key<Rational[]>("android.sensor.colorTransform1", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM1 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A matrix that transforms color values from CIE XYZ color space to
@@ -1450,8 +1552,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
      */
-    public static final Key<Rational[]> SENSOR_COLOR_TRANSFORM2 =
-            new Key<Rational[]>("android.sensor.colorTransform2", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM2 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A matrix that transforms white balanced camera colors from the reference
@@ -1470,8 +1572,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
      */
-    public static final Key<Rational[]> SENSOR_FORWARD_MATRIX1 =
-            new Key<Rational[]>("android.sensor.forwardMatrix1", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX1 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix1", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A matrix that transforms white balanced camera colors from the reference
@@ -1492,8 +1594,8 @@
      *
      * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
      */
-    public static final Key<Rational[]> SENSOR_FORWARD_MATRIX2 =
-            new Key<Rational[]>("android.sensor.forwardMatrix2", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX2 =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix2", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>A fixed black level offset for each of the color filter arrangement
@@ -1560,8 +1662,8 @@
      * android.statistics.faceIds and
      * android.statistics.faceLandmarks outputs.</p>
      */
-    public static final Key<byte[]> STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
-            new Key<byte[]>("android.statistics.info.availableFaceDetectModes", byte[].class);
+    public static final Key<int[]> STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
+            new Key<int[]>("android.statistics.info.availableFaceDetectModes", int[].class);
 
     /**
      * <p>Maximum number of simultaneously detectable
@@ -1584,19 +1686,16 @@
 
     /**
      * <p>Maximum number of supported points in the
-     * tonemap curve that can be used for {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed}, or
-     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, or {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.</p>
+     * tonemap curve that can be used for {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.</p>
      * <p>If the actual number of points provided by the application (in
-     * android.tonemap.curve*)  is less than max, the camera device will
+     * {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}) is less than max, the camera device will
      * resample the curve to its internal representation, using linear
      * interpolation.</p>
      * <p>The output curves in the result metadata may have a different number
      * of points than the input curves, and will represent the actual
      * hardware curves used as closely as possible when linearly interpolated.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_BLUE
-     * @see CaptureRequest#TONEMAP_CURVE_GREEN
-     * @see CaptureRequest#TONEMAP_CURVE_RED
+     * @see CaptureRequest#TONEMAP_CURVE
      */
     public static final Key<Integer> TONEMAP_MAX_CURVE_POINTS =
             new Key<Integer>("android.tonemap.maxCurvePoints", int.class);
@@ -1609,8 +1708,8 @@
      *
      * @see CaptureRequest#TONEMAP_MODE
      */
-    public static final Key<byte[]> TONEMAP_AVAILABLE_TONE_MAP_MODES =
-            new Key<byte[]>("android.tonemap.availableToneMapModes", byte[].class);
+    public static final Key<int[]> TONEMAP_AVAILABLE_TONE_MAP_MODES =
+            new Key<int[]>("android.tonemap.availableToneMapModes", int[].class);
 
     /**
      * <p>A list of camera LEDs that are available on this system.</p>
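
A sketch of consuming the retyped sensor-info keys above (Range, SizeF, ColorSpaceTransform);
the CameraCharacteristics argument is assumed to come from CameraManager#getCameraCharacteristics,
and the class name is a placeholder:

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.params.ColorSpaceTransform;
    import android.util.Log;
    import android.util.Range;
    import android.util.SizeF;

    class SensorInfoDump {
        static void dump(CameraCharacteristics chars) {
            Range<Integer> isoRange = chars.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
            // May be null on devices without manual sensor control.
            Range<Long> exposureRange = chars.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
            SizeF physicalSize = chars.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
            ColorSpaceTransform forwardMatrix = chars.get(CameraCharacteristics.SENSOR_FORWARD_MATRIX1);
            Log.d("SensorInfo", "ISO " + isoRange.getLower() + "-" + isoRange.getUpper()
                    + ", exposure (ns) " + exposureRange
                    + ", sensor " + physicalSize.getWidth() + "x" + physicalSize.getHeight() + " mm"
                    + ", forward matrix " + forwardMatrix);
        }
    }
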
diff --git a/core/java/android/hardware/camera2/CameraManager.java b/core/java/android/hardware/camera2/CameraManager.java
index 03b342c..83db056 100644
--- a/core/java/android/hardware/camera2/CameraManager.java
+++ b/core/java/android/hardware/camera2/CameraManager.java
@@ -220,6 +220,7 @@
     private CameraDevice openCameraDeviceUserAsync(String cameraId,
             CameraDevice.StateListener listener, Handler handler)
             throws CameraAccessException {
+        CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
         CameraDevice device = null;
         try {
 
@@ -231,7 +232,8 @@
                         new android.hardware.camera2.impl.CameraDevice(
                                 cameraId,
                                 listener,
-                                handler);
+                                handler,
+                                characteristics);
 
                 BinderHolder holder = new BinderHolder();
 
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 4cde601..27b4fb8ee 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -279,9 +279,7 @@
      * <p>TODO: This should be @hide</p>
      * <ul>
      * <li>Manual tonemap control<ul>
-     * <li>{@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}</li>
-     * <li>{@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}</li>
-     * <li>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed}</li>
+     * <li>{@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}</li>
      * <li>{@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}</li>
      * <li>{@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</li>
      * </ul>
@@ -292,8 +290,8 @@
      * </ul>
      * </li>
      * <li>Lens shading map information<ul>
-     * <li>{@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}</li>
-     * <li>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}</li>
+     * <li>android.statistics.lensShadingMap</li>
+     * <li>android.lens.info.shadingMapSize</li>
      * </ul>
      * </li>
      * </ul>
@@ -304,11 +302,7 @@
      *
      * @see CaptureRequest#COLOR_CORRECTION_GAINS
      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
-     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
-     * @see CaptureRequest#TONEMAP_CURVE_BLUE
-     * @see CaptureRequest#TONEMAP_CURVE_GREEN
-     * @see CaptureRequest#TONEMAP_CURVE_RED
+     * @see CaptureRequest#TONEMAP_CURVE
      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
      * @see CaptureRequest#TONEMAP_MODE
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
@@ -1548,17 +1542,14 @@
 
     /**
      * <p>Use the tone mapping curve specified in
-     * the android.tonemap.curve* entries.</p>
+     * the {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve} entry.</p>
      * <p>All color enhancement and tonemapping must be disabled, except
      * for applying the tonemapping curve specified by
-     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed}, {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}, or
-     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}.</p>
+     * {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.</p>
      * <p>Must not slow down frame rate relative to raw
      * sensor output.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_BLUE
-     * @see CaptureRequest#TONEMAP_CURVE_GREEN
-     * @see CaptureRequest#TONEMAP_CURVE_RED
+     * @see CaptureRequest#TONEMAP_CURVE
      * @see CaptureRequest#TONEMAP_MODE
      */
     public static final int TONEMAP_MODE_CONTRAST_CURVE = 0;
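
With the per-channel curve keys hidden, CONTRAST_CURVE is driven through the single
TONEMAP_CURVE key added in the CaptureRequest change below. A minimal sketch, assuming
TonemapCurve's (red, green, blue) float[] constructor and a CaptureRequest.Builder created
elsewhere; names are placeholders:

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.TonemapCurve;

    class ManualTonemap {
        static void applyLinearCurve(CaptureRequest.Builder builder) {
            // Control points are (Pin, Pout) pairs; a 2-point linear curve here.
            float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };
            builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
            builder.set(CaptureRequest.TONEMAP_CURVE, new TonemapCurve(linear, linear, linear));
        }
    }
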
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index a4aa296..79659f1 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -537,30 +537,24 @@
      *
      * @see CaptureRequest#COLOR_CORRECTION_MODE
      */
-    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
-            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
 
     /**
      * <p>Gains applying to Bayer raw color channels for
      * white-balance.</p>
-     * <p>The 4-channel white-balance gains are defined in
-     * the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
-     * for green pixels on even rows of the output, and <code>G_odd</code>
-     * is the gain for green pixels on the odd rows. if a HAL
-     * does not support a separate gain for even/odd green channels,
-     * it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
-     * <code>G_even</code> in the output result metadata.</p>
-     * <p>This array is either set by the camera device when the request
-     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
-     * directly by the application in the request when the
-     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
-     * <p>The output should be the gains actually applied by the camera device to
-     * the current frame.</p>
+     * <p>These per-channel gains are either set by the camera device
+     * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
+     * TRANSFORM_MATRIX, or directly by the application in the
+     * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
+     * TRANSFORM_MATRIX.</p>
+     * <p>The gains in the result metadata are the gains actually
+     * applied by the camera device to the current frame.</p>
      *
      * @see CaptureRequest#COLOR_CORRECTION_MODE
      */
-    public static final Key<float[]> COLOR_CORRECTION_GAINS =
-            new Key<float[]>("android.colorCorrection.gains", float[].class);
+    public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
+            new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
 
     /**
      * <p>The desired setting for the camera device's auto-exposure
@@ -693,9 +687,6 @@
     /**
      * <p>List of areas to use for
      * metering.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
@@ -711,8 +702,8 @@
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AE_REGIONS =
-            new Key<int[]>("android.control.aeRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Range over which fps can be adjusted to
@@ -722,8 +713,8 @@
      *
      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
      */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Whether the camera device will trigger a precapture
@@ -768,26 +759,23 @@
     /**
      * <p>List of areas to use for focus
      * estimation.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
      * bottom-right pixel in the active pixel array. The weight
      * should be nonnegative.</p>
-     * <p>If all regions have 0 weight, then no specific focus area
-     * needs to be used by the camera device. If the focusing region is
-     * outside the the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture
-     * result metadata, the camera device will ignore the sections outside
-     * the region and output the used sections in the result metadata.</p>
+     * <p>If all regions have 0 weight, then no specific metering area
+     * needs to be used by the camera device. If the metering region is
+     * outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
+     * the camera device will ignore the sections outside the region and output the
+     * used sections in the result metadata.</p>
      *
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AF_REGIONS =
-            new Key<int[]>("android.control.afRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Whether the camera device will trigger autofocus for this request.</p>
@@ -854,27 +842,23 @@
     /**
      * <p>List of areas to use for illuminant
      * estimation.</p>
-     * <p>Only used in AUTO mode.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
      * bottom-right pixel in the active pixel array. The weight
      * should be nonnegative.</p>
-     * <p>If all regions have 0 weight, then no specific auto-white balance (AWB) area
-     * needs to be used by the camera device. If the AWB region is
-     * outside the the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
+     * <p>If all regions have 0 weight, then no specific metering area
+     * needs to be used by the camera device. If the metering region is
+     * outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
      * the camera device will ignore the sections outside the region and output the
      * used sections in the result metadata.</p>
      *
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AWB_REGIONS =
-            new Key<int[]>("android.control.awbRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Information to the camera device 3A (auto-exposure,
@@ -1068,8 +1052,15 @@
             new Key<Integer>("android.hotPixel.mode", int.class);
 
     /**
+     * <p>A location object to use when generating image GPS metadata.</p>
+     */
+    public static final Key<android.location.Location> JPEG_GPS_LOCATION =
+            new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
+
+    /**
      * <p>GPS coordinates to include in output JPEG
      * EXIF</p>
+     * @hide
      */
     public static final Key<double[]> JPEG_GPS_COORDINATES =
             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
@@ -1077,6 +1068,7 @@
     /**
      * <p>32 characters describing GPS algorithm to
      * include in EXIF</p>
+     * @hide
      */
     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
@@ -1084,6 +1076,7 @@
     /**
      * <p>Time GPS fix was made to include in
      * EXIF</p>
+     * @hide
      */
     public static final Key<Long> JPEG_GPS_TIMESTAMP =
             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
@@ -1432,8 +1425,8 @@
      * <p>When set to OFF mode, no lens shading correction will be applied by the
      * camera device, and an identity lens shading map data will be provided
      * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for lens
-     * shading map with size specified as <code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]</code>,
-     * the output {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} for this case will be an identity map
+     * shading map with size specified as <code>android.lens.info.shadingMapSize = [ 4, 3 ]</code>,
+     * the output android.statistics.lensShadingMap for this case will be an identity map
      * shown below:</p>
      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
      * 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
@@ -1445,8 +1438,8 @@
      * <p>When set to other modes, lens shading correction will be applied by the
      * camera device. Applications can request lens shading map data by setting
      * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide
-     * lens shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}, with size specified
-     * by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}; the returned shading map data will be the one
+     * lens shading map data in android.statistics.lensShadingMap, with size specified
+     * by android.lens.info.shadingMapSize; the returned shading map data will be the one
      * applied by the camera device for this capture request.</p>
      * <p>The shading map data may depend on the AE and AWB statistics, therefore the reliability
      * of the map data may be affected by the AE and AWB algorithms. When AE and AWB are in
@@ -1456,8 +1449,6 @@
      *
      * @see CaptureRequest#CONTROL_AE_MODE
      * @see CaptureRequest#CONTROL_AWB_MODE
-     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
      * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
      * @see #SHADING_MODE_OFF
      * @see #SHADING_MODE_FAST
@@ -1498,10 +1489,8 @@
      * <p>Whether the camera device will output the lens
      * shading map in output result metadata.</p>
      * <p>When set to ON,
-     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in
+     * android.statistics.lensShadingMap must be provided in
      * the output result metadata.</p>
-     *
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
      * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
      * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
      */
@@ -1512,10 +1501,10 @@
      * <p>Tonemapping / contrast / gamma curve for the blue
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
-     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
+     * <p>See android.tonemap.curveRed for more details.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_BLUE =
             new Key<float[]>("android.tonemap.curveBlue", float[].class);
@@ -1524,10 +1513,10 @@
      * <p>Tonemapping / contrast / gamma curve for the green
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
-     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
+     * <p>See android.tonemap.curveRed for more details.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_GREEN =
             new Key<float[]>("android.tonemap.curveGreen", float[].class);
@@ -1537,7 +1526,7 @@
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
      * <p>Each channel's curve is defined by an array of control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} =
+     * <pre><code>android.tonemap.curveRed =
      * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
      * <p>These are sorted in order of increasing <code>Pin</code>; it is always
@@ -1553,15 +1542,15 @@
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
      * <p>Linear mapping:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 0, 1.0, 1.0 ]
+     * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
      * </code></pre>
      * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
      * <p>Invert mapping:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 1.0, 1.0, 0 ]
+     * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
      * </code></pre>
      * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
+     * <pre><code>android.tonemap.curveRed = [
      * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
      * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
      * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
@@ -1569,7 +1558,7 @@
      * </code></pre>
      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
+     * <pre><code>android.tonemap.curveRed = [
      * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
      * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
      * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
@@ -1577,14 +1566,67 @@
      * </code></pre>
      * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_RED =
             new Key<float[]>("android.tonemap.curveRed", float[].class);
 
     /**
+     * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
+     * is CONTRAST_CURVE.</p>
+     * <p>The tonemap curve consists of three curves, one for each of the red, green, and blue
+     * channels respectively. The following example uses the red channel as an
+     * example; the same logic applies to the green and blue channels.
+     * Each channel's curve is defined by an array of control points:</p>
+     * <pre><code>curveRed =
+     * [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+     * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+     * <p>These are sorted in order of increasing <code>Pin</code>; it is always
+     * guaranteed that input values 0.0 and 1.0 are included in the list to
+     * define a complete mapping. For input values between control points,
+     * the camera device must linearly interpolate between the control
+     * points.</p>
+     * <p>Each curve can have an independent number of points, and the number
+     * of points can be less than max (that is, the request doesn't have to
+     * always provide a curve with number of points equivalent to
+     * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+     * <p>A few examples, and their corresponding graphical mappings; these
+     * only specify the red channel and the precision is limited to 4
+     * digits, for conciseness.</p>
+     * <p>Linear mapping:</p>
+     * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
+     * </code></pre>
+     * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+     * <p>Invert mapping:</p>
+     * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
+     * </code></pre>
+     * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+     * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+     * <pre><code>curveRed = [
+     * (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+     * (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+     * (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+     * (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+     * </code></pre>
+     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+     * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+     * <pre><code>curveRed = [
+     * (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+     * (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+     * (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+     * (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+     * </code></pre>
+     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     *
+     * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+     * @see CaptureRequest#TONEMAP_MODE
+     */
+    public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
+            new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
+
+    /**
      * <p>High-level global contrast/gamma/tonemapping control.</p>
      * <p>When switching to an application-defined contrast curve by setting
      * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
@@ -1600,8 +1642,7 @@
      * <p>This must be set to a valid mode in
      * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}.</p>
      * <p>When using either FAST or HIGH_QUALITY, the camera device will
-     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed},
-     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, and {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.
+     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
      * These values are always available, and as close as possible to the
      * actually used nonlinear/nonglobal transforms.</p>
      * <p>If a request is sent with CONTRAST_CURVE with the camera device's
@@ -1609,9 +1650,7 @@
      * roughly the same.</p>
      *
      * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
-     * @see CaptureRequest#TONEMAP_CURVE_BLUE
-     * @see CaptureRequest#TONEMAP_CURVE_GREEN
-     * @see CaptureRequest#TONEMAP_CURVE_RED
+     * @see CaptureRequest#TONEMAP_CURVE
      * @see CaptureRequest#TONEMAP_MODE
      * @see #TONEMAP_MODE_CONTRAST_CURVE
      * @see #TONEMAP_MODE_FAST
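
Putting the retyped request keys together, a minimal sketch of a manual request built with
the new parameter classes (MeteringRectangle, Range, RggbChannelVector, ColorSpaceTransform).
The builder and the active-array rectangle (from SENSOR_INFO_ACTIVE_ARRAY_SIZE) are assumed
to come from elsewhere, and the gain/transform values are illustrative only:

    import android.graphics.Rect;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.ColorSpaceTransform;
    import android.hardware.camera2.params.MeteringRectangle;
    import android.hardware.camera2.params.RggbChannelVector;
    import android.util.Range;

    class ManualRequestSetup {
        static void configure(CaptureRequest.Builder builder, Rect activeArray) {
            // Metering regions are now MeteringRectangle[] instead of packed
            // (xmin, ymin, xmax, ymax, weight) int arrays.
            MeteringRectangle center = new MeteringRectangle(
                    activeArray.width() / 4, activeArray.height() / 4,
                    activeArray.width() / 2, activeArray.height() / 2,
                    MeteringRectangle.METERING_WEIGHT_MAX);
            builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { center });
            builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { center });

            // The target FPS range is now a Range<Integer> instead of an int[2].
            builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(15, 30));

            // Manual color correction uses RggbChannelVector and ColorSpaceTransform
            // instead of float[] and Rational[].
            builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                    CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
            builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                    new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.5f));
            builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                    new ColorSpaceTransform(new int[] {
                            1, 1, 0, 1, 0, 1,   // row 0 as numerator/denominator pairs
                            0, 1, 1, 1, 0, 1,   // row 1
                            0, 1, 0, 1, 1, 1    // row 2 (identity transform)
                    }));
        }
    }
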
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 9aa56cf..aa202ac 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -383,30 +383,24 @@
      *
      * @see CaptureRequest#COLOR_CORRECTION_MODE
      */
-    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
-            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);
+    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
+            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
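A read-side sketch for the object-typed transform (illustration only, not part of the patch), assuming a completed CaptureResult and the copyElements(int[], int) method shown in the ColorSpaceTransform hunk further down this diff:

    static int[] readColorTransform(CaptureResult result) {
        ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
        if (transform == null) {
            return null;  // device did not report a transform for this frame
        }
        // 3x3 matrix flattened row-major as 9 (numerator, denominator) pairs.
        int[] packed = new int[18];
        transform.copyElements(packed, /*offset*/ 0);
        return packed;
    }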
 
     /**
      * <p>Gains applying to Bayer raw color channels for
      * white-balance.</p>
-     * <p>The 4-channel white-balance gains are defined in
-     * the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
-     * for green pixels on even rows of the output, and <code>G_odd</code>
-     * is the gain for green pixels on the odd rows. if a HAL
-     * does not support a separate gain for even/odd green channels,
-     * it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
-     * <code>G_even</code> in the output result metadata.</p>
-     * <p>This array is either set by the camera device when the request
-     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
-     * directly by the application in the request when the
-     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
-     * <p>The output should be the gains actually applied by the camera device to
-     * the current frame.</p>
+     * <p>These per-channel gains are either set by the camera device
+     * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
+     * TRANSFORM_MATRIX, or directly by the application in the
+     * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
+     * TRANSFORM_MATRIX.</p>
+     * <p>The gains in the result metadata are the gains actually
+     * applied by the camera device to the current frame.</p>
      *
      * @see CaptureRequest#COLOR_CORRECTION_MODE
      */
-    public static final Key<float[]> COLOR_CORRECTION_GAINS =
-            new Key<float[]>("android.colorCorrection.gains", float[].class);
+    public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
+            new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
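A write-side sketch (illustration only, not part of the patch), assuming the RggbChannelVector(red, greenEven, greenOdd, blue) constructor from the companion params change; the gain values are placeholders:

    static void setManualWhiteBalanceGains(CaptureRequest.Builder builder) {
        // Manual gains require AWB off and color correction in TRANSFORM_MATRIX mode.
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        // Channel order is (R, G_even, G_odd, B).
        builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
    }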
 
     /**
      * <p>The desired setting for the camera device's auto-exposure
@@ -539,9 +533,6 @@
     /**
      * <p>List of areas to use for
      * metering.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
@@ -557,8 +548,8 @@
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AE_REGIONS =
-            new Key<int[]>("android.control.aeRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Range over which fps can be adjusted to
@@ -568,8 +559,8 @@
      *
      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
      */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
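With the key retyped from int[] to android.util.Range&lt;Integer&gt;, a fixed frame rate no longer depends on remembering the [min, max] array layout. A sketch (illustration only, not part of the patch):

    static void pinPreviewTo30Fps(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(30, 30));
    }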
 
     /**
      * <p>Whether the camera device will trigger a precapture
@@ -812,26 +803,23 @@
     /**
      * <p>List of areas to use for focus
      * estimation.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
      * bottom-right pixel in the active pixel array. The weight
      * should be nonnegative.</p>
-     * <p>If all regions have 0 weight, then no specific focus area
-     * needs to be used by the camera device. If the focusing region is
-     * outside the the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture
-     * result metadata, the camera device will ignore the sections outside
-     * the region and output the used sections in the result metadata.</p>
+     * <p>If all regions have 0 weight, then no specific metering area
+     * needs to be used by the camera device. If the metering region is
+     * outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
+     * the camera device will ignore the sections outside the region and output the
+     * used sections in the result metadata.</p>
      *
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AF_REGIONS =
-            new Key<int[]>("android.control.afRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Whether the camera device will trigger autofocus for this request.</p>
@@ -1295,27 +1283,23 @@
     /**
      * <p>List of areas to use for illuminant
      * estimation.</p>
-     * <p>Only used in AUTO mode.</p>
-     * <p>Each area is a rectangle plus weight: xmin, ymin,
-     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
-     * specified coordinates.</p>
      * <p>The coordinate system is based on the active pixel array,
      * with (0,0) being the top-left pixel in the active pixel array, and
      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
      * bottom-right pixel in the active pixel array. The weight
      * should be nonnegative.</p>
-     * <p>If all regions have 0 weight, then no specific auto-white balance (AWB) area
-     * needs to be used by the camera device. If the AWB region is
-     * outside the the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
+     * <p>If all regions have 0 weight, then no specific metering area
+     * needs to be used by the camera device. If the metering region is
+     * outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in capture result metadata,
      * the camera device will ignore the sections outside the region and output the
      * used sections in the result metadata.</p>
      *
      * @see CaptureRequest#SCALER_CROP_REGION
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      */
-    public static final Key<int[]> CONTROL_AWB_REGIONS =
-            new Key<int[]>("android.control.awbRegions", int[].class);
+    public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
+            new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
 
     /**
      * <p>Information to the camera device 3A (auto-exposure,
@@ -1656,8 +1640,15 @@
             new Key<Integer>("android.hotPixel.mode", int.class);
 
     /**
+     * <p>A location object to use when generating image GPS metadata.</p>
+     */
+    public static final Key<android.location.Location> JPEG_GPS_LOCATION =
+            new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
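A sketch of the intended replacement for the now-hidden gpsCoordinates / gpsProcessingMethod / gpsTimestamp keys (illustration only, not part of the patch); it assumes the matching CaptureRequest#JPEG_GPS_LOCATION key from the CaptureRequest.java hunk of this change, and the coordinates are placeholders:

    static void tagJpegWithLocation(CaptureRequest.Builder builder,
            double latitude, double longitude) {
        Location location = new Location(LocationManager.GPS_PROVIDER);
        location.setLatitude(latitude);
        location.setLongitude(longitude);
        location.setAltitude(0.0);
        location.setTime(System.currentTimeMillis());
        builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);
    }

CameraMetadataNative (below) translates the Location provider back into the EXIF processing-method string.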
+
+    /**
      * <p>GPS coordinates to include in output JPEG
      * EXIF</p>
+     * @hide
      */
     public static final Key<double[]> JPEG_GPS_COORDINATES =
             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
@@ -1665,6 +1656,7 @@
     /**
      * <p>32 characters describing GPS algorithm to
      * include in EXIF</p>
+     * @hide
      */
     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
@@ -1672,6 +1664,7 @@
     /**
      * <p>Time GPS fix was made to include in
      * EXIF</p>
+     * @hide
      */
     public static final Key<Long> JPEG_GPS_TIMESTAMP =
             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
@@ -1793,8 +1786,8 @@
      * <p>If variable focus not supported, can still report
      * fixed depth of field range</p>
      */
-    public static final Key<float[]> LENS_FOCUS_RANGE =
-            new Key<float[]>("android.lens.focusRange", float[].class);
+    public static final Key<android.util.Range<Float>> LENS_FOCUS_RANGE =
+            new Key<android.util.Range<Float>>("android.lens.focusRange", new TypeReference<android.util.Range<Float>>() {{ }});
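A read-side sketch for the retyped key (illustration only, not part of the patch), assuming focus distances are expressed in diopters as elsewhere in the lens metadata:

    static boolean isWithinReportedFocusRange(CaptureResult result, float focusDistanceDiopters) {
        Range<Float> range = result.get(CaptureResult.LENS_FOCUS_RANGE);
        // contains() replaces manual comparison against the old float[2] representation.
        return range != null && range.contains(focusDistanceDiopters);
    }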
 
     /**
      * <p>Sets whether the camera device uses optical image stabilization (OIS)
@@ -2161,8 +2154,8 @@
      * <p>When set to OFF mode, no lens shading correction will be applied by the
      * camera device, and an identity lens shading map data will be provided
      * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for lens
-     * shading map with size specified as <code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]</code>,
-     * the output {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} for this case will be an identity map
+     * shading map with size specified as <code>android.lens.info.shadingMapSize = [ 4, 3 ]</code>,
+     * the output android.statistics.lensShadingMap for this case will be an identity map
      * shown below:</p>
      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
      * 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
@@ -2174,8 +2167,8 @@
      * <p>When set to other modes, lens shading correction will be applied by the
      * camera device. Applications can request lens shading map data by setting
      * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide
-     * lens shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}, with size specified
-     * by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}; the returned shading map data will be the one
+     * lens shading map data in android.statistics.lensShadingMap, with size specified
+     * by android.lens.info.shadingMapSize; the returned shading map data will be the one
      * applied by the camera device for this capture request.</p>
      * <p>The shading map data may depend on the AE and AWB statistics, therefore the reliability
      * of the map data may be affected by the AE and AWB algorithms. When AE and AWB are in
@@ -2185,8 +2178,6 @@
      *
      * @see CaptureRequest#CONTROL_AE_MODE
      * @see CaptureRequest#CONTROL_AWB_MODE
-     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
      * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
      * @see #SHADING_MODE_OFF
      * @see #SHADING_MODE_FAST
@@ -2276,13 +2267,12 @@
      * The map is assumed to be bilinearly interpolated between the sample points.</p>
      * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-     * The shading map is stored in a fully interleaved format, and its size
-     * is provided in the camera static metadata by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}.</p>
+     * The shading map is stored in a fully interleaved format.</p>
      * <p>The shading map should have on the order of 30-40 rows and columns,
      * and must be smaller than 64x64.</p>
      * <p>As an example, given a very small map defined as:</p>
-     * <pre><code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]
-     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} =
+     * <pre><code>width,height = [ 4, 3 ]
+     * values =
      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
      * 1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
      * 1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
@@ -2301,8 +2291,54 @@
      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
      *
      * @see CaptureRequest#COLOR_CORRECTION_MODE
-     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
+     */
+    public static final Key<android.hardware.camera2.params.LensShadingMap> STATISTICS_LENS_SHADING_CORRECTION_MAP =
+            new Key<android.hardware.camera2.params.LensShadingMap>("android.statistics.lensShadingCorrectionMap", android.hardware.camera2.params.LensShadingMap.class);
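A read-side sketch (illustration only, not part of the patch); getRowCount(), getColumnCount(), getGainFactor() and the RggbChannelVector.RED channel index are assumed from the params classes in this merge:

    static float maxRedShadingGain(CaptureResult result) {
        LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
        if (map == null) {
            return 1.0f;  // shading map reporting was not enabled for this request
        }
        float max = 1.0f;
        for (int row = 0; row < map.getRowCount(); row++) {
            for (int col = 0; col < map.getColumnCount(); col++) {
                max = Math.max(max, map.getGainFactor(RggbChannelVector.RED, col, row));
            }
        }
        return max;
    }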
+
+    /**
+     * <p>The shading map is a low-resolution floating-point map
+     * that lists the coefficients used to correct for vignetting, for each
+     * Bayer color channel.</p>
+     * <p>The least shaded section of the image should have a gain factor
+     * of 1; all other sections should have gains above 1.</p>
+     * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
+     * must take into account the colorCorrection settings.</p>
+     * <p>The shading map is for the entire active pixel array, and is not
+     * affected by the crop region specified in the request. Each shading map
+     * entry is the value of the shading compensation map over a specific
+     * pixel on the sensor.  Specifically, with a (N x M) resolution shading
+     * map, and an active pixel array size (W x H), shading map entry
+     * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+     * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+     * The map is assumed to be bilinearly interpolated between the sample points.</p>
+     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
+     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+     * The shading map is stored in a fully interleaved format, and its size
+     * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
+     * <p>The shading map should have on the order of 30-40 rows and columns,
+     * and must be smaller than 64x64.</p>
+     * <p>As an example, given a very small map defined as:</p>
+     * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
+     * 1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
+     * 1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
+     * 1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
+     * 1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
+     * 1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
+     * </code></pre>
+     * <p>The low-resolution scaling map images for each channel are
+     * (displayed using nearest-neighbor interpolation):</p>
+     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+     * <p>As a visualization only, inverting the full-color map to recover an
+     * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
+     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     *
+     * @see CaptureRequest#COLOR_CORRECTION_MODE
+     * @hide
      */
     public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
             new Key<float[]>("android.statistics.lensShadingMap", float[].class);
@@ -2402,17 +2438,15 @@
      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
      */
-    public static final Key<int[]> STATISTICS_HOT_PIXEL_MAP =
-            new Key<int[]>("android.statistics.hotPixelMap", int[].class);
+    public static final Key<android.graphics.Point[]> STATISTICS_HOT_PIXEL_MAP =
+            new Key<android.graphics.Point[]>("android.statistics.hotPixelMap", android.graphics.Point[].class);
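With the raw int[] replaced by android.graphics.Point[], each entry is a single (x, y) sensor coordinate. A trivial sketch (illustration only, not part of the patch):

    static int hotPixelCount(CaptureResult result) {
        Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
        return (hotPixels == null) ? 0 : hotPixels.length;
    }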
 
     /**
      * <p>Whether the camera device will output the lens
      * shading map in output result metadata.</p>
      * <p>When set to ON,
-     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in
+     * android.statistics.lensShadingMap must be provided in
      * the output result metadata.</p>
-     *
-     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
      * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
      * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
      */
@@ -2423,10 +2457,10 @@
      * <p>Tonemapping / contrast / gamma curve for the blue
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
-     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
+     * <p>See android.tonemap.curveRed for more details.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_BLUE =
             new Key<float[]>("android.tonemap.curveBlue", float[].class);
@@ -2435,10 +2469,10 @@
      * <p>Tonemapping / contrast / gamma curve for the green
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
-     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
+     * <p>See android.tonemap.curveRed for more details.</p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_GREEN =
             new Key<float[]>("android.tonemap.curveGreen", float[].class);
@@ -2448,7 +2482,7 @@
      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
      * CONTRAST_CURVE.</p>
      * <p>Each channel's curve is defined by an array of control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} =
+     * <pre><code>android.tonemap.curveRed =
      * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
      * <p>These are sorted in order of increasing <code>Pin</code>; it is always
@@ -2464,15 +2498,15 @@
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
      * <p>Linear mapping:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 0, 1.0, 1.0 ]
+     * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
      * </code></pre>
      * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
      * <p>Invert mapping:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 1.0, 1.0, 0 ]
+     * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
      * </code></pre>
      * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
+     * <pre><code>android.tonemap.curveRed = [
      * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
      * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
      * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
@@ -2480,7 +2514,7 @@
      * </code></pre>
      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
-     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
+     * <pre><code>android.tonemap.curveRed = [
      * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
      * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
      * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
@@ -2488,14 +2522,67 @@
      * </code></pre>
      * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
      *
-     * @see CaptureRequest#TONEMAP_CURVE_RED
      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
      * @see CaptureRequest#TONEMAP_MODE
+     * @hide
      */
     public static final Key<float[]> TONEMAP_CURVE_RED =
             new Key<float[]>("android.tonemap.curveRed", float[].class);
 
     /**
+     * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
+     * is CONTRAST_CURVE.</p>
+     * <p>The tonemapCurve consists of three curves, one each for the red, green,
+     * and blue channels. The examples below use the red channel; the same logic
+     * applies to the green and blue channels.
+     * Each channel's curve is defined by an array of control points:</p>
+     * <pre><code>curveRed =
+     * [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+     * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+     * <p>These are sorted in order of increasing <code>Pin</code>; it is always
+     * guaranteed that input values 0.0 and 1.0 are included in the list to
+     * define a complete mapping. For input values between control points,
+     * the camera device must linearly interpolate between the control
+     * points.</p>
+     * <p>Each curve can have an independent number of points, and the number
+     * of points can be less than max (that is, the request doesn't have to
+     * always provide a curve with number of points equivalent to
+     * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+     * <p>A few examples, and their corresponding graphical mappings; these
+     * only specify the red channel and the precision is limited to 4
+     * digits, for conciseness.</p>
+     * <p>Linear mapping:</p>
+     * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
+     * </code></pre>
+     * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+     * <p>Invert mapping:</p>
+     * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
+     * </code></pre>
+     * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+     * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+     * <pre><code>curveRed = [
+     * (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+     * (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+     * (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+     * (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+     * </code></pre>
+     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+     * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+     * <pre><code>curveRed = [
+     * (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+     * (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+     * (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+     * (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+     * </code></pre>
+     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+     *
+     * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+     * @see CaptureRequest#TONEMAP_MODE
+     */
+    public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
+            new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
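A sketch of reading back the curve the device actually applied under FAST or HIGH_QUALITY (illustration only, not part of the patch); getPointCount(), copyColorCurve(), CHANNEL_RED and POINT_SIZE follow the TonemapCurve usage in the CameraMetadataNative hunk below:

    static float[] deviceRedCurve(CaptureResult result) {
        TonemapCurve curve = result.get(CaptureResult.TONEMAP_CURVE);
        if (curve == null) {
            return null;
        }
        float[] red = new float[curve.getPointCount(TonemapCurve.CHANNEL_RED)
                * TonemapCurve.POINT_SIZE];
        curve.copyColorCurve(TonemapCurve.CHANNEL_RED, red, /*offset*/ 0);
        return red;  // packed as (Pin, Pout) pairs
    }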
+
+    /**
      * <p>High-level global contrast/gamma/tonemapping control.</p>
      * <p>When switching to an application-defined contrast curve by setting
      * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
@@ -2511,8 +2598,7 @@
      * <p>This must be set to a valid mode in
      * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}.</p>
      * <p>When using either FAST or HIGH_QUALITY, the camera device will
-     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed},
-     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, and {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.
+     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
      * These values are always available, and as close as possible to the
      * actually used nonlinear/nonglobal transforms.</p>
      * <p>If a request is sent with CONTRAST_CURVE with the camera device's
@@ -2520,9 +2606,7 @@
      * roughly the same.</p>
      *
      * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
-     * @see CaptureRequest#TONEMAP_CURVE_BLUE
-     * @see CaptureRequest#TONEMAP_CURVE_GREEN
-     * @see CaptureRequest#TONEMAP_CURVE_RED
+     * @see CaptureRequest#TONEMAP_CURVE
      * @see CaptureRequest#TONEMAP_MODE
      * @see #TONEMAP_MODE_CONTRAST_CURVE
      * @see #TONEMAP_MODE_FAST
diff --git a/core/java/android/hardware/camera2/impl/CameraDevice.java b/core/java/android/hardware/camera2/impl/CameraDevice.java
index 7b24976..9a4c531 100644
--- a/core/java/android/hardware/camera2/impl/CameraDevice.java
+++ b/core/java/android/hardware/camera2/impl/CameraDevice.java
@@ -20,6 +20,8 @@
 
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CaptureRequest;
 import android.hardware.camera2.CaptureResult;
 import android.hardware.camera2.ICameraDeviceCallbacks;
@@ -73,6 +75,7 @@
     private final SparseArray<Surface> mConfiguredOutputs = new SparseArray<Surface>();
 
     private final String mCameraId;
+    private final CameraCharacteristics mCharacteristics;
 
     /**
      * A list tracking request and its expected last frame.
@@ -151,13 +154,15 @@
         }
     };
 
-    public CameraDevice(String cameraId, StateListener listener, Handler handler) {
+    public CameraDevice(String cameraId, StateListener listener, Handler handler,
+                        CameraCharacteristics characteristics) {
         if (cameraId == null || listener == null || handler == null) {
             throw new IllegalArgumentException("Null argument given");
         }
         mCameraId = cameraId;
         mDeviceListener = listener;
         mDeviceHandler = handler;
+        mCharacteristics = characteristics;
 
         final int MAX_TAG_LEN = 23;
         String tag = String.format("CameraDevice-JV-%s", mCameraId);
@@ -851,11 +856,18 @@
         @Override
         public void onResultReceived(CameraMetadataNative result,
                 CaptureResultExtras resultExtras) throws RemoteException {
+
             int requestId = resultExtras.getRequestId();
             if (DEBUG) {
                 Log.v(TAG, "Received result frame " + resultExtras.getFrameNumber() + " for id "
                         + requestId);
             }
+
+            // TODO: Handle CameraCharacteristics access from CaptureResult correctly.
+            result.set(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE,
+                    getCharacteristics().get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE));
+
             final CaptureListenerHolder holder;
             synchronized (mLock) {
                 holder = CameraDevice.this.mCaptureListenerMap.get(requestId);
@@ -965,4 +977,8 @@
             return (mRemoteDevice == null);
         }
     }
+
+    private CameraCharacteristics getCharacteristics() {
+        return mCharacteristics;
+    }
 }
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index ab2c49a..dc0c652 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -43,13 +43,19 @@
 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
 import android.hardware.camera2.marshal.impl.MarshalQueryableString;
 import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.LensShadingMap;
 import android.hardware.camera2.params.StreamConfiguration;
 import android.hardware.camera2.params.StreamConfigurationDuration;
 import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.TonemapCurve;
 import android.hardware.camera2.utils.TypeReference;
+import android.location.Location;
+import android.location.LocationManager;
 import android.os.Parcelable;
 import android.os.Parcel;
 import android.util.Log;
+import android.util.Pair;
+import android.util.Size;
 
 import com.android.internal.util.Preconditions;
 
@@ -57,6 +63,7 @@
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.util.ArrayList;
+import java.util.HashMap;
 
 /**
  * Implementation of camera metadata marshal/unmarshal across Binder to
@@ -209,6 +216,37 @@
     // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
     public static final int NATIVE_JPEG_FORMAT = 0x21;
 
+    private static final String CELLID_PROCESS = "CELLID";
+    private static final String GPS_PROCESS = "GPS";
+
+    private static String translateLocationProviderToProcess(final String provider) {
+        if (provider == null) {
+            return null;
+        }
+        switch(provider) {
+            case LocationManager.GPS_PROVIDER:
+                return GPS_PROCESS;
+            case LocationManager.NETWORK_PROVIDER:
+                return CELLID_PROCESS;
+            default:
+                return null;
+        }
+    }
+
+    private static String translateProcessToLocationProvider(final String process) {
+        if (process == null) {
+            return null;
+        }
+        switch(process) {
+            case GPS_PROCESS:
+                return LocationManager.GPS_PROVIDER;
+            case CELLID_PROCESS:
+                return LocationManager.NETWORK_PROVIDER;
+            default:
+                return null;
+        }
+    }
+
     public CameraMetadataNative() {
         super();
         mMetadataPtr = nativeAllocate();
@@ -297,9 +335,9 @@
     public <T> T get(Key<T> key) {
         Preconditions.checkNotNull(key, "key must not be null");
 
-        T value = getOverride(key);
-        if (value != null) {
-            return value;
+        Pair<T, Boolean> override = getOverride(key);
+        if (override.second) {
+            return override.first;
         }
 
         return getBase(key);
@@ -409,23 +447,44 @@
         ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
         return marshaler.unmarshal(buffer);
     }
-
     // Need overwrite some metadata that has different definitions between native
     // and managed sides.
     @SuppressWarnings("unchecked")
-    private <T> T getOverride(Key<T> key) {
+    private <T> Pair<T, Boolean> getOverride(Key<T> key) {
+        T value = null;
+        boolean override = true;
+
         if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_FORMATS)) {
-            return (T) getAvailableFormats();
+            value = (T) getAvailableFormats();
         } else if (key.equals(CaptureResult.STATISTICS_FACES)) {
-            return (T) getFaces();
+            value = (T) getFaces();
         } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
-            return (T) getFaceRectangles();
+            value = (T) getFaceRectangles();
         } else if (key.equals(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)) {
-            return (T) getStreamConfigurationMap();
+            value = (T) getStreamConfigurationMap();
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CaptureRequest.TONEMAP_CURVE)) {
+            value = (T) getTonemapCurve();
+        } else if (key.equals(CaptureResult.JPEG_GPS_LOCATION)) {
+            value = (T) getGpsLocation();
+        } else if (key.equals(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP)) {
+            value = (T) getLensShadingMap();
+        } else {
+            override = false;
         }
 
-        // For other keys, get() falls back to getBase()
-        return null;
+        return Pair.create(value, override);
     }
 
     private int[] getAvailableFormats() {
@@ -541,6 +600,62 @@
         return fixedFaceRectangles;
     }
 
+    private LensShadingMap getLensShadingMap() {
+        float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP);
+        if (lsmArray == null) {
+            Log.w(TAG, "getLensShadingMap - Lens shading map was null.");
+            return null;
+        }
+        Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE);
+        if (s == null) {
+            Log.w(TAG, "getLensShadingMap - Lens shading map size was null.");
+            return null;
+        }
+        LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth());
+        return map;
+    }
+
+    private Location getGpsLocation() {
+        String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
+        Location l = new Location(translateProcessToLocationProvider(processingMethod));
+
+        double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
+        Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);
+
+        if (timeStamp != null) {
+            l.setTime(timeStamp);
+        } else {
+            Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
+        }
+
+        if (coords != null) {
+            l.setLatitude(coords[0]);
+            l.setLongitude(coords[1]);
+            l.setAltitude(coords[2]);
+        } else {
+            Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
+        }
+
+        return l;
+    }
+
+    private boolean setGpsLocation(Location l) {
+        if (l == null) {
+            return false;
+        }
+
+        double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() };
+        String processMethod = translateLocationProviderToProcess(l.getProvider());
+        long timestamp = l.getTime();
+
+        set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp);
+        set(CaptureRequest.JPEG_GPS_COORDINATES, coords);
+
+        if (processMethod == null) {
+            Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" +
+                    "provider");
+        } else {
+            setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod);
+        }
+        return true;
+    }
+
     private StreamConfigurationMap getStreamConfigurationMap() {
         StreamConfiguration[] configurations = getBase(
                 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
@@ -552,6 +667,63 @@
         return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations);
     }
 
+    private <T> Integer getMaxRegions(Key<T> key) {
+        final int AE = 0;
+        final int AWB = 1;
+        final int AF = 2;
+
+        // The order of the elements is: (AE, AWB, AF)
+        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);
+
+        if (maxRegions == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            return maxRegions[AE];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            return maxRegions[AWB];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            return maxRegions[AF];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
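On the public API side, the split keys read back as plain Integers. A caller-side sketch (illustration only, not part of the patch), assuming java.util.Arrays is available:

    static MeteringRectangle[] clampAeRegions(CameraCharacteristics characteristics,
            MeteringRectangle[] requested) {
        Integer maxAeRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        if (maxAeRegions == null || maxAeRegions == 0 || requested == null) {
            return null;  // AE regions not supported on this device
        }
        return (requested.length <= maxAeRegions)
                ? requested
                : Arrays.copyOf(requested, maxAeRegions);
    }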
+
+    private <T> Integer getMaxNumOutputs(Key<T> key) {
+        final int RAW = 0;
+        final int PROC = 1;
+        final int PROC_STALLING = 2;
+
+        // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
+        int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);
+
+        if (maxNumOutputs == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            return maxNumOutputs[RAW];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            return maxNumOutputs[PROC];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            return maxNumOutputs[PROC_STALLING];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
+
+    private <T> TonemapCurve getTonemapCurve() {
+        float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
+        float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
+        float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);
+        if (red == null || green == null || blue == null) {
+            return null;
+        }
+        TonemapCurve tc = new TonemapCurve(red, green, blue);
+        return tc;
+    }
+
     private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
         setBase(key.getNativeKey(), value);
     }
@@ -591,8 +763,11 @@
             return setAvailableFormats((int[]) value);
         } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
             return setFaceRectangles((Rect[]) value);
+        } else if (key.equals(CaptureRequest.TONEMAP_CURVE)) {
+            return setTonemapCurve((TonemapCurve) value);
+        } else if (key.equals(CaptureResult.JPEG_GPS_LOCATION)) {
+            return setGpsLocation((Location) value);
         }
-
         // For other keys, set() falls back to setBase().
         return false;
     }
@@ -646,6 +821,24 @@
         return true;
     }
 
+    private <T> boolean setTonemapCurve(TonemapCurve tc) {
+        if (tc == null) {
+            return false;
+        }
+
+        float[][] curve = new float[3][];
+        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
+            int pointCount = tc.getPointCount(i);
+            curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
+            tc.copyColorCurve(i, curve[i], 0);
+        }
+        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
+        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
+        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);
+
+        return true;
+    }
+
     private long mMetadataPtr; // native CameraMetadata*
 
     private native long nativeAllocate();
diff --git a/core/java/android/hardware/camera2/params/ColorSpaceTransform.java b/core/java/android/hardware/camera2/params/ColorSpaceTransform.java
index fa8c8ea..503af26 100644
--- a/core/java/android/hardware/camera2/params/ColorSpaceTransform.java
+++ b/core/java/android/hardware/camera2/params/ColorSpaceTransform.java
@@ -162,7 +162,7 @@
     public void copyElements(Rational[] destination, int offset) {
         checkArgumentNonnegative(offset, "offset must not be negative");
         checkNotNull(destination, "destination must not be null");
-        if (destination.length + offset < COUNT) {
+        if (destination.length - offset < COUNT) {
             throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
         }
 
@@ -197,7 +197,7 @@
     public void copyElements(int[] destination, int offset) {
         checkArgumentNonnegative(offset, "offset must not be negative");
         checkNotNull(destination, "destination must not be null");
-        if (destination.length + offset < COUNT_INT) {
+        if (destination.length - offset < COUNT_INT) {
             throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
         }
 
diff --git a/core/java/android/hardware/camera2/params/LensShadingMap.java b/core/java/android/hardware/camera2/params/LensShadingMap.java
index b328f578..9bbc33a 100644
--- a/core/java/android/hardware/camera2/params/LensShadingMap.java
+++ b/core/java/android/hardware/camera2/params/LensShadingMap.java
@@ -28,7 +28,7 @@
 /**
  * Immutable class for describing a {@code 4 x N x M} lens shading map of floats.
  *
- * @see CameraCharacteristics#LENS_SHADING_MAP
+ * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
  */
 public final class LensShadingMap {
 
@@ -62,12 +62,12 @@
     public LensShadingMap(final float[] elements, final int rows, final int columns) {
 
         mRows = checkArgumentPositive(rows, "rows must be positive");
-        mColumns = checkArgumentPositive(rows, "columns must be positive");
+        mColumns = checkArgumentPositive(columns, "columns must be positive");
         mElements = checkNotNull(elements, "elements must not be null");
 
         if (elements.length != getGainFactorCount()) {
             throw new IllegalArgumentException("elements must be " + getGainFactorCount() +
-                    " length");
+                    " length, received " + elements.length);
         }
 
         // Every element must be finite and >= 1.0f
@@ -242,4 +242,4 @@
     private final int mRows;
     private final int mColumns;
     private final float[] mElements;
-};
+}
diff --git a/core/java/android/hardware/camera2/params/MeteringRectangle.java b/core/java/android/hardware/camera2/params/MeteringRectangle.java
index a26c57d..a7a3b59 100644
--- a/core/java/android/hardware/camera2/params/MeteringRectangle.java
+++ b/core/java/android/hardware/camera2/params/MeteringRectangle.java
@@ -57,7 +57,7 @@
      * @param height height >= 0
      * @param meteringWeight weight >= 0
      *
-     * @throws IllegalArgumentException if any of the parameters were non-negative
+     * @throws IllegalArgumentException if any of the parameters were negative
      */
     public MeteringRectangle(int x, int y, int width, int height, int meteringWeight) {
         mX = checkArgumentNonnegative(x, "x must be nonnegative");
@@ -74,7 +74,7 @@
      * @param dimensions a non-{@code null} {@link android.util.Size Size} with width, height >= 0
      * @param meteringWeight weight >= 0
      *
-     * @throws IllegalArgumentException if any of the parameters were non-negative
+     * @throws IllegalArgumentException if any of the parameters were negative
      * @throws NullPointerException if any of the arguments were null
      */
     public MeteringRectangle(Point xy, Size dimensions, int meteringWeight) {
@@ -94,7 +94,7 @@
      * @param rect a non-{@code null} rectangle with all x,y,w,h dimensions >= 0
      * @param meteringWeight weight >= 0
      *
-     * @throws IllegalArgumentException if any of the parameters were non-negative
+     * @throws IllegalArgumentException if any of the parameters were negative
      * @throws NullPointerException if any of the arguments were null
      */
     public MeteringRectangle(Rect rect, int meteringWeight) {
@@ -210,7 +210,7 @@
                 && mY == other.mY
                 && mWidth == other.mWidth
                 && mHeight == other.mHeight
-                && mWidth == other.mWidth);
+                && mWeight == other.mWeight);
     }
 
     /**
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
index a3caba2..fe51215 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
@@ -41,6 +41,7 @@
 import android.hardware.camera2.params.StreamConfiguration;
 import android.hardware.camera2.params.StreamConfigurationDuration;
 import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.TonemapCurve;
 import android.hardware.camera2.utils.TypeReference;
 
 import static android.hardware.camera2.impl.CameraMetadataNative.*;
@@ -49,6 +50,7 @@
 import java.lang.reflect.Array;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.Arrays;
 import java.util.List;
 
 /**
@@ -1015,6 +1017,26 @@
             assertNull(resultSimpleFaces[i].getMouthPosition());
         }
 
+        /**
+         * Read/Write TonemapCurve
+         */
+        float[] red = new float[] {0.0f, 0.0f, 1.0f, 1.0f};
+        float[] green = new float[] {0.0f, 1.0f, 1.0f, 0.0f};
+        float[] blue = new float[] {
+                0.0000f, 0.0000f, 0.0667f, 0.2920f, 0.1333f, 0.4002f, 0.2000f, 0.4812f,
+                0.2667f, 0.5484f, 0.3333f, 0.6069f, 0.4000f, 0.6594f, 0.4667f, 0.7072f,
+                0.5333f, 0.7515f, 0.6000f, 0.7928f, 0.6667f, 0.8317f, 0.7333f, 0.8685f,
+                0.8000f, 0.9035f, 0.8667f, 0.9370f, 0.9333f, 0.9691f, 1.0000f, 1.0000f};
+        TonemapCurve tcIn = new TonemapCurve(red, green, blue);
+        mMetadata.set(CaptureResult.TONEMAP_CURVE, tcIn);
+        float[] redOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_RED);
+        float[] greenOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_GREEN);
+        float[] blueOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_BLUE);
+        assertTrue("Input and output tonemap curve should match", Arrays.equals(red, redOut));
+        assertTrue("Input and output tonemap curve should match", Arrays.equals(green, greenOut));
+        assertTrue("Input and output tonemap curve should match", Arrays.equals(blue, blueOut));
+        TonemapCurve tcOut = mMetadata.get(CaptureResult.TONEMAP_CURVE);
+        assertTrue("Input and output tonemap curve should match", tcIn.equals(tcOut));
     }
 
     /**
@@ -1166,6 +1188,48 @@
         }
     }
 
+    private <T> void assertKeyValueEquals(T expected, CameraCharacteristics.Key<T> key) {
+        assertKeyValueEquals(expected, key.getNativeKey());
+    }
+
+    private <T> void assertKeyValueEquals(T expected, Key<T> key) {
+        T actual = mMetadata.get(key);
+
+        assertEquals("Expected value for key " + key + " to match", expected, actual);
+    }
+
+    @SmallTest
+    public void testOverrideMaxRegions() {
+        // All keys are null before doing any writes.
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+
+        mMetadata.set(CameraCharacteristics.CONTROL_MAX_REGIONS,
+                new int[] { /*AE*/1, /*AWB*/2, /*AF*/3 });
+
+        // All keys are the expected value after doing a write
+        assertKeyValueEquals(1, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+        assertKeyValueEquals(2, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+        assertKeyValueEquals(3, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+    }
+
+    @SmallTest
+    public void testOverrideMaxNumOutputStreams() {
+        // All keys are null before doing any writes.
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
+
+        mMetadata.set(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS,
+                new int[] { /*RAW*/1, /*PROC*/2, /*PROC_STALLING*/3 });
+
+        // All keys are the expected value after doing a write
+        assertKeyValueEquals(1, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+        assertKeyValueEquals(2, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+        assertKeyValueEquals(3, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
+    }
+
     @SmallTest
     public void testCaptureResult() {
         mMetadata.set(CaptureRequest.CONTROL_AE_MODE,