Merge "camera3: Remove noiseModelCoefficients."
diff --git a/camera/docs/docs.html b/camera/docs/docs.html
index 5f95f25..a62f56d 100644
--- a/camera/docs/docs.html
+++ b/camera/docs/docs.html
@@ -268,16 +268,9 @@
           </ul>
         </li>
         <li>
-          <span class="toc_kind_header">static</span>
-          <ul class="toc_section">
-
-            <li><a href="#static_android.hotPixel.info.map">android.hotPixel.info.map</a></li>
-
-          </ul>
-        </li>
-        <li>
           <span class="toc_kind_header">dynamic</span>
           <ul class="toc_section">
+            <li><a href="#dynamic_android.hotPixel.map">android.hotPixel.map</a></li>
             <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a></li>
           </ul>
         </li>
@@ -427,6 +420,10 @@
             <li><a href="#static_android.request.maxNumInputStreams">android.request.maxNumInputStreams</a></li>
             <li><a href="#static_android.request.pipelineMaxDepth">android.request.pipelineMaxDepth</a></li>
             <li><a href="#static_android.request.partialResultCount">android.request.partialResultCount</a></li>
+            <li><a href="#static_android.request.availableCapabilities">android.request.availableCapabilities</a></li>
+            <li><a href="#static_android.request.availableRequestKeys">android.request.availableRequestKeys</a></li>
+            <li><a href="#static_android.request.availableResultKeys">android.request.availableResultKeys</a></li>
+            <li><a href="#static_android.request.availableCharacteristicsKeys">android.request.availableCharacteristicsKeys</a></li>
           </ul>
         </li>
         <li>
@@ -461,6 +458,10 @@
             <li><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a></li>
             <li><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a></li>
             <li><a href="#static_android.scaler.availableRawSizes">android.scaler.availableRawSizes</a></li>
+            <li><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a></li>
+            <li><a href="#static_android.scaler.availableStreamConfigurations">android.scaler.availableStreamConfigurations</a></li>
+            <li><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a></li>
+            <li><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a></li>
           </ul>
         </li>
         <li>
@@ -499,17 +500,9 @@
 
             <li><a href="#static_android.sensor.baseGainFactor">android.sensor.baseGainFactor</a></li>
             <li><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a></li>
-            <li><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a></li>
-            <li><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a></li>
-            <li><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a></li>
-            <li><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a></li>
-            <li><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a></li>
-            <li><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a></li>
             <li><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a></li>
             <li><a href="#static_android.sensor.orientation">android.sensor.orientation</a></li>
             <li><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a></li>
-            <li><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a></li>
-            <li><a href="#static_android.sensor.referenceIlluminant2">android.sensor.referenceIlluminant2</a></li>
             <li><a href="#static_android.sensor.availableTestPatternModes">android.sensor.availableTestPatternModes</a></li>
           </ul>
         </li>
@@ -521,6 +514,10 @@
             <li><a href="#dynamic_android.sensor.sensitivity">android.sensor.sensitivity</a></li>
             <li><a href="#dynamic_android.sensor.timestamp">android.sensor.timestamp</a></li>
             <li><a href="#dynamic_android.sensor.temperature">android.sensor.temperature</a></li>
+            <li><a href="#dynamic_android.sensor.referenceIlluminant">android.sensor.referenceIlluminant</a></li>
+            <li><a href="#dynamic_android.sensor.calibrationTransform">android.sensor.calibrationTransform</a></li>
+            <li><a href="#dynamic_android.sensor.colorTransform">android.sensor.colorTransform</a></li>
+            <li><a href="#dynamic_android.sensor.forwardMatrix">android.sensor.forwardMatrix</a></li>
             <li><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a></li>
             <li><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a></li>
             <li><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a></li>
@@ -5889,31 +5886,36 @@
             <td class="entry_type">
                 <span class="entry_type_name entry_type_name_enum">byte</span>
 
-              <span class="entry_type_visibility"> [system]</span>
+              <span class="entry_type_visibility"> [public]</span>
 
                 <ul class="entry_type_enum">
                   <li>
                     <span class="entry_type_enum_name">OFF</span>
-                    <span class="entry_type_enum_notes"><p>No hot pixel correction can be
-applied</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>No hot pixel correction is applied.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">FAST</span>
-                    <span class="entry_type_enum_notes"><p>Frame rate must not be reduced compared to raw
-Bayer output</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>Hot pixel correction is applied.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">HIGH_QUALITY</span>
-                    <span class="entry_type_enum_notes"><p>Frame rate may be reduced by high
-quality</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate may be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>A high-quality hot pixel correction is applied.<wbr/></p></span>
                   </li>
                 </ul>
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>Set operational mode for hot pixel
-correction</p>
+              <p>Set operational mode for hot pixel correction.<wbr/></p>
+<p>Hot pixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
+that do not accurately encode the incoming light (i.<wbr/>e.<wbr/> pixels that
+are stuck at an arbitrary value).<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -5938,79 +5940,6 @@
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
-
-      <thead class="entries_header">
-        <tr>
-          <th class="th_name">Property Name</th>
-          <th class="th_type">Type</th>
-          <th class="th_description">Description</th>
-          <th class="th_units">Units</th>
-          <th class="th_range">Range</th>
-          <th class="th_tags">Tags</th>
-        </tr>
-      </thead>
-
-      <tbody>
-
-        
-
-        
-
-        
-
-        
-                
-            
-
-                
-          <tr class="entry" id="static_android.hotPixel.info.map">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>hot<wbr/>Pixel.<wbr/>info.<wbr/>map
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">int32</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  2 x n
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">list of coordinates based on android.<wbr/>sensor.<wbr/>pixel<wbr/>Array<wbr/>Size</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Location of hot/<wbr/>defective pixels on
-sensor</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_ADV">ADV</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-        
-        
-
-        
-
-      <!-- end of kind -->
-      </tbody>
       <tr><td colspan="6" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
@@ -6035,6 +5964,68 @@
         
 
                 
+          <tr class="entry" id="dynamic_android.hotPixel.map">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>hot<wbr/>Pixel.<wbr/>map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2 x n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+                <div class="entry_type_notes">list of coordinates based on android.<wbr/>sensor.<wbr/>pixel<wbr/>Array<wbr/>Size</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of <code>(x,<wbr/> y)</code> coordinates of hot/<wbr/>defective pixels on the
+sensor,<wbr/> where <code>(x,<wbr/> y)</code> lies between <code>(0,<wbr/> 0)</code>,<wbr/> which is the top-left
+of the pixel array,<wbr/> and the (width,<wbr/> height) of the pixel array given in
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/>  This may include hot pixels
+that lie outside of the active array bounds given by
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>n &lt;= number of pixels on the sensor.<wbr/>
+The <code>(x,<wbr/> y)</code> coordinates must be bounded by
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_ADV">ADV</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A hot pixel map contains the coordinates of pixels on the camera
+sensor that do not report valid values (usually due to defects in
+the camera sensor).<wbr/> This includes pixels that are stuck at certain
+values,<wbr/> or have a response that does not accurately encode the
+incoming light from the scene.<wbr/></p>
+<p>To avoid performance issues,<wbr/> there should be significantly fewer hot
+pixels than actual pixels on the camera sensor.<wbr/></p>
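+<p>For illustration only,<wbr/> a minimal sketch of consuming the 2 x n layout with the
+camera_metadata C API from system/core's camera metadata library.<wbr/> It assumes the generated
+ANDROID_HOT_PIXEL_MAP tag name,<wbr/> that the coordinate pairs are packed as x0,<wbr/> y0,<wbr/> x1,<wbr/> y1,<wbr/> ...,<wbr/>
+and that <code>metadata</code> is the capture result buffer:</p>
+<pre><code>// Sketch: iterate the hot pixel map as (x, y) coordinate pairs.
+camera_metadata_ro_entry_t e;
+if (find_camera_metadata_ro_entry(metadata, ANDROID_HOT_PIXEL_MAP, &amp;e) == 0
+        &amp;&amp; (e.count % 2) == 0) {
+    for (size_t i = 0; i &lt; e.count; i += 2) {
+        int32_t x = e.data.i32[i];     // column, within pixelArraySize
+        int32_t y = e.data.i32[i + 1]; // row, within pixelArraySize
+        // ... record (x, y) as a hot/defective pixel ...
+    }
+}
+</code></pre>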
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
           <tr class="entry" id="dynamic_android.hotPixel.mode">
             <td class="entry_name" rowspan="1">
               android.<wbr/>hot<wbr/>Pixel.<wbr/>mode
@@ -6042,31 +6033,36 @@
             <td class="entry_type">
                 <span class="entry_type_name entry_type_name_enum">byte</span>
 
-              <span class="entry_type_visibility"> [system]</span>
+              <span class="entry_type_visibility"> [public]</span>
 
                 <ul class="entry_type_enum">
                   <li>
                     <span class="entry_type_enum_name">OFF</span>
-                    <span class="entry_type_enum_notes"><p>No hot pixel correction can be
-applied</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>No hot pixel correction is applied.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">FAST</span>
-                    <span class="entry_type_enum_notes"><p>Frame rate must not be reduced compared to raw
-Bayer output</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>Hot pixel correction is applied.<wbr/></p></span>
                   </li>
                   <li>
                     <span class="entry_type_enum_name">HIGH_QUALITY</span>
-                    <span class="entry_type_enum_notes"><p>Frame rate may be reduced by high
-quality</p></span>
+                    <span class="entry_type_enum_notes"><p>The frame rate may be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>A high-quality hot pixel correction is applied.<wbr/></p></span>
                   </li>
                 </ul>
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>Set operational mode for hot pixel
-correction</p>
+              <p>Set operational mode for hot pixel correction.<wbr/></p>
+<p>Hot pixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
+that do not accurately encode the incoming light (i.<wbr/>e.<wbr/> pixels that
+are stuck at an arbitrary value).<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -6479,10 +6475,10 @@
 <li>The sizes will be sorted by increasing pixel area (width x height).<wbr/>
 If several resolutions have the same area,<wbr/> they will be sorted by increasing width.<wbr/></li>
 <li>The aspect ratio of the largest thumbnail size will be same as the
-aspect ratio of largest size in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>.<wbr/>
+aspect ratio of largest JPEG output size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a>.<wbr/>
 The largest size is defined as the size that has the largest pixel area
 in a given size list.<wbr/></li>
-<li>Each size in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a> will have at least
+<li>Each output JPEG size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> will have at least
 one corresponding size that has the same aspect ratio in availableThumbnailSizes,<wbr/>
 and vice versa.<wbr/></li>
 <li>All non (0,<wbr/> 0) sizes will have non-zero widths and heights.<wbr/></li>
@@ -9527,6 +9523,415 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="static_android.request.availableCapabilities">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Capabilities
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">BACKWARD_COMPATIBLE</span>
+                    <span class="entry_type_enum_notes"><p>The minimal set of capabilities that every camera
+device (regardless of <a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>)
+will support.<wbr/></p>
+<p>The full set of features supported by this capability makes
+the camera2 api backwards compatible with the camera1
+(android.<wbr/>hardware.<wbr/>Camera) API.<wbr/></p>
+<p>TODO: @hide this.<wbr/> Doesn't really mean anything except
+act as a catch-all for all the 'base' functionality.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">OPTIONAL</span>
+                    <span class="entry_type_enum_notes"><p>This is a catch-all capability to include all other
+tags or functionality not encapsulated by one of the other
+capabilities.<wbr/></p>
+<p>A typical example is all tags marked 'optional'.<wbr/></p>
+<p>TODO: @hide.<wbr/> We may not need this if we @hide all the optional
+tags not belonging to a capability.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MANUAL_SENSOR</span>
+                    <span class="entry_type_enum_notes"><p>The camera device can be manually controlled (3A algorithms such
+as auto exposure and auto focus can be
+bypassed).<wbr/> This includes but is not limited to:</p>
+<ul>
+<li>Manual exposure control<ul>
+<li><a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a></li>
+<li><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></li>
+</ul>
+</li>
+<li>Manual sensitivity control<ul>
+<li><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></li>
+<li><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></li>
+<li><a href="#static_android.sensor.baseGainFactor">android.<wbr/>sensor.<wbr/>base<wbr/>Gain<wbr/>Factor</a></li>
+</ul>
+</li>
+<li>Manual lens control<ul>
+<li>android.<wbr/>lens.<wbr/>*</li>
+</ul>
+</li>
+<li>Manual flash control<ul>
+<li>android.<wbr/>flash.<wbr/>*</li>
+</ul>
+</li>
+<li>Manual black level locking<ul>
+<li><a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a></li>
+</ul>
+</li>
+</ul>
+<p>If any of the above 3A algorithms are enabled,<wbr/> then the camera
+device will accurately report the values applied by 3A in the
+result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GCAM</span>
+                    <span class="entry_type_enum_optional">optional</span>
+                    <span class="entry_type_enum_notes"><p>TODO: This should be @hide</p>
+<ul>
+<li>Manual tonemap control<ul>
+<li><a href="#controls_android.tonemap.curveBlue">android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue</a></li>
+<li><a href="#controls_android.tonemap.curveGreen">android.<wbr/>tonemap.<wbr/>curve<wbr/>Green</a></li>
+<li><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a></li>
+<li><a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></li>
+<li><a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></li>
+</ul>
+</li>
+<li>Manual white balance control<ul>
+<li><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a></li>
+<li><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a></li>
+</ul>
+</li>
+<li>Lens shading map information<ul>
+<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a></li>
+<li><a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a></li>
+</ul>
+</li>
+</ul>
+<p>If auto white balance is enabled,<wbr/> then the camera device
+will accurately report the values applied by AWB in the result.<wbr/></p>
+<p>The camera device will also support everything in MANUAL_<wbr/>SENSOR
+except manual lens control and manual flash control.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZSL</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports the Zero Shutter Lag use case.<wbr/></p>
+<ul>
+<li>At least one input stream can be used.<wbr/></li>
+<li>RAW_<wbr/>OPAQUE is supported as an output/<wbr/>input format</li>
+<li>Using RAW_<wbr/>OPAQUE does not cause a frame rate drop
+  relative to the sensor's maximum capture rate (at that
+  resolution).<wbr/></li>
+<li>RAW_<wbr/>OPAQUE will be reprocessable into both YUV_<wbr/>420_<wbr/>888
+  and JPEG formats.<wbr/></li>
+<li>The maximum available resolution for RAW_<wbr/>OPAQUE streams
+  (both input/<wbr/>output) will match the maximum available
+  resolution of JPEG streams.<wbr/></li>
+</ul></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DNG</span>
+                    <span class="entry_type_enum_optional">optional</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports outputting RAW buffers that can be
+saved offline into a DNG format.<wbr/> It can reprocess DNG
+files (produced from the same camera device) back into YUV.<wbr/></p>
+<ul>
+<li>At least one input stream can be used.<wbr/></li>
+<li>RAW16 is supported as output/<wbr/>input format.<wbr/></li>
+<li>RAW16 is reprocessable into both YUV_<wbr/>420_<wbr/>888 and JPEG
+  formats.<wbr/></li>
+<li>The maximum available resolution for RAW16 streams (both
+  input/<wbr/>output) will match the value in
+  <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></li>
+<li>All DNG-related optional metadata entries are provided
+  by the camera device.<wbr/></li>
+</ul></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of capabilities that the camera device
+advertises as fully supporting.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A capability is a contract that the camera device makes in order
+to be able to satisfy one or more use cases.<wbr/></p>
+<p>Listing a capability guarantees that the whole set of features
+required to support a common use will all be available.<wbr/></p>
+<p>Using a subset of the functionality provided by an unsupported
+capability may be possible on a specific camera device implementation;
+to do this query each of <a href="#static_android.request.availableRequestKeys">android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys</a>,<wbr/>
+<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a>,<wbr/>
+<a href="#static_android.request.availableCharacteristicsKeys">android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys</a>.<wbr/></p>
+<p>XX: Maybe these should go into android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level
+as a table instead?</p>
+<p>The following capabilities are guaranteed to be available on
+<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> <code>==</code> FULL devices:</p>
+<ul>
+<li>MANUAL_<wbr/>SENSOR</li>
+<li>ZSL</li>
+</ul>
+<p>Other capabilities may be available on either FULL or LIMITED
+devices,<wbr/> but the application should query this field to be sure.<wbr/></p>
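+<p>For illustration only,<wbr/> a minimal sketch of how a client might test for a capability in
+the static metadata.<wbr/> It assumes the generated ANDROID_REQUEST_AVAILABLE_CAPABILITIES tag and
+its MANUAL_SENSOR enum value from camera_metadata_tags.<wbr/>h,<wbr/> and that <code>static_info</code> is the
+static metadata buffer:</p>
+<pre><code>// Sketch: does this camera device advertise the MANUAL_SENSOR capability?
+int has_manual_sensor = 0;
+camera_metadata_ro_entry_t caps;
+if (find_camera_metadata_ro_entry(static_info,
+        ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &amp;caps) == 0) {
+    for (size_t i = 0; i &lt; caps.count; i++) {
+        if (caps.data.u8[i] ==
+                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
+            has_manual_sensor = 1;
+            break;
+        }
+    }
+}
+</code></pre>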
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Additional constraint details per-capability will be available
+in the Compatibility Test Suite.<wbr/></p>
+<p>BACKWARD_<wbr/>COMPATIBLE capability requirements are not explicitly listed.<wbr/>
+Instead refer to "BC" tags and the camera CTS tests in the
+android.<wbr/>hardware.<wbr/>cts package.<wbr/></p>
+<p>Listed controls that can be either request or result (e.<wbr/>g.<wbr/>
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) must be available both in the
+request and the result in order to be considered to be
+capability-compliant.<wbr/></p>
+              <p>For example,<wbr/> if the HAL claims to support the MANUAL_<wbr/>SENSOR capability,<wbr/>
+then exposure time must be configurable via the request <em>and</em>
+the actual exposure applied must be available via
+the result.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableRequestKeys">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with CaptureRequest.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Attempting to set a key into a CaptureRequest that is not
+listed here will result in an invalid request and will be rejected
+by the camera device.<wbr/></p>
+<p>This field can be used to query the feature set of a camera device
+at a more granular level than capabilities.<wbr/> This is especially
+important for optional keys that are not listed under any capability
+in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+<p>TODO: This should be used by #getAvailableCaptureRequestKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not consume any request tags that are not listed either
+here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via CameraCharacteristics#getAvailableCaptureRequestKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableResultKeys">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with CaptureResult.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Attempting to get a key from a CaptureResult that is not
+listed here will always return a <code>null</code> value.<wbr/> Getting a key from
+a CaptureResult that is listed here must never return a <code>null</code>
+value.<wbr/></p>
+<p>The following keys may return <code>null</code> unless they are enabled:</p>
+<ul>
+<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> (non-null iff <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON)</li>
+</ul>
+<p>(Those sometimes-null keys should nevertheless be listed here
+if they are available.<wbr/>)</p>
+<p>This field can be used to query the feature set of a camera device
+at a more granular level than capabilities.<wbr/> This is especially
+important for optional keys that are not listed under any capability
+in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+<p>TODO: This should be used by #getAvailableCaptureResultKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Tags listed here must always have an entry in the result metadata,<wbr/>
+even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
+matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
+<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not produce any result tags that are not listed either
+here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via CameraCharacteristics#getAvailableCaptureResultKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableCharacteristicsKeys">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with CameraCharacteristics.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry follows the same rules as
+<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a> (except that it applies for
+CameraCharacteristics instead of CaptureResult).<wbr/> See above for more
+details.<wbr/></p>
+<p>TODO: This should be used by CameraCharacteristics#getKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Tags listed here must always have an entry in the static info metadata,<wbr/>
+even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
+matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
+<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not have any tags in its static info that are not listed
+either here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via CameraCharacteristics#getKeys.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -10020,7 +10425,7 @@
 
             <td class="entry_description">
               <p>The list of image formats that are supported by this
-camera device.<wbr/></p>
+camera device for output streams.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -10098,6 +10503,7 @@
             </td>
 
             <td class="entry_range">
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10150,6 +10556,7 @@
             </td>
 
             <td class="entry_range">
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10250,6 +10657,7 @@
             </td>
 
             <td class="entry_range">
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10304,6 +10712,7 @@
             </td>
 
             <td class="entry_range">
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10381,6 +10790,7 @@
             </td>
 
             <td class="entry_range">
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10434,7 +10844,8 @@
             </td>
 
             <td class="entry_range">
-              <p>Must include: - sensor maximum resolution</p>
+              <p><strong>Deprecated</strong>.<wbr/> Do not use.<wbr/> TODO: Remove property.<wbr/>
+Must include the sensor maximum resolution.<wbr/></p>
             </td>
 
             <td class="entry_tags">
@@ -10446,6 +10857,476 @@
           <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
            <!-- end of entry -->
         
+                
+          <tr class="entry" id="static_android.scaler.availableInputOutputFormatsMap">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as imageFormat]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The mapping of image formats that are supported by this
+camera device for input streams,<wbr/> to their corresponding output formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>See <a href="#static_android.scaler.availableFormats">android.<wbr/>scaler.<wbr/>available<wbr/>Formats</a> for enum definitions.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All camera devices with at least 1
+android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams will have at least one
+available input format.<wbr/></p>
+<p>The camera device will support the following map of formats,<wbr/>
+if its dependent capability is supported:</p>
+<table>
+<thead>
+<tr>
+<th align="left">Input Format</th>
+<th align="left">Output Format</th>
+<th align="left">Capability</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="left">RAW_<wbr/>OPAQUE</td>
+<td align="left">JPEG</td>
+<td align="left">ZSL</td>
+</tr>
+<tr>
+<td align="left">RAW_<wbr/>OPAQUE</td>
+<td align="left">YUV_<wbr/>420_<wbr/>888</td>
+<td align="left">ZSL</td>
+</tr>
+<tr>
+<td align="left">RAW_<wbr/>OPAQUE</td>
+<td align="left">RAW16</td>
+<td align="left">DNG</td>
+</tr>
+<tr>
+<td align="left">RAW16</td>
+<td align="left">YUV_<wbr/>420_<wbr/>888</td>
+<td align="left">DNG</td>
+</tr>
+<tr>
+<td align="left">RAW16</td>
+<td align="left">JPEG</td>
+<td align="left">DNG</td>
+</tr>
+</tbody>
+</table>
+<p>For ZSL-capable camera devices,<wbr/> using the RAW_<wbr/>OPAQUE format
+as either input or output will never hurt maximum frame rate (i.<wbr/>e.<wbr/>
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> will not have RAW_<wbr/>OPAQUE).<wbr/></p>
+<p>Attempting to configure an input stream with output streams not
+listed as available in this map is not valid.<wbr/></p>
+<p>TODO: Add java type mapping for this property.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value is encoded as a variable-size array-of-arrays.<wbr/>
+The inner array always contains <code>[format,<wbr/> length,<wbr/> ...<wbr/>]</code> where
+<code>...<wbr/></code> has <code>length</code> elements.<wbr/> An inner array is followed by another
+inner array if the total metadata entry size hasn't yet been exceeded.<wbr/></p>
+<p>A code sample to read/<wbr/>write this encoding (with a device that
+supports reprocessing RAW_<wbr/>OPAQUE to RAW16,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> and JPEG,<wbr/>
+and reprocessing RAW16 to YUV_<wbr/>420_<wbr/>888 and JPEG):</p>
+<pre><code>//<wbr/> reading
+int32_<wbr/>t* contents = &amp;entry.<wbr/>data.<wbr/>i32[0];
+for (size_<wbr/>t i = 0; i &lt; entry.<wbr/>count; ) {
+    int32_<wbr/>t format = contents[i++];
+    int32_<wbr/>t length = contents[i++];
+    int32_<wbr/>t output_<wbr/>formats[length];
+    memcpy(&amp;output_<wbr/>formats[0],<wbr/> &amp;contents[i],<wbr/>
+           length * sizeof(int32_<wbr/>t));
+    i += length;
+}
+
+//<wbr/> writing (static example,<wbr/> DNG+ZSL)
+int32_<wbr/>t contents[] = {
+  RAW_<wbr/>OPAQUE,<wbr/> 3,<wbr/> RAW16,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
+  RAW16,<wbr/> 2,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
+};
+update_<wbr/>camera_<wbr/>metadata_<wbr/>entry(metadata,<wbr/> index,<wbr/> &amp;contents[0],<wbr/>
+      sizeof(contents)/<wbr/>sizeof(contents[0]),<wbr/> &amp;updated_<wbr/>entry);
+</code></pre>
+<p>If the HAL claims to support any of the capabilities listed in the
+above details,<wbr/> then it must also support all the input-output
+combinations listed for that capability.<wbr/> It can optionally support
+additional formats if it so chooses.<wbr/></p>
+<p>Refer to <a href="#static_android.scaler.availableFormats">android.<wbr/>scaler.<wbr/>available<wbr/>Formats</a> for the enum values
+which correspond to HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>* in
+system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableStreamConfigurations">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OUTPUT</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INPUT</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available stream configurations that this
+camera device supports
+(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The configurations are listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code>
+tuples.<wbr/></p>
+<p>All camera devices will support sensor maximum resolution (defined by
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) for the JPEG format.<wbr/></p>
+<p>For a given use case,<wbr/> the actual maximum supported resolution
+may be lower than what is listed here,<wbr/> depending on the destination
+Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
+the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
+smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
+can provide.<wbr/></p>
+<p>Please reference the documentation for the image data destination to
+check if it limits the maximum size for image data.<wbr/></p>
+<p>Not all output formats may be supported in a configuration with
+an input stream of a particular format.<wbr/> For more details,<wbr/> see
+<a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a>.<wbr/></p>
+<p>The following table describes the minimum required output stream
+configurations based on the hardware level
+(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>):</p>
+<table>
+<thead>
+<tr>
+<th align="center">Format</th>
+<th align="center">Size</th>
+<th align="center">Hardware Level</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">JPEG</td>
+<td align="center"><a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">1920x1080 (1080p)</td>
+<td align="center">Any</td>
+<td align="center">if 1080p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">1280x720 (720)</td>
+<td align="center">Any</td>
+<td align="center">if 720p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">640x480 (480p)</td>
+<td align="center">Any</td>
+<td align="center">if 480p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">320x240 (240p)</td>
+<td align="center">Any</td>
+<td align="center">if 240p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">all output sizes available for JPEG</td>
+<td align="center">FULL</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">all output sizes available for JPEG,<wbr/> up to the maximum video size</td>
+<td align="center">LIMITED</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">IMPLEMENTATION_<wbr/>DEFINED</td>
+<td align="center">same as YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+</tbody>
+</table>
+<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> for additional
+mandatory stream configurations on a per-capability basis.<wbr/></p>
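+<p>For illustration only,<wbr/> a minimal sketch of walking the n x 4 tuples in this field.<wbr/>
+It assumes the generated ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS tag and its
+OUTPUT/<wbr/>INPUT enum values,<wbr/> and that <code>static_info</code> is the static metadata buffer:</p>
+<pre><code>// Sketch: enumerate each (format, width, height, direction) tuple.
+camera_metadata_ro_entry_t cfgs;
+if (find_camera_metadata_ro_entry(static_info,
+        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &amp;cfgs) == 0) {
+    for (size_t i = 0; i + 3 &lt; cfgs.count; i += 4) {
+        int32_t format    = cfgs.data.i32[i + 0];
+        int32_t width     = cfgs.data.i32[i + 1];
+        int32_t height    = cfgs.data.i32[i + 2];
+        int32_t direction = cfgs.data.i32[i + 3]; // OUTPUT or INPUT
+        // ... record this stream configuration ...
+    }
+}
+</code></pre>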
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
+of sensor maximum resolution for JPEG formats (regardless of hardware
+level).<wbr/></p>
+<p>(The following is a rewording of the above required table):</p>
+<p>The HAL must include sensor maximum resolution (defined by
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
+<p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
+the HAL must include all YUV_<wbr/>420_<wbr/>888 sizes that have JPEG sizes listed
+here as output streams.<wbr/></p>
+<p>It must also include each below resolution if it is smaller than or
+equal to the sensor maximum resolution (for both YUV_<wbr/>420_<wbr/>888 and JPEG
+formats),<wbr/> as output streams:</p>
+<ul>
+<li>240p (320 x 240)</li>
+<li>480p (640 x 480)</li>
+<li>720p (1280 x 720)</li>
+<li>1080p (1920 x 1080)</li>
+</ul>
+<p>For LIMITED capability devices
+(<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
+the HAL only has to list up to the maximum video size
+supported by the device.<wbr/></p>
+<p>Regardless of hardware level,<wbr/> every output resolution available for
+YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.<wbr/></p>
+<p>This supersedes the following fields,<wbr/> which are now deprecated:</p>
+<ul>
+<li>availableFormats</li>
+<li>available[Processed,<wbr/>Raw,<wbr/>Jpeg]Sizes</li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableMinFrameDurations">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the minimum frame duration for each
+format/<wbr/>size combination.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This should correspond to the frame duration when only that
+stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
+set to either OFF or FAST.<wbr/></p>
+<p>When multiple streams are used in a request,<wbr/> the minimum frame
+duration will be max(individual stream min durations).<wbr/></p>
+<p>The minimum frame duration of a stream (of a particular format,<wbr/> size)
+is the same regardless of whether the stream is input or output.<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
+calculating the max frame rate.<wbr/></p>
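+<p>For illustration only,<wbr/> a minimal sketch of the rule above.<wbr/> It assumes a hypothetical
+<code>lookup_min_duration()</code> helper that searches this field for a given (format,<wbr/> width,<wbr/> height),<wbr/>
+and hypothetical <code>streams</code>/<wbr/><code>num_streams</code> describing the configured streams:</p>
+<pre><code>// Sketch: the minimum frame duration for a request using several streams
+// is the max() of the per-stream minimum durations listed in this field.
+int64_t request_min_duration_ns = 0;
+for (size_t i = 0; i &lt; num_streams; i++) {
+    int64_t d = lookup_min_duration(static_info,   // hypothetical helper
+            streams[i].format, streams[i].width, streams[i].height);
+    if (d &gt; request_min_duration_ns) {
+        request_min_duration_ns = d;
+    }
+}
+</code></pre>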
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableStallDurations">
+            <td class="entry_name" rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the maximum stall duration for each
+format/<wbr/>size combination.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A stall duration is how much extra time would get added
+to the normal minimum frame duration for a repeating request
+that has streams with non-zero stall.<wbr/></p>
+<p>For example,<wbr/> consider JPEG captures which have the following
+characteristics:</p>
+<ul>
+<li>JPEG streams act like processed YUV streams in requests for which
+they are not included; in requests in which they are directly
+referenced,<wbr/> they act as JPEG streams.<wbr/> This is because supporting a
+JPEG stream requires the underlying YUV data to always be ready for
+use by a JPEG encoder,<wbr/> but the encoder will only be used (and impact
+frame duration) on requests that actually reference a JPEG stream.<wbr/></li>
+<li>The JPEG processor can run concurrently to the rest of the camera
+pipeline,<wbr/> but cannot process more than 1 capture at a time.<wbr/></li>
+</ul>
+<p>In other words,<wbr/> using a repeating YUV request would result
+in a steady frame rate (let's say it's 30 FPS).<wbr/> If a single
+JPEG request is submitted periodically,<wbr/> the frame rate will stay
+at 30 FPS (as long as we wait for the previous JPEG to return each
+time).<wbr/> If we try to submit a repeating YUV + JPEG request,<wbr/> then
+the frame rate will drop from 30 FPS.<wbr/></p>
+<p>In general,<wbr/> submitting a new request with a non-zero stall time
+stream will <em>not</em> cause a frame rate drop unless there are still
+outstanding buffers for that stream from previous requests.<wbr/></p>
+<p>Submitting a repeating request with a set of streams (call this <code>S</code>)
+is equivalent to setting the minimum frame duration to
+the normal minimum frame duration corresponding to <code>S</code>,<wbr/> plus
+the maximum stall duration for <code>S</code>.<wbr/></p>
+<p>When interleaving requests with and without a stall duration,<wbr/>
+a request will stall by the maximum of the remaining stall times
+across all can-stall streams with outstanding buffers.<wbr/></p>
+<p>This means that a stalling request will not have an exposure start
+until the stall has completed.<wbr/></p>
+<p>This should correspond to the stall duration when only that stream is
+active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST
+or OFF.<wbr/> Setting any of the processing modes to HIGH_<wbr/>QUALITY
+effectively results in an indeterminate stall duration for all
+streams in a request (the regular stall calculation rules are
+ignored).<wbr/></p>
+<p>The following formats may always have a stall duration:</p>
+<ul>
+<li>JPEG</li>
+<li>RAW16</li>
+</ul>
+<p>The following formats will never have a stall duration:</p>
+<ul>
+<li>YUV_<wbr/>420_<wbr/>888</li>
+<li>IMPLEMENTATION_<wbr/>DEFINED</li>
+</ul>
+<p>All other formats may or may not have an allowed stall duration on
+a per-capability basis; refer to android.<wbr/>request.<wbr/>available<wbr/>Capabilities
+for more details.<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> for more information about
+calculating the max frame rate (absent stalls).<wbr/></p>
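+<p>For illustration only,<wbr/> a minimal sketch of combining this field with
+<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> for a repeating request,<wbr/> per the
+rule above.<wbr/> <code>lookup_min_duration()</code> and <code>lookup_stall_duration()</code> are hypothetical
+helpers over those two fields,<wbr/> and <code>streams</code>/<wbr/><code>num_streams</code> describe the configured streams:</p>
+<pre><code>// Sketch: effective frame duration for a repeating request with stream set S
+// = max(per-stream minimum frame durations) + max(per-stream stall durations).
+int64_t min_duration_ns = 0;
+int64_t max_stall_ns = 0;
+for (size_t i = 0; i &lt; num_streams; i++) {
+    int64_t d = lookup_min_duration(static_info, streams[i]);   // hypothetical
+    int64_t s = lookup_stall_duration(static_info, streams[i]); // hypothetical
+    if (d &gt; min_duration_ns) min_duration_ns = d;
+    if (s &gt; max_stall_ns) max_stall_ns = s;
+}
+int64_t repeating_frame_duration_ns = min_duration_ns + max_stall_ns;
+</code></pre>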
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If possible,<wbr/> it is recommended that all non-JPEG formats
+(such as RAW16) have no stall duration.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
         
 
       <!-- end of kind -->
@@ -10654,7 +11535,7 @@
 
             <td class="entry_range">
               <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-android.<wbr/>scaler.<wbr/>available*Min<wbr/>Durations.<wbr/> The duration
+<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a>.<wbr/> The duration
 is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
             </td>
 
@@ -10701,59 +11582,42 @@
 largest requested stream resolution.<wbr/></li>
 <li>Using more than one output stream in a request does not affect the
 frame duration.<wbr/></li>
-<li>JPEG streams act like processed YUV streams in requests for which
-they are not included; in requests in which they are directly
-referenced,<wbr/> they act as JPEG streams.<wbr/> This is because supporting a
-JPEG stream requires the underlying YUV data to always be ready for
-use by a JPEG encoder,<wbr/> but the encoder will only be used (and impact
-frame duration) on requests that actually reference a JPEG stream.<wbr/></li>
-<li>The JPEG processor can run concurrently to the rest of the camera
-pipeline,<wbr/> but cannot process more than 1 capture at a time.<wbr/></li>
+<li>Certain format-streams may need to do additional background processing
+before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
+can run concurrently to the rest of the camera pipeline,<wbr/> but
+cannot process more than 1 capture at a time.<wbr/></li>
 </ul>
 <p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the android.<wbr/>scaler.<wbr/>available*Min<wbr/>Durations fields.<wbr/>
+is provided via the <a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> field.<wbr/>
 These are used to determine the maximum frame rate /<wbr/> minimum frame
 duration that is possible for a given stream configuration.<wbr/></p>
 <p>Specifically,<wbr/> the application can use the following rules to
-determine the minimum frame duration it can request from the HAL
+determine the minimum frame duration it can request from the camera
 device:</p>
 <ol>
-<li>Given the application's currently configured set of output
-streams,<wbr/> <code>S</code>,<wbr/> divide them into three sets: streams in a JPEG format
-<code>SJ</code>,<wbr/> streams in a raw sensor format <code>SR</code>,<wbr/> and the rest ('processed')
-<code>SP</code>.<wbr/></li>
-<li>For each subset of streams,<wbr/> find the largest resolution (by pixel
-count) in the subset.<wbr/> This gives (at most) three resolutions <code>RJ</code>,<wbr/>
-<code>RR</code>,<wbr/> and <code>RP</code>.<wbr/></li>
-<li>If <code>RJ</code> is greater than <code>RP</code>,<wbr/> set <code>RP</code> equal to <code>RJ</code>.<wbr/> If there is
-no exact match for <code>RP == RJ</code> (in particular there isn't an available
-processed resolution at the same size as <code>RJ</code>),<wbr/> then set <code>RP</code> equal
-to the smallest processed resolution that is larger than <code>RJ</code>.<wbr/> If
-there are no processed resolutions larger than <code>RJ</code>,<wbr/> then set <code>RJ</code> to
-the processed resolution closest to <code>RJ</code>.<wbr/></li>
-<li>If <code>RP</code> is greater than <code>RR</code>,<wbr/> set <code>RR</code> equal to <code>RP</code>.<wbr/> If there is
-no exact match for <code>RR == RP</code> (in particular there isn't an available
-raw resolution at the same size as <code>RP</code>),<wbr/> then set <code>RR</code> equal to
-or to the smallest raw resolution that is larger than <code>RP</code>.<wbr/> If
-there are no raw resolutions larger than <code>RP</code>,<wbr/> then set <code>RR</code> to
-the raw resolution closest to <code>RP</code>.<wbr/></li>
-<li>Look up the matching minimum frame durations in the property lists
-<a href="#static_android.scaler.availableJpegMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Min<wbr/>Durations</a>,<wbr/>
-<a href="#static_android.scaler.availableRawMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Min<wbr/>Durations</a>,<wbr/> and
-<a href="#static_android.scaler.availableProcessedMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Min<wbr/>Durations</a>.<wbr/>  This gives three
-minimum frame durations <code>FJ</code>,<wbr/> <code>FR</code>,<wbr/> and <code>FP</code>.<wbr/></li>
-<li>If a stream of requests do not use a JPEG stream,<wbr/> then the minimum
-supported frame duration for each request is <code>max(FR,<wbr/> FP)</code>.<wbr/></li>
-<li>If a stream of requests all use the JPEG stream,<wbr/> then the minimum
-supported frame duration for each request is <code>max(FR,<wbr/> FP,<wbr/> FJ)</code>.<wbr/></li>
-<li>If a mix of JPEG-using and non-JPEG-using requests is submitted by
-the application,<wbr/> then the HAL will have to delay JPEG-using requests
-whenever the JPEG encoder is still busy processing an older capture.<wbr/>
-This will happen whenever a JPEG-using request starts capture less
-than <code>FJ</code> <em>ns</em> after a previous JPEG-using request.<wbr/> The minimum
-supported frame duration will vary between the values calculated in
-#6 and #7.<wbr/></li>
+<li>Let the set of currently configured input/<wbr/>output streams
+be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by
+looking it up in <a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> (with
+its respective size/<wbr/>format).<wbr/> Let this set of frame durations be called
+<code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
+for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
+used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
 </ol>
+<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>),<wbr/> then the minimum frame duration
+computed in step 3 determines the steady-state frame rate that the application will
+get if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind
+of request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+with a single capture of a new request <code>Rstall</code> (which has at least
+one in-use stream with a non-zero stall time).<wbr/> As long as <code>Rstall</code> has the
+same minimum frame duration and all buffers from the previous <code>Rstall</code>
+have already been delivered,<wbr/> this will not cause a frame rate loss.<wbr/></p>
+<p>For more details about stalling,<wbr/> see
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
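+<p>As a rough illustration of rule 3 above (a sketch only; the names below are
+hypothetical and not part of the HAL interface), the per-request minimum frame
+duration is simply the maximum of the per-stream values looked up in
+<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a>:</p>
+<pre><code>// Sketch: minimum supported frame duration (ns) for a request R, given
+// the minimum frame durations of the streams used by R (one entry per
+// stream, looked up for that stream's size/format).
+static long minFrameDurationNs(long[] perStreamMinDurationsNs) {
+  long result = 0;
+  for (long d : perStreamMinDurationsNs) {
+    result = Math.max(result, d);
+  }
+  return result;
+}
+</code></pre>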
             </td>
           </tr>
 
@@ -11316,6 +12180,13 @@
 <p>android.<wbr/>sensor.<wbr/>max<wbr/>Frame<wbr/>Duration must be greater or equal to the
 android.<wbr/>sensor.<wbr/>exposure<wbr/>Time<wbr/>Range max value (since exposure time
 overrides frame duration).<wbr/></p>
+<p>The available minimum frame durations for JPEG must be no greater
+than those of the YUV_<wbr/>420_<wbr/>888/<wbr/>IMPLEMENTATION_<wbr/>DEFINED
+formats (for the same respective size).<wbr/></p>
+<p>Since JPEG processing is considered offline and can take longer than
+a single uncompressed capture,<wbr/> refer to
+android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations
+for details about encoding this scenario.<wbr/></p>
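+<p>As a sketch of these two constraints (illustrative only; the names below are
+not HAL symbols), a consistency check for a single output size might look like:</p>
+<pre><code>// Sketch: validate the constraints described above for one output size.
+static boolean frameDurationMetadataConsistent(long maxFrameDurationNs,
+                                               long maxExposureTimeNs,
+                                               long jpegMinDurationNs,
+                                               long yuvMinDurationNs) {
+  // The exposure time range maximum must not exceed the maximum
+  // frame duration (exposure time overrides frame duration).
+  if (maxExposureTimeNs > maxFrameDurationNs) {
+    return false;
+  }
+  // For the same size, JPEG must not have a larger minimum frame
+  // duration than YUV_420_888 / IMPLEMENTATION_DEFINED.
+  if (jpegMinDurationNs > yuvMinDurationNs) {
+    return false;
+  }
+  return true;
+}
+</code></pre>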
             </td>
           </tr>
 
@@ -11384,7 +12255,7 @@
                 <span class="entry_type_array">
                   2
                 </span>
-              <span class="entry_type_visibility"> [system as size]</span>
+              <span class="entry_type_visibility"> [public as size]</span>
 
 
             </td> <!-- entry_type -->
@@ -11413,8 +12284,9 @@
           </tr>
           <tr class="entry_cont">
             <td class="entry_details" colspan="5">
-              <p>The maximum output resolution for raw format in
-<a href="#static_android.scaler.availableRawSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Sizes</a> will be equal to this size.<wbr/></p>
+              <p>The maximum output resolution for the raw format must
+match this size in
+android.<wbr/>scaler.<wbr/>info.<wbr/>available<wbr/>Sizes<wbr/>Per<wbr/>Format.<wbr/></p>
             </td>
           </tr>
 
@@ -11562,264 +12434,6 @@
            <!-- end of entry -->
         
                 
-          <tr class="entry" id="static_android.sensor.calibrationTransform1">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Per-device calibration on top of color space
-transform 1</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.calibrationTransform2">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Per-device calibration on top of color space
-transform 2</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.colorTransform1">
-            <td class="entry_name" rowspan="3">
-              android.<wbr/>sensor.<wbr/>color<wbr/>Transform1
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Linear mapping from XYZ (D50) color space to
-reference linear sensor color,<wbr/> for first reference
-illuminant</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-          <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
-          </tr>
-          <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>Use as follows XYZ = inv(transform) * clip( (raw -
-black level(raw) ) /<wbr/> ( white level - max black level) ).<wbr/>
-At least in the simple case</p>
-            </td>
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.colorTransform2">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>color<wbr/>Transform2
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Linear mapping from XYZ (D50) color space to
-reference linear sensor color,<wbr/> for second reference
-illuminant</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.forwardMatrix1">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Used by DNG for better WB
-adaptation</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.forwardMatrix2">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">rational</span>
-                <span class="entry_type_container">x</span>
-
-                <span class="entry_type_array">
-                  9
-                </span>
-              <span class="entry_type_visibility"> [system]</span>
-                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Used by DNG for better WB
-adaptation</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-              </ul>
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
           <tr class="entry" id="static_android.sensor.maxAnalogSensitivity">
             <td class="entry_name" rowspan="3">
               android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity
@@ -11965,169 +12579,6 @@
            <!-- end of entry -->
         
                 
-          <tr class="entry" id="static_android.sensor.referenceIlluminant1">
-            <td class="entry_name" rowspan="3">
-              android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name entry_type_name_enum">byte</span>
-
-              <span class="entry_type_visibility"> [system]</span>
-
-                <ul class="entry_type_enum">
-                  <li>
-                    <span class="entry_type_enum_name">DAYLIGHT</span>
-                    <span class="entry_type_enum_value">1</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">FLUORESCENT</span>
-                    <span class="entry_type_enum_value">2</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">TUNGSTEN</span>
-                    <span class="entry_type_enum_value">3</span>
-                    <span class="entry_type_enum_notes"><p>Incandescent light</p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">FLASH</span>
-                    <span class="entry_type_enum_value">4</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">FINE_WEATHER</span>
-                    <span class="entry_type_enum_value">9</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">CLOUDY_WEATHER</span>
-                    <span class="entry_type_enum_value">10</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">SHADE</span>
-                    <span class="entry_type_enum_value">11</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT</span>
-                    <span class="entry_type_enum_value">12</span>
-                    <span class="entry_type_enum_notes"><p>D 5700 - 7100K</p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT</span>
-                    <span class="entry_type_enum_value">13</span>
-                    <span class="entry_type_enum_notes"><p>N 4600 - 5400K</p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT</span>
-                    <span class="entry_type_enum_value">14</span>
-                    <span class="entry_type_enum_notes"><p>W 3900 - 4500K</p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">WHITE_FLUORESCENT</span>
-                    <span class="entry_type_enum_value">15</span>
-                    <span class="entry_type_enum_notes"><p>WW 3200 - 3700K</p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">STANDARD_A</span>
-                    <span class="entry_type_enum_value">17</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">STANDARD_B</span>
-                    <span class="entry_type_enum_value">18</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">STANDARD_C</span>
-                    <span class="entry_type_enum_value">19</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">D55</span>
-                    <span class="entry_type_enum_value">20</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">D65</span>
-                    <span class="entry_type_enum_value">21</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">D75</span>
-                    <span class="entry_type_enum_value">22</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">D50</span>
-                    <span class="entry_type_enum_value">23</span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN</span>
-                    <span class="entry_type_enum_value">24</span>
-                  </li>
-                </ul>
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Light source used to define transform
-1</p>
-            </td>
-
-            <td class="entry_units">
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_DNG">DNG</a></li>
-                  <li><a href="#tag_EXIF">EXIF</a></li>
-              </ul>
-            </td>
-
-          </tr>
-          <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
-          </tr>
-          <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>[EXIF LightSource tag] Must all these be
-supported? Need CCT for each!</p>
-            </td>
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
-          <tr class="entry" id="static_android.sensor.referenceIlluminant2">
-            <td class="entry_name" rowspan="1">
-              android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2
-            </td>
-            <td class="entry_type">
-                <span class="entry_type_name">byte</span>
-
-              <span class="entry_type_visibility"> [system]</span>
-
-
-            </td> <!-- entry_type -->
-
-            <td class="entry_description">
-              <p>Light source used to define transform
-2</p>
-            </td>
-
-            <td class="entry_units">
-              Same as illuminant 1
-            </td>
-
-            <td class="entry_range">
-            </td>
-
-            <td class="entry_tags">
-            </td>
-
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
-           <!-- end of entry -->
-        
-                
           <tr class="entry" id="static_android.sensor.availableTestPatternModes">
             <td class="entry_name" rowspan="1">
               android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes
@@ -12260,7 +12711,7 @@
 
             <td class="entry_range">
               <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-android.<wbr/>scaler.<wbr/>available*Min<wbr/>Durations.<wbr/> The duration
+<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a>.<wbr/> The duration
 is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
             </td>
 
@@ -12307,59 +12758,42 @@
 largest requested stream resolution.<wbr/></li>
 <li>Using more than one output stream in a request does not affect the
 frame duration.<wbr/></li>
-<li>JPEG streams act like processed YUV streams in requests for which
-they are not included; in requests in which they are directly
-referenced,<wbr/> they act as JPEG streams.<wbr/> This is because supporting a
-JPEG stream requires the underlying YUV data to always be ready for
-use by a JPEG encoder,<wbr/> but the encoder will only be used (and impact
-frame duration) on requests that actually reference a JPEG stream.<wbr/></li>
-<li>The JPEG processor can run concurrently to the rest of the camera
-pipeline,<wbr/> but cannot process more than 1 capture at a time.<wbr/></li>
+<li>Certain format-streams may need to do additional background processing
+before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
+can run concurrently to the rest of the camera pipeline,<wbr/> but
+cannot process more than 1 capture at a time.<wbr/></li>
 </ul>
 <p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the android.<wbr/>scaler.<wbr/>available*Min<wbr/>Durations fields.<wbr/>
+is provided via the <a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> field.<wbr/>
 These are used to determine the maximum frame rate /<wbr/> minimum frame
 duration that is possible for a given stream configuration.<wbr/></p>
 <p>Specifically,<wbr/> the application can use the following rules to
-determine the minimum frame duration it can request from the HAL
+determine the minimum frame duration it can request from the camera
 device:</p>
 <ol>
-<li>Given the application's currently configured set of output
-streams,<wbr/> <code>S</code>,<wbr/> divide them into three sets: streams in a JPEG format
-<code>SJ</code>,<wbr/> streams in a raw sensor format <code>SR</code>,<wbr/> and the rest ('processed')
-<code>SP</code>.<wbr/></li>
-<li>For each subset of streams,<wbr/> find the largest resolution (by pixel
-count) in the subset.<wbr/> This gives (at most) three resolutions <code>RJ</code>,<wbr/>
-<code>RR</code>,<wbr/> and <code>RP</code>.<wbr/></li>
-<li>If <code>RJ</code> is greater than <code>RP</code>,<wbr/> set <code>RP</code> equal to <code>RJ</code>.<wbr/> If there is
-no exact match for <code>RP == RJ</code> (in particular there isn't an available
-processed resolution at the same size as <code>RJ</code>),<wbr/> then set <code>RP</code> equal
-to the smallest processed resolution that is larger than <code>RJ</code>.<wbr/> If
-there are no processed resolutions larger than <code>RJ</code>,<wbr/> then set <code>RJ</code> to
-the processed resolution closest to <code>RJ</code>.<wbr/></li>
-<li>If <code>RP</code> is greater than <code>RR</code>,<wbr/> set <code>RR</code> equal to <code>RP</code>.<wbr/> If there is
-no exact match for <code>RR == RP</code> (in particular there isn't an available
-raw resolution at the same size as <code>RP</code>),<wbr/> then set <code>RR</code> equal to
-or to the smallest raw resolution that is larger than <code>RP</code>.<wbr/> If
-there are no raw resolutions larger than <code>RP</code>,<wbr/> then set <code>RR</code> to
-the raw resolution closest to <code>RP</code>.<wbr/></li>
-<li>Look up the matching minimum frame durations in the property lists
-<a href="#static_android.scaler.availableJpegMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Min<wbr/>Durations</a>,<wbr/>
-<a href="#static_android.scaler.availableRawMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Min<wbr/>Durations</a>,<wbr/> and
-<a href="#static_android.scaler.availableProcessedMinDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Min<wbr/>Durations</a>.<wbr/>  This gives three
-minimum frame durations <code>FJ</code>,<wbr/> <code>FR</code>,<wbr/> and <code>FP</code>.<wbr/></li>
-<li>If a stream of requests do not use a JPEG stream,<wbr/> then the minimum
-supported frame duration for each request is <code>max(FR,<wbr/> FP)</code>.<wbr/></li>
-<li>If a stream of requests all use the JPEG stream,<wbr/> then the minimum
-supported frame duration for each request is <code>max(FR,<wbr/> FP,<wbr/> FJ)</code>.<wbr/></li>
-<li>If a mix of JPEG-using and non-JPEG-using requests is submitted by
-the application,<wbr/> then the HAL will have to delay JPEG-using requests
-whenever the JPEG encoder is still busy processing an older capture.<wbr/>
-This will happen whenever a JPEG-using request starts capture less
-than <code>FJ</code> <em>ns</em> after a previous JPEG-using request.<wbr/> The minimum
-supported frame duration will vary between the values calculated in
-#6 and #7.<wbr/></li>
+<li>Let the set of currently configured input/<wbr/>output streams
+be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by
+looking it up in <a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> (with
+its respective size/<wbr/>format).<wbr/> Let this set of frame durations be called
+<code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
+for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
+used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
 </ol>
+<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>),<wbr/> then the minimum frame duration
+computed in step 3 determines the steady-state frame rate that the application will
+get if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind
+of request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+with a single capture of a new request <code>Rstall</code> (which has at least
+one in-use stream with a non-zero stall time).<wbr/> As long as <code>Rstall</code> has the
+same minimum frame duration and all buffers from the previous <code>Rstall</code>
+have already been delivered,<wbr/> this will not cause a frame rate loss.<wbr/></p>
+<p>For more details about stalling,<wbr/> see
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
             </td>
           </tr>
 
@@ -12504,6 +12938,294 @@
            <!-- end of entry -->
         
                 
+          <tr class="entry" id="dynamic_android.sensor.referenceIlluminant">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_value">1</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_value">2</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TUNGSTEN</span>
+                    <span class="entry_type_enum_value">3</span>
+                    <span class="entry_type_enum_notes"><p>Incandescent light</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLASH</span>
+                    <span class="entry_type_enum_value">4</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FINE_WEATHER</span>
+                    <span class="entry_type_enum_value">9</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CLOUDY_WEATHER</span>
+                    <span class="entry_type_enum_value">10</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_value">11</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">12</span>
+                    <span class="entry_type_enum_notes"><p>D 5700 - 7100K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">13</span>
+                    <span class="entry_type_enum_notes"><p>N 4600 - 5400K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">14</span>
+                    <span class="entry_type_enum_notes"><p>W 3900 - 4500K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">15</span>
+                    <span class="entry_type_enum_notes"><p>WW 3200 - 3700K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_A</span>
+                    <span class="entry_type_enum_value">17</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_B</span>
+                    <span class="entry_type_enum_value">18</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_C</span>
+                    <span class="entry_type_enum_value">19</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D55</span>
+                    <span class="entry_type_enum_value">20</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D65</span>
+                    <span class="entry_type_enum_value">21</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D75</span>
+                    <span class="entry_type_enum_value">22</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D50</span>
+                    <span class="entry_type_enum_value">23</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN</span>
+                    <span class="entry_type_enum_value">24</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A reference illumination source roughly matching the current scene
+illumination,<wbr/> which is used to describe the sensor color space
+transformations.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DNG">DNG</a></li>
+                  <li><a href="#tag_EXIF">EXIF</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The values in this tag correspond to the values defined for the
+EXIF LightSource tag.<wbr/> These illuminants are standard light sources
+that are often used for calibrating camera devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.calibrationTransform">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A per-device calibration transform matrix to be applied after the
+color space transform when rendering the raw image buffer.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DNG">DNG</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a per-device calibration transform that maps colors
+from reference camera color space (i.<wbr/>e.<wbr/> the "golden module"
+colorspace) into this camera device's linear native sensor color
+space for the current scene illumination and white balance choice.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.colorTransform">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>sensor.<wbr/>color<wbr/>Transform
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms color values from CIE XYZ color space to
+reference camera color space when rendering the raw image buffer.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DNG">DNG</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a color transform matrix that maps colors from the CIE
+XYZ color space to the reference camera raw color space (i.<wbr/>e.<wbr/> the
+"golden module" colorspace) for the current scene illumination and
+white balance choice.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.forwardMatrix">
+            <td class="entry_name" rowspan="3">
+              android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms white balanced camera colors to the CIE XYZ
+colorspace with a D50 whitepoint.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DNG">DNG</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
+a color transform matrix that maps a unit vector in the linear native
+sensor color space to the D50 whitepoint in CIE XYZ color space.<wbr/></p>
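+<p>As a rough sketch (names are illustrative, not framework API), applying the
+forward matrix to a white-balanced camera pixel is a plain 3x3 matrix-vector
+multiply, assuming the rational entries have been converted to doubles:</p>
+<pre><code>// Sketch: map a white-balanced camera (R, G, B) value to CIE XYZ (D50)
+// using a row-major 3x3 forward matrix.
+static double[] cameraRgbToXyzD50(double[] m, double r, double g, double b) {
+  double x = m[0] * r + m[1] * g + m[2] * b;
+  double y = m[3] * r + m[4] * g + m[5] * b;
+  double z = m[6] * r + m[7] * g + m[8] * b;
+  return new double[] { x, y, z };
+}
+</code></pre>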
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
           <tr class="entry" id="dynamic_android.sensor.neutralColorPoint">
             <td class="entry_name" rowspan="3">
               android.<wbr/>sensor.<wbr/>neutral<wbr/>Color<wbr/>Point
@@ -15256,7 +15978,7 @@
 
                 
           <tr class="entry" id="static_android.info.supportedHardwareLevel">
-            <td class="entry_name" rowspan="1">
+            <td class="entry_name" rowspan="5">
               android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level
             </td>
             <td class="entry_type">
@@ -15276,14 +15998,7 @@
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>The camera 3 HAL device can implement one of two possible
-operational modes; limited and full.<wbr/> Full support is
-expected from new higher-end devices.<wbr/> Limited mode has
-hardware requirements roughly in line with those for a
-camera HAL device v1 implementation,<wbr/> and is expected from
-older or inexpensive devices.<wbr/> Full is a strict superset of
-limited,<wbr/> and they share the same essential operational flow.<wbr/></p>
-<p>For full details refer to "S3.<wbr/> Operational Modes" in camera3.<wbr/>h</p>
+              <p>Generally classifies the overall set of the camera device functionality.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -15297,7 +16012,40 @@
             </td>
 
           </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Camera devices will come in two flavors: LIMITED and FULL.<wbr/></p>
+<p>A FULL device has the most support possible and will enable the
+widest range of use cases such as:</p>
+<ul>
+<li>30 FPS at maximum resolution (== sensor resolution)</li>
+<li>Per frame control</li>
+<li>Manual sensor control</li>
+<li>Zero Shutter Lag (ZSL)</li>
+</ul>
+<p>A LIMITED device may have some or none of the above characteristics.<wbr/>
+To find out more,<wbr/> refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
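+<p>For example, in the public camera2 Java API (assuming the current framework
+names, which postdate this document and surface this property as
+<code>INFO_SUPPORTED_HARDWARE_LEVEL</code>), an application could branch its
+feature set like this:</p>
+<pre><code>import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraMetadata;
+
+// Sketch: decide whether the FULL feature set can be used.
+class HardwareLevelCheck {
+  static boolean isFullDevice(CameraCharacteristics characteristics) {
+    Integer level = characteristics.get(
+        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+    if (level == null) {
+      return false;
+    }
+    return level == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
+  }
+}
+</code></pre>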
+            </td>
+          </tr>
 
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The camera 3 HAL device can implement one of two possible
+operational modes; limited and full.<wbr/> Full support is
+expected from new higher-end devices.<wbr/> Limited mode has
+hardware requirements roughly in line with those for a
+camera HAL device v1 implementation,<wbr/> and is expected from
+older or inexpensive devices.<wbr/> Full is a strict superset of
+limited,<wbr/> and they share the same essential operational flow.<wbr/></p>
+<p>For full details refer to "S3.<wbr/> Operational Modes" in camera3.<wbr/>h</p>
+            </td>
+          </tr>
 
           <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
            <!-- end of entry -->
@@ -15902,6 +16650,8 @@
           <li><a href="#static_android.scaler.availableProcessedMinDurations">android.scaler.availableProcessedMinDurations</a> (static)</li>
           <li><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a> (static)</li>
           <li><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a> (static)</li>
+          <li><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a> (static)</li>
+          <li><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a> (static)</li>
           <li><a href="#controls_android.sensor.frameDuration">android.sensor.frameDuration</a> (controls)</li>
           <li><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a> (static)</li>
           <li><a href="#static_android.sensor.info.maxFrameDuration">android.sensor.info.maxFrameDuration</a> (static)</li>
@@ -15968,7 +16718,7 @@
           <li><a href="#static_android.flash.colorTemperature">android.flash.colorTemperature</a> (static)</li>
           <li><a href="#static_android.flash.maxEnergy">android.flash.maxEnergy</a> (static)</li>
           <li><a href="#controls_android.geometric.strength">android.geometric.strength</a> (controls)</li>
-          <li><a href="#static_android.hotPixel.info.map">android.hotPixel.info.map</a> (static)</li>
+          <li><a href="#dynamic_android.hotPixel.map">android.hotPixel.map</a> (dynamic)</li>
           <li><a href="#static_android.lens.opticalAxisAngle">android.lens.opticalAxisAngle</a> (static)</li>
           <li><a href="#controls_android.shading.strength">android.shading.strength</a> (controls)</li>
         </ul>
@@ -15983,14 +16733,11 @@
           <li><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a> (static)</li>
           <li><a href="#static_android.sensor.info.whiteLevel">android.sensor.info.whiteLevel</a> (static)</li>
           <li><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a> (static)</li>
-          <li><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a> (static)</li>
-          <li><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a> (static)</li>
-          <li><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a> (static)</li>
-          <li><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a> (static)</li>
-          <li><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a> (static)</li>
-          <li><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a> (static)</li>
           <li><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a> (static)</li>
-          <li><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.referenceIlluminant">android.sensor.referenceIlluminant</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.calibrationTransform">android.sensor.calibrationTransform</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.colorTransform">android.sensor.colorTransform</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.forwardMatrix">android.sensor.forwardMatrix</a> (dynamic)</li>
           <li><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a> (dynamic)</li>
           <li><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a> (dynamic)</li>
           <li><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a> (dynamic)</li>
@@ -15999,7 +16746,7 @@
       </li> <!-- tag_DNG -->
       <li id="tag_EXIF">EXIF - None
         <ul class="tags_entries">
-          <li><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.referenceIlluminant">android.sensor.referenceIlluminant</a> (dynamic)</li>
         </ul>
       </li> <!-- tag_EXIF -->
       <li id="tag_HAL2">HAL2 - 
diff --git a/camera/docs/metadata_properties.xml b/camera/docs/metadata_properties.xml
index 7b111f2..486caf4 100644
--- a/camera/docs/metadata_properties.xml
+++ b/camera/docs/metadata_properties.xml
@@ -1779,39 +1779,76 @@
     </section>
     <section name="hotPixel">
       <controls>
-        <entry name="mode" type="byte" enum="true">
+        <entry name="mode" type="byte" visibility="public" enum="true">
           <enum>
             <value>OFF
-            <notes>No hot pixel correction can be
-            applied</notes></value>
+              <notes>
+              The frame rate must not be reduced relative to sensor raw output
+              for this option.
+
+              No hot pixel correction is applied.
+              </notes>
+            </value>
             <value>FAST
-            <notes>Frame rate must not be reduced compared to raw
-            Bayer output</notes></value>
+              <notes>
+              The frame rate must not be reduced relative to sensor raw output
+              for this option.
+
+              Hot pixel correction is applied.
+              </notes>
+            </value>
             <value>HIGH_QUALITY
-            <notes>Frame rate may be reduced by high
-            quality</notes></value>
+              <notes>
+              The frame rate may be reduced relative to sensor raw output
+              for this option.
+
+              A high-quality hot pixel correction is applied.
+              </notes>
+            </value>
           </enum>
-          <description>Set operational mode for hot pixel
-          correction</description>
+          <description>
+          Set operational mode for hot pixel correction.
+
+          Hotpixel correction interpolates out, or otherwise removes, pixels
+          that do not accurately encode the incoming light (i.e. pixels that
+          are stuck at an arbitrary value).
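+
+          For example (assuming the public camera2 Java API names, which
+          postdate this definition and surface this control as
+          `CaptureRequest.HOT_PIXEL_MODE`), an application could request fast
+          correction on a capture request:
+
+              import android.hardware.camera2.CameraMetadata;
+              import android.hardware.camera2.CaptureRequest;
+
+              // Sketch: enable fast hot pixel correction on a request builder.
+              static void enableFastHotPixelCorrection(CaptureRequest.Builder builder) {
+                builder.set(CaptureRequest.HOT_PIXEL_MODE,
+                            CameraMetadata.HOT_PIXEL_MODE_FAST);
+              }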
+          </description>
           <tag id="V1" />
         </entry>
       </controls>
-      <static>
-        <namespace name="info">
-          <entry name="map" type="int32"
-          type_notes="list of coordinates based on android.sensor.pixelArraySize"
-          container="array">
-            <array>
-              <size>2</size>
-              <size>n</size>
-            </array>
-            <description>Location of hot/defective pixels on
-            sensor</description>
-            <tag id="ADV" />
-          </entry>
-        </namespace>
-      </static>
       <dynamic>
+        <entry name="map" type="int32" visibility="public"
+        type_notes="list of coordinates based on android.sensor.pixelArraySize"
+        container="array">
+          <array>
+            <size>2</size>
+            <size>n</size>
+          </array>
+          <description>
+          List of `(x, y)` coordinates of hot/defective pixels on the
+          sensor, where `(x, y)` lies between `(0, 0)`, which is the top-left
+          of the pixel array, and the (width, height) of the pixel array given in
+          android.sensor.info.pixelArraySize.  This may include hot pixels
+          that lie outside of the active array bounds given by
+          android.sensor.activeArraySize.
+          </description>
+          <range>
+          n &lt;= number of pixels on the sensor.
+          The `(x, y)` coordinates must be bounded by
+          android.sensor.info.pixelArraySize.
+          </range>
+          <hal_details>
+          A hotpixel map contains the coordinates of pixels on the camera
+          sensor that do not report valid values (usually due to defects in
+          the camera sensor). This includes pixels that are stuck at certain
+          values, or have a response that does not accurately encode the
+          incoming light from the scene.
+
+          To avoid performance issues, there should be significantly fewer hot
+          pixels than actual pixels on the camera sensor.
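+
+          As a rough sketch of the coordinate bounds above (names here are
+          hypothetical, not HAL symbols), assuming the `(x, y)` pairs from the
+          `2 x n` array have been unpacked into parallel arrays:
+
+              // Sketch: every hot pixel coordinate must lie within the
+              // pixel array given by android.sensor.info.pixelArraySize.
+              static boolean hotPixelMapInBounds(int[] xs, int[] ys,
+                                                 int pixelArrayWidth,
+                                                 int pixelArrayHeight) {
+                for (int x : xs) {
+                  if (x >= pixelArrayWidth) return false;
+                }
+                for (int y : ys) {
+                  if (y >= pixelArrayHeight) return false;
+                }
+                return true;
+              }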
+          </hal_details>
+          <tag id="ADV" />
+        </entry>
         <clone entry="android.hotPixel.mode" kind="controls">
           <tag id="V1" />
         </clone>
@@ -1893,10 +1930,10 @@
           * The sizes will be sorted by increasing pixel area (width x height).
           If several resolutions have the same area, they will be sorted by increasing width.
           * The aspect ratio of the largest thumbnail size will be same as the
-          aspect ratio of largest size in android.scaler.availableJpegSizes.
+          aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
           The largest size is defined as the size that has the largest pixel area
           in a given size list.
-          * Each size in android.scaler.availableJpegSizes will have at least
+          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
           one corresponding size that has the same aspect ratio in availableThumbnailSizes,
           and vice versa.
           * All non (0, 0) sizes will have non-zero widths and heights.</details>
@@ -2731,6 +2768,272 @@
           the metadata via later partial results.
           </details>
         </entry>
+        <entry name="availableCapabilities" type="byte" visibility="public"
+          enum="true">
+          <enum>
+            <value>BACKWARD_COMPATIBLE
+              <notes>The minimal set of capabilities that every camera
+                device (regardless of android.info.supportedHardwareLevel)
+                will support.
+
+                The full set of features supported by this capability makes
+                the camera2 api backwards compatible with the camera1
+                (android.hardware.Camera) API.
+
+                TODO: @hide this. Doesn't really mean anything except
+                act as a catch-all for all the 'base' functionality.
+              </notes>
+            </value>
+            <value>OPTIONAL
+              <notes>This is a catch-all capability to include all other
+              tags or functionality not encapsulated by one of the other
+              capabilities.
+
+              A typical example is all tags marked 'optional'.
+
+              TODO: @hide. We may not need this if we @hide all the optional
+              tags not belonging to a capability.
+              </notes>
+            </value>
+            <value>MANUAL_SENSOR
+              <notes>
+              The camera device can be manually controlled (3A algorithms such
+              as auto-exposure and auto-focus can be
+              bypassed). This includes, but is not limited to:
+
+              * Manual exposure control
+                  * android.sensor.exposureTime
+                  * android.sensor.info.exposureTimeRange
+              * Manual sensitivity control
+                  * android.sensor.sensitivity
+                  * android.sensor.info.sensitivityRange
+                  * android.sensor.baseGainFactor
+              * Manual lens control
+                  * android.lens.*
+              * Manual flash control
+                  * android.flash.*
+              * Manual black level locking
+                  * android.blackLevel.lock
+
+              If any of the above 3A algorithms are enabled, then the camera
+              device will accurately report the values applied by 3A in the
+              result.
+              </notes>
+            </value>
+            <value optional="true">GCAM
+              <notes>
+              TODO: This should be @hide
+
+              * Manual tonemap control
+                    * android.tonemap.curveBlue
+                    * android.tonemap.curveGreen
+                    * android.tonemap.curveRed
+                    * android.tonemap.mode
+                    * android.tonemap.maxCurvePoints
+              * Manual white balance control
+                  * android.colorCorrection.transform
+                  * android.colorCorrection.gains
+              * Lens shading map information
+                    * android.statistics.lensShadingMap
+                    * android.lens.info.shadingMapSize
+
+              If auto white balance is enabled, then the camera device
+              will accurately report the values applied by AWB in the result.
+
+              The camera device will also support everything in MANUAL_SENSOR
+              except manual lens control and manual flash control.
+              </notes>
+            </value>
+            <value>ZSL
+              <notes>
+              The camera device supports the Zero Shutter Lag use case.
+
+              * At least one input stream can be used.
+              * RAW_OPAQUE is supported as an output/input format
+              * Using RAW_OPAQUE does not cause a frame rate drop
+                relative to the sensor's maximum capture rate (at that
+                resolution).
+              * RAW_OPAQUE will be reprocessable into both YUV_420_888
+                and JPEG formats.
+              * The maximum available resolution for RAW_OPAQUE streams
+                (both input/output) will match the maximum available
+                resolution of JPEG streams.
+              </notes>
+            </value>
+            <value optional="true">DNG
+              <notes>
+              The camera device supports outputting RAW buffers that can be
+              saved offline into a DNG format. It can reprocess DNG
+              files (produced from the same camera device) back into YUV.
+
+              * At least one input stream can be used.
+              * RAW16 is supported as output/input format.
+              * RAW16 is reprocessable into both YUV_420_888 and JPEG
+                formats.
+              * The maximum available resolution for RAW16 streams (both
+                input/output) will match the value in
+                android.sensor.info.pixelArraySize.
+              * All DNG-related optional metadata entries are provided
+                by the camera device.
+              </notes>
+            </value>
+          </enum>
+          <description>List of capabilities that the camera device
+          advertises as fully supporting.</description>
+          <details>
+          A capability is a contract that the camera device makes in order
+          to be able to satisfy one or more use cases.
+
+          Listing a capability guarantees that the whole set of features
+          required to support a common use case will all be available.
+
+          Using a subset of the functionality provided by an unsupported
+          capability may be possible on a specific camera device implementation;
+          to do this, query each of android.request.availableRequestKeys,
+          android.request.availableResultKeys, and
+          android.request.availableCharacteristicsKeys.
+
+          XX: Maybe these should go into android.info.supportedHardwareLevel
+          as a table instead?
+
+          The following capabilities are guaranteed to be available on
+          android.info.supportedHardwareLevel `==` FULL devices:
+
+          * MANUAL_SENSOR
+          * ZSL
+
+          Other capabilities may be available on either FULL or LIMITED
+          devices, but the application should query this field to be sure.
+          </details>
+          <hal_details>
+          Additional constraint details per-capability will be available
+          in the Compatibility Test Suite.
+
+          BACKWARD_COMPATIBLE capability requirements are not explicitly listed.
+          Instead refer to "BC" tags and the camera CTS tests in the
+          android.hardware.cts package.
+
+          Listed controls that can be either request or result (e.g.
+          android.sensor.exposureTime) must be available both in the
+          request and the result in order to be considered
+          capability-compliant.
+
+          For example, if the HAL claims to support the MANUAL_SENSOR
+          capability, then exposure time must be configurable via the request
+          _and_ the actual exposure applied must be available via
+          the result.
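+
+          As an illustrative (non-normative) sketch, a HAL that supports
+          manual sensor control and ZSL on top of the baseline could publish
+          this list with the camera_metadata C API roughly as follows (the
+          `static_metadata` handle is hypothetical):
+
+              uint8_t caps[] = {
+                  ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+                  ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
+                  ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ZSL,
+              };
+              add_camera_metadata_entry(static_metadata,
+                      ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+                      caps, sizeof(caps) / sizeof(caps[0]));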
+          </hal_details>
+        </entry>
+        <entry name="availableRequestKeys" type="int32" visibility="hidden"
+          container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with CaptureRequest.</description>
+
+          <details>Attempting to set a key into a CaptureRequest that is not
+          listed here will result in an invalid request and will be rejected
+          by the camera device.
+
+          This field can be used to query the feature set of a camera device
+          at a more granular level than capabilities. This is especially
+          important for optional keys that are not listed under any capability
+          in android.request.availableCapabilities.
+
+          TODO: This should be used by #getAvailableCaptureRequestKeys.
+          </details>
+          <hal_details>
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not consume any request tags that are not listed either
+          here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via CameraCharacteristics#getAvailableCaptureRequestKeys.
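+
+          For illustration only (an arbitrary subset of tags, again with a
+          hypothetical `static_metadata` handle), the list could be populated
+          as follows:
+
+              int32_t request_keys[] = {
+                  ANDROID_CONTROL_MODE,
+                  ANDROID_SENSOR_EXPOSURE_TIME,
+                  ANDROID_SENSOR_SENSITIVITY,
+              };
+              add_camera_metadata_entry(static_metadata,
+                      ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+                      request_keys,
+                      sizeof(request_keys) / sizeof(request_keys[0]));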
+          </hal_details>
+        </entry>
+        <entry name="availableResultKeys" type="int32" visibility="hidden"
+          container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with CaptureResult.</description>
+
+          <details>Attempting to get a key from a CaptureResult that is not
+          listed here will always return a `null` value. Getting a key from
+          a CaptureResult that is listed here must never return a `null`
+          value.
+
+          The following keys may return `null` unless they are enabled:
+
+          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
+
+          (Those sometimes-null keys should nevertheless be listed here
+          if they are available.)
+
+          This field can be used to query the feature set of a camera device
+          at a more granular level than capabilities. This is especially
+          important for optional keys that are not listed under any capability
+          in android.request.availableCapabilities.
+
+          TODO: This should be used by #getAvailableCaptureResultKeys.
+          </details>
+          <hal_details>
+          Tags listed here must always have an entry in the result metadata,
+          even if that entry has 0 elements. Only array-type tags (e.g. lists,
+          matrices, strings) are allowed to have 0 elements.
+
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not produce any result tags that are not listed either
+          here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via CameraCharacteristics#getAvailableCaptureResultKeys.
+          </hal_details>
+        </entry>
+        <entry name="availableCharacteristicsKeys" type="int32" visibility="hidden"
+          container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with CameraCharacteristics.</description>
+          <details>This entry follows the same rules as
+          android.request.availableResultKeys (except that it applies for
+          CameraCharacteristics instead of CaptureResult). See above for more
+          details.
+
+          TODO: This should be used by CameraCharacteristics#getKeys.
+          </details>
+          <hal_details>
+          Tags listed here must always have an entry in the static info metadata,
+          even if that entry has 0 elements. Only array-type tags (e.g. lists,
+          matrices, strings) are allowed to have 0 elements.
+
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not have any tags in its static info that are not listed
+          either here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via CameraCharacteristics#getKeys.
+          </hal_details>
+        </entry>
       </static>
     </section>
     <section name="scaler">
@@ -2846,7 +3149,7 @@
             </value>
           </enum>
           <description>The list of image formats that are supported by this
-          camera device.</description>
+          camera device for output streams.</description>
           <details>
           All camera devices will support JPEG and YUV_420_888 formats.
 
@@ -2883,6 +3186,7 @@
           for each resolution in android.scaler.availableJpegSizes.
           </description>
           <units>ns</units>
+          <range>**Deprecated**. Do not use. TODO: Remove property.</range>
           <details>
           This corresponds to the minimum steady-state frame duration when only
           that JPEG stream is active and captured in a burst, with all
@@ -2900,6 +3204,7 @@
             <size>2</size>
           </array>
           <description>The JPEG resolutions that are supported by this camera device.</description>
+          <range>**Deprecated**. Do not use. TODO: Remove property.</range>
           <details>
           The resolutions are listed as `(width, height)` pairs. All camera devices will support
           sensor maximum resolution (defined by android.sensor.info.activeArraySize).
@@ -2927,9 +3232,9 @@
           <description>For each available processed output size (defined in
           android.scaler.availableProcessedSizes), this property lists the
           minimum supportable frame duration for that size.
-
           </description>
           <units>ns</units>
+          <range>**Deprecated**. Do not use. TODO: Remove property.</range>
           <details>
           This should correspond to the frame duration when only that processed
           stream is active, with all processing (typically in android.*.mode)
@@ -2950,6 +3255,7 @@
           processed output streams, such as YV12, NV12, and
           platform opaque YUV/RGB streams to the GPU or video
           encoders.</description>
+          <range>**Deprecated**. Do not use. TODO: Remove property.</range>
           <details>
           The resolutions are listed as `(width, height)` pairs.
 
@@ -2990,6 +3296,7 @@
           supportable frame duration for that size.
           </description>
           <units>ns</units>
+          <range>**Deprecated**. Do not use. TODO: Remove property.</range>
           <details>
           Should correspond to the frame duration when only the raw stream is
           active.
@@ -3008,13 +3315,287 @@
           <description>The resolutions available for use with raw
           sensor output streams, listed as width,
           height</description>
-          <range>Must include: - sensor maximum resolution</range>
+          <range>**Deprecated**. Do not use. TODO: Remove property.
+          Must include: - sensor maximum resolution.</range>
         </entry>
       </static>
       <dynamic>
         <clone entry="android.scaler.cropRegion" kind="controls">
         </clone>
       </dynamic>
+      <static>
+        <entry name="availableInputOutputFormatsMap" type="int32"
+        visibility="public"
+        container="array" typedef="imageFormat">
+          <array>
+            <size>n</size>
+          </array>
+          <description>The mapping of image formats that are supported by this
+          camera device for input streams, to their corresponding output formats.
+          </description>
+          <range>See android.scaler.availableFormats for enum definitions.</range>
+          <details>
+          All camera devices with android.request.maxNumInputStreams of at
+          least 1 will have at least one
+          available input format.
+
+          The camera device will support the following map of formats,
+          if its dependent capability is supported:
+
+            Input Format  | Output Format    | Capability
+          :---------------|:-----------------|:----------
+          RAW_OPAQUE      | JPEG             | ZSL
+          RAW_OPAQUE      | YUV_420_888      | ZSL
+          RAW_OPAQUE      | RAW16            | DNG
+          RAW16           | YUV_420_888      | DNG
+          RAW16           | JPEG             | DNG
+
+          For ZSL-capable camera devices, using the RAW_OPAQUE format
+          as either input or output will never hurt maximum frame rate (i.e.
+          android.scaler.availableStallDurations will not have RAW_OPAQUE).
+
+          Attempting to configure an input stream with output streams not
+          listed as available in this map is not valid.
+
+          TODO: Add java type mapping for this property.
+          </details>
+          <hal_details>
+          This value is encoded as a variable-size array-of-arrays.
+          The inner array always contains `[format, length, ...]` where
+          `...` has `length` elements. Inner arrays are packed back-to-back
+          until all elements of the metadata entry have been consumed.
+
+          A code sample to read/write this encoding (with a device that
+          supports reprocessing RAW_OPAQUE to RAW16, YUV_420_888, and JPEG,
+          and reprocessing RAW16 to YUV_420_888 and JPEG):
+
+              // reading
+              int32_t* contents = &amp;entry.i32[0];
+              for (size_t i = 0; i &lt; entry.count; ) {
+                  int32_t format = contents[i++];
+                  int32_t length = contents[i++];
+                  int32_t output_formats[length];
+                  memcpy(&amp;output_formats[0], &amp;contents[i],
+                         length * sizeof(int32_t));
+                  i += length;
+              }
+
+              // writing (static example, DNG+ZSL)
+              int32_t contents[] = {
+                RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
+                RAW16, 2, YUV_420_888, BLOB,
+              };
+              update_camera_metadata_entry(metadata, index, &amp;contents[0],
+                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
+
+          If the HAL claims to support any of the capabilities listed in the
+          above details, then it must also support all the input-output
+          combinations listed for that capability. It can optionally support
+          additional formats if it so chooses.
+
+          Refer to android.scaler.availableFormats for the enum values
+          which correspond to HAL_PIXEL_FORMAT_* in
+          system/core/include/system/graphics.h.
+          </hal_details>
+        </entry>
+        <entry name="availableStreamConfigurations" type="int32" visibility="public"
+          enum="true" container="array">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <enum>
+            <value>OUTPUT</value>
+            <value>INPUT</value>
+          </enum>
+          <description>The available stream configurations that this
+          camera device supports
+          (i.e. format, width, height, output/input stream).
+          </description>
+          <details>
+          The configurations are listed as `(format, width, height, input?)`
+          tuples.
+
+          All camera devices will support sensor maximum resolution (defined by
+          android.sensor.info.activeArraySize) for the JPEG format.
+
+          For a given use case, the actual maximum supported resolution
+          may be lower than what is listed here, depending on the destination
+          Surface for the image data. For example, for recording video,
+          the video encoder chosen may have a maximum size limit (e.g. 1080p)
+          smaller than what the camera (e.g. maximum resolution is 3264x2448)
+          can provide.
+
+          Please reference the documentation for the image data destination to
+          check if it limits the maximum size for image data.
+
+          Not all output formats may be supported in a configuration with
+          an input stream of a particular format. For more details, see
+          android.scaler.availableInputOutputFormatsMap.
+
+          The following table describes the minimum required output stream
+          configurations based on the hardware level
+          (android.info.supportedHardwareLevel):
+
+          Format         | Size                                         | Hardware Level | Notes
+          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+          JPEG           | android.sensor.info.activeArraySize          | Any            |
+          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+          JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
+          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
+          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
+          YUV_420_888    | all output sizes available for JPEG          | FULL           |
+          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
+          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
+
+          Refer to android.request.availableCapabilities for additional
+          mandatory stream configurations on a per-capability basis.
+          </details>
+          <hal_details>
+          It is recommended (but not mandatory) to also include half and
+          quarter of the sensor maximum resolution for the JPEG format
+          (regardless of hardware level).
+
+          (The following restates the requirements of the table above.)
+
+          The HAL must include the sensor maximum resolution (defined by
+          android.sensor.info.activeArraySize).
+
+          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+          here as output streams.
+
+          It must also include each resolution below, if it is smaller than or
+          equal to the sensor maximum resolution (for both the YUV_420_888 and
+          JPEG formats), as output streams:
+
+          * 240p (320 x 240)
+          * 480p (640 x 480)
+          * 720p (1280 x 720)
+          * 1080p (1920 x 1080)
+
+          For LIMITED capability devices
+          (`android.info.supportedHardwareLevel == LIMITED`),
+          the HAL only has to list up to the maximum video size
+          supported by the device.
+
+          Regardless of hardware level, every output resolution available for
+          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+          This supersedes the following fields, which are now deprecated:
+
+          * availableFormats
+          * available[Processed,Raw,Jpeg]Sizes
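+
+          As a purely illustrative sketch (sizes chosen arbitrarily, with a
+          hypothetical `static_metadata` handle), the entry could be populated
+          as flattened `(format, width, height, input?)` tuples, using the
+          format enum values from android.scaler.availableFormats:
+
+              int32_t configs[] = {
+                  // format, width, height, output/input
+                  ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 3264, 2448,
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                  ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 1920, 1080,
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+              };
+              add_camera_metadata_entry(static_metadata,
+                      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                      configs, sizeof(configs) / sizeof(configs[0]));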
+          </hal_details>
+        </entry>
+        <entry name="availableMinFrameDurations" type="int64" visibility="public"
+        container="array">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the minimum frame duration for each
+          format/size combination.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          This should correspond to the frame duration when only that
+          stream is active, with all processing (typically in android.*.mode)
+          set to either OFF or FAST.
+
+          When multiple streams are used in a request, the minimum frame
+          duration will be max(individual stream min durations).
+
+          The minimum frame duration of a stream (of a particular format, size)
+          is the same regardless of whether the stream is input or output.
+
+          See android.sensor.frameDuration and
+          android.scaler.availableStallDurations for more details about
+          calculating the max frame rate.
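+
+          As a hypothetical example (the durations are illustrative, and the
+          `static_metadata` handle name is assumed), a device might list a
+          1/30 s minimum duration for a 1080p YUV_420_888 stream and a 1/20 s
+          minimum duration for a full-resolution JPEG stream:
+
+              int64_t min_durations[] = {
+                  // format, width, height, duration (ns)
+                  ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
+                      1920, 1080, 33333333LL,
+                  ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
+                      3264, 2448, 50000000LL,
+              };
+              add_camera_metadata_entry(static_metadata,
+                      ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                      min_durations,
+                      sizeof(min_durations) / sizeof(min_durations[0]));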
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableStallDurations" type="int64" visibility="public"
+        container="array">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the maximum stall duration for each
+          format/size combination.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          A stall duration is how much extra time would get added
+          to the normal minimum frame duration for a repeating request
+          that has streams with non-zero stall.
+
+          For example, consider JPEG captures which have the following
+          characteristics:
+
+          * JPEG streams act like processed YUV streams in requests for which
+          they are not included; in requests in which they are directly
+          referenced, they act as JPEG streams. This is because supporting a
+          JPEG stream requires the underlying YUV data to always be ready for
+          use by a JPEG encoder, but the encoder will only be used (and impact
+          frame duration) on requests that actually reference a JPEG stream.
+          * The JPEG processor can run concurrently with the rest of the camera
+          pipeline, but cannot process more than 1 capture at a time.
+
+          In other words, using a repeating YUV request would result
+          in a steady frame rate (let's say it's 30 FPS). If a single
+          JPEG request is submitted periodically, the frame rate will stay
+          at 30 FPS (as long as we wait for the previous JPEG to return each
+          time). If we try to submit a repeating YUV + JPEG request, then
+          the frame rate will drop from 30 FPS.
+
+          In general, submitting a new request with a non-zero stall time
+          stream will _not_ cause a frame rate drop unless there are still
+          outstanding buffers for that stream from previous requests.
+
+          Submitting a repeating request with a set of streams (call this `S`)
+          is the same as setting the minimum frame duration to
+          the normal minimum frame duration corresponding to `S`, plus
+          the maximum stall duration for `S`. For example, if the minimum
+          frame duration of `S` is 33 ms and its maximum stall duration is
+          100 ms, the effective frame duration of the repeating request is
+          133 ms.
+
+          When interleaving requests with and without a stall duration,
+          a request will stall by the maximum of the remaining stall times
+          for each stream that can stall and still has outstanding buffers.
+
+          This means that a stalling request will not have an exposure start
+          until the stall has completed.
+
+          This should correspond to the stall duration when only that stream is
+          active, with all processing (typically in android.*.mode) set to FAST
+          or OFF. Setting any of the processing modes to HIGH_QUALITY
+          effectively results in an indeterminate stall duration for all
+          streams in a request (the regular stall calculation rules are
+          ignored).
+
+          The following formats may always have a stall duration:
+
+          * JPEG
+          * RAW16
+
+          The following formats will never have a stall duration:
+
+          * YUV_420_888
+          * IMPLEMENTATION_DEFINED
+
+          All other formats may or may not have an allowed stall duration on
+          a per-capability basis; refer to android.request.availableCapabilities
+          for more details.
+
+          See android.sensor.frameDuration for more information about
+          calculating the max frame rate (absent stalls).
+          </details>
+          <hal_details>
+          If possible, it is recommended that all non-JPEG formats
+          (such as RAW16) not have a stall duration.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+      </static>
     </section>
     <section name="sensor">
       <controls>
@@ -3035,7 +3616,7 @@
           start of next frame exposure.</description>
           <units>nanoseconds</units>
           <range>See android.sensor.info.maxFrameDuration,
-          android.scaler.available*MinDurations. The duration
+          android.scaler.availableMinFrameDurations. The duration
           is capped to `max(duration, exposureTime + overhead)`.</range>
           <details>
           The maximum frame rate that can be supported by a camera subsystem is
@@ -3069,59 +3650,45 @@
           largest requested stream resolution.
           * Using more than one output stream in a request does not affect the
           frame duration.
-          * JPEG streams act like processed YUV streams in requests for which
-          they are not included; in requests in which they are directly
-          referenced, they act as JPEG streams. This is because supporting a
-          JPEG stream requires the underlying YUV data to always be ready for
-          use by a JPEG encoder, but the encoder will only be used (and impact
-          frame duration) on requests that actually reference a JPEG stream.
-          * The JPEG processor can run concurrently to the rest of the camera
-          pipeline, but cannot process more than 1 capture at a time.
+          * Streams of certain formats may need to do additional background
+          processing before data is consumed/produced by that stream. These
+          processors can run concurrently with the rest of the camera pipeline,
+          but cannot process more than 1 capture at a time.
 
           The necessary information for the application, given the model above,
-          is provided via the android.scaler.available*MinDurations fields.
+          is provided via the android.scaler.availableMinFrameDurations field.
           These are used to determine the maximum frame rate / minimum frame
           duration that is possible for a given stream configuration.
 
           Specifically, the application can use the following rules to
-          determine the minimum frame duration it can request from the HAL
+          determine the minimum frame duration it can request from the camera
           device:
 
-          1. Given the application's currently configured set of output
-          streams, `S`, divide them into three sets: streams in a JPEG format
-          `SJ`, streams in a raw sensor format `SR`, and the rest ('processed')
-          `SP`.
-          1. For each subset of streams, find the largest resolution (by pixel
-          count) in the subset. This gives (at most) three resolutions `RJ`,
-          `RR`, and `RP`.
-          1. If `RJ` is greater than `RP`, set `RP` equal to `RJ`. If there is
-          no exact match for `RP == RJ` (in particular there isn't an available
-          processed resolution at the same size as `RJ`), then set `RP` equal
-          to the smallest processed resolution that is larger than `RJ`. If
-          there are no processed resolutions larger than `RJ`, then set `RJ` to
-          the processed resolution closest to `RJ`.
-          1. If `RP` is greater than `RR`, set `RR` equal to `RP`. If there is
-          no exact match for `RR == RP` (in particular there isn't an available
-          raw resolution at the same size as `RP`), then set `RR` equal to
-          or to the smallest raw resolution that is larger than `RP`. If
-          there are no raw resolutions larger than `RP`, then set `RR` to
-          the raw resolution closest to `RP`.
-          1. Look up the matching minimum frame durations in the property lists
-          android.scaler.availableJpegMinDurations,
-          android.scaler.availableRawMinDurations, and
-          android.scaler.availableProcessedMinDurations.  This gives three
-          minimum frame durations `FJ`, `FR`, and `FP`.
-          1. If a stream of requests do not use a JPEG stream, then the minimum
-          supported frame duration for each request is `max(FR, FP)`.
-          1. If a stream of requests all use the JPEG stream, then the minimum
-          supported frame duration for each request is `max(FR, FP, FJ)`.
-          1. If a mix of JPEG-using and non-JPEG-using requests is submitted by
-          the application, then the HAL will have to delay JPEG-using requests
-          whenever the JPEG encoder is still busy processing an older capture.
-          This will happen whenever a JPEG-using request starts capture less
-          than `FJ` _ns_ after a previous JPEG-using request. The minimum
-          supported frame duration will vary between the values calculated in
-          \#6 and \#7.
+          1. Let the set of currently configured input/output streams
+          be called `S`.
+          1. Find the minimum frame duration for each stream in `S`, by
+          looking it up in android.scaler.availableMinFrameDurations (with
+          its respective size/format). Let this set of frame durations be called
+          `F`.
+          1. For any given request `R`, the minimum frame duration allowed
+          for `R` is the maximum out of all values in `F`. Let the streams
+          used in `R` be called `S_r`.
+
+          If none of the streams in `S_r` have a stall time (listed in
+          android.scaler.availableStallDurations), then the frame duration in
+          `F` determines the steady state frame rate that the application will
+          get if it uses `R` as a repeating request. Let this special kind
+          of request be called `Rsimple`.
+
+          A repeating request `Rsimple` can be _occasionally_ interleaved
+          with a single capture of a new request `Rstall` (which has at least
+          one in-use stream with a non-zero stall time). If `Rstall` has the
+          same minimum frame duration, and all buffers from the previous
+          `Rstall` have already been delivered, this will not cause a frame
+          rate loss.
+
+          For more details about stalling, see
+          android.scaler.availableStallDurations.
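+
+          As a rough, non-normative sketch of the rules above, where the stream
+          descriptor type and the lookup helper over
+          android.scaler.availableMinFrameDurations are hypothetical:
+
+              typedef struct { int32_t format, width, height; } stream_desc_t;
+
+              // Hypothetical helper: returns the duration (ns) listed in
+              // android.scaler.availableMinFrameDurations for this combination.
+              int64_t min_duration_for(int32_t format, int32_t width,
+                      int32_t height);
+
+              // Minimum frame duration allowed for any request, given the
+              // currently configured streams `S`.
+              int64_t request_min_duration(const stream_desc_t *streams,
+                      size_t count) {
+                  int64_t max_duration = 0;
+                  for (size_t i = 0; i &lt; count; i++) {
+                      int64_t d = min_duration_for(streams[i].format,
+                              streams[i].width, streams[i].height);
+                      if (d > max_duration) max_duration = d;
+                  }
+                  return max_duration;
+              }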
           </details>
           <tag id="V1" />
           <tag id="BC" />
@@ -3223,6 +3790,15 @@
             android.sensor.maxFrameDuration must be greater or equal to the
             android.sensor.exposureTimeRange max value (since exposure time
             overrides frame duration).
+
+            The available minimum frame durations for JPEG must be no greater
+            than the YUV_420_888/IMPLEMENTATION_DEFINED
+            minimum frame durations (for the same respective size).
+
+            Since JPEG processing is considered offline and can take longer than
+            a single uncompressed capture, refer to
+            android.scaler.availableStallDurations
+            for details about encoding this scenario.
             </hal_details>
             <tag id="BC" />
             <tag id="V1" />
@@ -3239,16 +3815,16 @@
             <tag id="V1" />
             <tag id="BC" />
           </entry>
-          <entry name="pixelArraySize" type="int32"
+          <entry name="pixelArraySize" type="int32" visibility="public"
           container="array" typedef="size">
             <array>
               <size>2</size>
             </array>
             <description>Dimensions of full pixel array, possibly
             including black calibration pixels.</description>
-            <details>The maximum output resolution for raw format in
-            android.scaler.availableRawSizes will be equal to this size.
-            </details>
+            <details>Maximum output resolution for raw format must
+            match this in
+            android.scaler.info.availableSizesPerFormat.</details>
             <tag id="DNG" />
             <tag id="BC" />
           </entry>
@@ -3287,71 +3863,6 @@
           </details>
           <tag id="DNG" />
         </entry>
-        <entry name="calibrationTransform1" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Per-device calibration on top of color space
-          transform 1</description>
-          <tag id="DNG" />
-        </entry>
-        <entry name="calibrationTransform2" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Per-device calibration on top of color space
-          transform 2</description>
-          <tag id="DNG" />
-        </entry>
-        <entry name="colorTransform1" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Linear mapping from XYZ (D50) color space to
-          reference linear sensor color, for first reference
-          illuminant</description>
-          <details>Use as follows XYZ = inv(transform) * clip( (raw -
-          black level(raw) ) / ( white level - max black level) ).
-          At least in the simple case</details>
-          <tag id="DNG" />
-        </entry>
-        <entry name="colorTransform2" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Linear mapping from XYZ (D50) color space to
-          reference linear sensor color, for second reference
-          illuminant</description>
-          <tag id="DNG" />
-        </entry>
-        <entry name="forwardMatrix1" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Used by DNG for better WB
-          adaptation</description>
-          <tag id="DNG" />
-        </entry>
-        <entry name="forwardMatrix2" type="rational"
-        type_notes="3x3 matrix in row-major-order"
-        container="array">
-          <array>
-            <size>9</size>
-          </array>
-          <description>Used by DNG for better WB
-          adaptation</description>
-          <tag id="DNG" />
-        </entry>
         <entry name="maxAnalogSensitivity" type="int32" visibility="public"
         optional="true">
           <description>Maximum sensitivity that is implemented
@@ -3398,45 +3909,6 @@
           </details>
           <tag id="DNG" />
         </entry>
-        <entry name="referenceIlluminant1" type="byte" enum="true">
-          <enum>
-            <value id="1">DAYLIGHT</value>
-            <value id="2">FLUORESCENT</value>
-            <value id="3">TUNGSTEN
-            <notes>Incandescent light</notes></value>
-            <value id="4">FLASH</value>
-            <value id="9">FINE_WEATHER</value>
-            <value id="10">CLOUDY_WEATHER</value>
-            <value id="11">SHADE</value>
-            <value id="12">DAYLIGHT_FLUORESCENT
-            <notes>D 5700 - 7100K</notes></value>
-            <value id="13">DAY_WHITE_FLUORESCENT
-            <notes>N 4600 - 5400K</notes></value>
-            <value id="14">COOL_WHITE_FLUORESCENT
-            <notes>W 3900 - 4500K</notes></value>
-            <value id="15">WHITE_FLUORESCENT
-            <notes>WW 3200 - 3700K</notes></value>
-            <value id="17">STANDARD_A</value>
-            <value id="18">STANDARD_B</value>
-            <value id="19">STANDARD_C</value>
-            <value id="20">D55</value>
-            <value id="21">D65</value>
-            <value id="22">D75</value>
-            <value id="23">D50</value>
-            <value id="24">ISO_STUDIO_TUNGSTEN</value>
-          </enum>
-          <description>Light source used to define transform
-          1</description>
-          <details>[EXIF LightSource tag] Must all these be
-          supported? Need CCT for each!</details>
-          <tag id="DNG" />
-          <tag id="EXIF" />
-        </entry>
-        <entry name="referenceIlluminant2" type="byte">
-          <description>Light source used to define transform
-          2</description>
-          <units>Same as illuminant 1</units>
-        </entry>
       </static>
       <dynamic>
         <clone entry="android.sensor.exposureTime" kind="controls">
@@ -3467,6 +3939,104 @@
           <range>Optional. This value is missing if no temperature is available.</range>
           <tag id="FULL" />
         </entry>
+        <entry name="referenceIlluminant" type="byte" enum="true">
+          <enum>
+            <value id="1">DAYLIGHT</value>
+            <value id="2">FLUORESCENT</value>
+            <value id="3">TUNGSTEN
+            <notes>Incandescent light</notes></value>
+            <value id="4">FLASH</value>
+            <value id="9">FINE_WEATHER</value>
+            <value id="10">CLOUDY_WEATHER</value>
+            <value id="11">SHADE</value>
+            <value id="12">DAYLIGHT_FLUORESCENT
+            <notes>D 5700 - 7100K</notes></value>
+            <value id="13">DAY_WHITE_FLUORESCENT
+            <notes>N 4600 - 5400K</notes></value>
+            <value id="14">COOL_WHITE_FLUORESCENT
+            <notes>W 3900 - 4500K</notes></value>
+            <value id="15">WHITE_FLUORESCENT
+            <notes>WW 3200 - 3700K</notes></value>
+            <value id="17">STANDARD_A</value>
+            <value id="18">STANDARD_B</value>
+            <value id="19">STANDARD_C</value>
+            <value id="20">D55</value>
+            <value id="21">D65</value>
+            <value id="22">D75</value>
+            <value id="23">D50</value>
+            <value id="24">ISO_STUDIO_TUNGSTEN</value>
+          </enum>
+          <description>
+          A reference illumination source roughly matching the current scene
+          illumination, which is used to describe the sensor color space
+          transformations.
+          </description>
+          <details>
+          The values in this tag correspond to the values defined for the
+          EXIF LightSource tag. These illuminants are standard light sources
+          that are often used for calibrating camera devices.
+          </details>
+          <tag id="DNG" />
+          <tag id="EXIF" />
+        </entry>
+        <entry name="calibrationTransform" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A per-device calibration transform matrix to be applied after the
+          color space transform when rendering the raw image buffer.
+          </description>
+          <details>
+          This matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a per-device calibration transform that maps colors
+          from reference camera color space (i.e. the "golden module"
+          colorspace) into this camera device's linear native sensor color
+          space for the current scene illumination and white balance choice.
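+
+          A minimal sketch of converting this 3x3 rational entry into floats
+          with the camera_metadata C API (the `result` metadata handle is
+          illustrative):
+
+              camera_metadata_ro_entry_t entry;
+              if (find_camera_metadata_ro_entry(result,
+                      ANDROID_SENSOR_CALIBRATION_TRANSFORM,
+                      &amp;entry) == 0 &amp;&amp; entry.count == 9) {
+                  float m[3][3];
+                  for (size_t i = 0; i &lt; 9; ++i) {
+                      // Row-major order: element (row, col) is entry 3*row + col.
+                      m[i / 3][i % 3] = (float)entry.data.r[i].numerator /
+                                        (float)entry.data.r[i].denominator;
+                  }
+                  // m now holds the calibration transform as floats.
+              }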
+          </details>
+          <tag id="DNG" />
+        </entry>
+        <entry name="colorTransform" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms color values from CIE XYZ color space to
+          reference camera color space when rendering the raw image buffer.
+          </description>
+          <details>
+          This matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a color transform matrix that maps colors from the CIE
+          XYZ color space to the reference camera raw color space (i.e. the
+          "golden module" colorspace) for the current scene illumination and
+          white balance choice.
+          </details>
+          <tag id="DNG" />
+        </entry>
+        <entry name="forwardMatrix" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms white balanced camera colors to the CIE XYZ
+          colorspace with a D50 whitepoint.
+          </description>
+          <details>
+          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+          a color transform matrix that maps a unit vector in the linear native
+          sensor color space to the D50 whitepoint in CIE XYZ color space.
+          </details>
+          <tag id="DNG" />
+        </entry>
         <entry name="neutralColorPoint" type="rational" visibility="public"
         optional="true" container="array">
           <array>
@@ -4336,6 +4906,24 @@
             <value>FULL</value>
           </enum>
           <description>
+          Generally classifies the overall set of functionality provided by the camera device.
+          </description>
+          <range>Optional. Default value is LIMITED.</range>
+          <details>
+          Camera devices will come in two flavors: LIMITED and FULL.
+
+          A FULL device has the most support possible and will enable the
+          widest range of use cases such as:
+
+          * 30 FPS at maximum resolution (== sensor resolution)
+          * Per frame control
+          * Manual sensor control
+          * Zero Shutter Lag (ZSL)
+
+          A LIMITED device may have some or none of the above characteristics.
+          To find out more refer to android.request.availableCapabilities.
+          </details>
+          <hal_details>
           The camera 3 HAL device can implement one of two possible
           operational modes; limited and full. Full support is
           expected from new higher-end devices. Limited mode has
@@ -4345,8 +4933,7 @@
           limited, and they share the same essential operational flow.
 
           For full details refer to "S3. Operational Modes" in camera3.h
-          </description>
-          <range>Optional. Default value is LIMITED.</range>
+          </hal_details>
         </entry>
       </static>
     </section>
diff --git a/camera/include/system/camera_metadata_tags.h b/camera/include/system/camera_metadata_tags.h
index 8896145..aad5349 100644
--- a/camera/include/system/camera_metadata_tags.h
+++ b/camera/include/system/camera_metadata_tags.h
@@ -43,7 +43,6 @@
     ANDROID_FLASH_INFO,
     ANDROID_GEOMETRIC,
     ANDROID_HOT_PIXEL,
-    ANDROID_HOT_PIXEL_INFO,
     ANDROID_JPEG,
     ANDROID_LENS,
     ANDROID_LENS_INFO,
@@ -79,7 +78,6 @@
     ANDROID_FLASH_INFO_START       = ANDROID_FLASH_INFO        << 16,
     ANDROID_GEOMETRIC_START        = ANDROID_GEOMETRIC         << 16,
     ANDROID_HOT_PIXEL_START        = ANDROID_HOT_PIXEL         << 16,
-    ANDROID_HOT_PIXEL_INFO_START   = ANDROID_HOT_PIXEL_INFO    << 16,
     ANDROID_JPEG_START             = ANDROID_JPEG              << 16,
     ANDROID_LENS_START             = ANDROID_LENS              << 16,
     ANDROID_LENS_INFO_START        = ANDROID_LENS_INFO         << 16,
@@ -180,14 +178,11 @@
     ANDROID_GEOMETRIC_STRENGTH,                       // byte         | system
     ANDROID_GEOMETRIC_END,
 
-    ANDROID_HOT_PIXEL_MODE =                          // enum         | system
+    ANDROID_HOT_PIXEL_MODE =                          // enum         | public
             ANDROID_HOT_PIXEL_START,
+    ANDROID_HOT_PIXEL_MAP,                            // int32[]      | public
     ANDROID_HOT_PIXEL_END,
 
-    ANDROID_HOT_PIXEL_INFO_MAP =                      // int32[]      | system
-            ANDROID_HOT_PIXEL_INFO_START,
-    ANDROID_HOT_PIXEL_INFO_END,
-
     ANDROID_JPEG_GPS_COORDINATES =                    // double[]     | public
             ANDROID_JPEG_START,
     ANDROID_JPEG_GPS_PROCESSING_METHOD,               // byte         | public
@@ -253,6 +248,10 @@
     ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public
     ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public
     ANDROID_REQUEST_PARTIAL_RESULT_COUNT,             // int32        | public
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES,           // enum         | public
+    ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,           // int32[]      | hidden
+    ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,            // int32[]      | hidden
+    ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,   // int32[]      | hidden
     ANDROID_REQUEST_END,
 
     ANDROID_SCALER_CROP_REGION =                      // int32[]      | public
@@ -265,6 +264,10 @@
     ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,         // int32[]      | public
     ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,       // int64[]      | system
     ANDROID_SCALER_AVAILABLE_RAW_SIZES,               // int32[]      | system
+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32[]      | public
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,   // enum[]       | public
+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,     // int64[]      | public
+    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,         // int64[]      | public
     ANDROID_SCALER_END,
 
     ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public
@@ -273,19 +276,15 @@
     ANDROID_SENSOR_SENSITIVITY,                       // int32        | public
     ANDROID_SENSOR_BASE_GAIN_FACTOR,                  // rational     | public
     ANDROID_SENSOR_BLACK_LEVEL_PATTERN,               // int32[]      | public
-    ANDROID_SENSOR_CALIBRATION_TRANSFORM1,            // rational[]   | system
-    ANDROID_SENSOR_CALIBRATION_TRANSFORM2,            // rational[]   | system
-    ANDROID_SENSOR_COLOR_TRANSFORM1,                  // rational[]   | system
-    ANDROID_SENSOR_COLOR_TRANSFORM2,                  // rational[]   | system
-    ANDROID_SENSOR_FORWARD_MATRIX1,                   // rational[]   | system
-    ANDROID_SENSOR_FORWARD_MATRIX2,                   // rational[]   | system
     ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,            // int32        | public
     ANDROID_SENSOR_ORIENTATION,                       // int32        | public
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,    // int32[]      | public
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1,             // enum         | system
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | system
     ANDROID_SENSOR_TIMESTAMP,                         // int64        | public
     ANDROID_SENSOR_TEMPERATURE,                       // float        | public
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT,              // enum         | system
+    ANDROID_SENSOR_CALIBRATION_TRANSFORM,             // rational[]   | public
+    ANDROID_SENSOR_COLOR_TRANSFORM,                   // rational[]   | public
+    ANDROID_SENSOR_FORWARD_MATRIX,                    // rational[]   | public
     ANDROID_SENSOR_NEUTRAL_COLOR_POINT,               // rational[]   | public
     ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,               // float[]      | public
     ANDROID_SENSOR_PROFILE_TONE_CURVE,                // float[]      | public
@@ -301,7 +300,7 @@
     ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,          // int64[]      | public
     ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,           // int64        | public
     ANDROID_SENSOR_INFO_PHYSICAL_SIZE,                // float[]      | public
-    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | system
+    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | public
     ANDROID_SENSOR_INFO_WHITE_LEVEL,                  // int32        | system
     ANDROID_SENSOR_INFO_END,
 
@@ -585,7 +584,6 @@
 
 
 
-
 // ANDROID_LENS_OPTICAL_STABILIZATION_MODE
 typedef enum camera_metadata_enum_android_lens_optical_stabilization_mode {
     ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
@@ -640,6 +638,16 @@
     ANDROID_REQUEST_TYPE_REPROCESS,
 } camera_metadata_enum_android_request_type_t;
 
+// ANDROID_REQUEST_AVAILABLE_CAPABILITIES
+typedef enum camera_metadata_enum_android_request_available_capabilities {
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OPTIONAL,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ZSL,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DNG,
+} camera_metadata_enum_android_request_available_capabilities_t;
+
 
 // ANDROID_SCALER_AVAILABLE_FORMATS
 typedef enum camera_metadata_enum_android_scaler_available_formats {
@@ -652,29 +660,35 @@
     ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                       = 0x21,
 } camera_metadata_enum_android_scaler_available_formats_t;
 
+// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+typedef enum camera_metadata_enum_android_scaler_available_stream_configurations {
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
+} camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
-// ANDROID_SENSOR_REFERENCE_ILLUMINANT1
-typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 {
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT               = 1,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT            = 2,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN               = 3,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                  = 4,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER           = 9,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER         = 10,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                  = 11,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT   = 12,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT  = 13,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT      = 15,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A             = 17,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B             = 18,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C             = 19,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                    = 20,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                    = 21,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                    = 22,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                    = 23,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN    = 24,
-} camera_metadata_enum_android_sensor_reference_illuminant1_t;
+
+// ANDROID_SENSOR_REFERENCE_ILLUMINANT
+typedef enum camera_metadata_enum_android_sensor_reference_illuminant {
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAYLIGHT                = 1,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_FLUORESCENT             = 2,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_TUNGSTEN                = 3,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_FLASH                   = 4,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_FINE_WEATHER            = 9,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_CLOUDY_WEATHER          = 10,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_SHADE                   = 11,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAYLIGHT_FLUORESCENT    = 12,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAY_WHITE_FLUORESCENT   = 13,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_COOL_WHITE_FLUORESCENT  = 14,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_WHITE_FLUORESCENT       = 15,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_A              = 17,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_B              = 18,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_C              = 19,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_D55                     = 20,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_D65                     = 21,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_D75                     = 22,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_D50                     = 23,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT_ISO_STUDIO_TUNGSTEN     = 24,
+} camera_metadata_enum_android_sensor_reference_illuminant_t;
 
 // ANDROID_SENSOR_TEST_PATTERN_MODE
 typedef enum camera_metadata_enum_android_sensor_test_pattern_mode {
diff --git a/camera/src/camera_metadata_tag_info.c b/camera/src/camera_metadata_tag_info.c
index 2c26ecc..b460129 100644
--- a/camera/src/camera_metadata_tag_info.c
+++ b/camera/src/camera_metadata_tag_info.c
@@ -39,7 +39,6 @@
     [ANDROID_FLASH_INFO]           = "android.flash.info",
     [ANDROID_GEOMETRIC]            = "android.geometric",
     [ANDROID_HOT_PIXEL]            = "android.hotPixel",
-    [ANDROID_HOT_PIXEL_INFO]       = "android.hotPixel.info",
     [ANDROID_JPEG]                 = "android.jpeg",
     [ANDROID_LENS]                 = "android.lens",
     [ANDROID_LENS_INFO]            = "android.lens.info",
@@ -76,8 +75,6 @@
                                        ANDROID_GEOMETRIC_END },
     [ANDROID_HOT_PIXEL]            = { ANDROID_HOT_PIXEL_START,
                                        ANDROID_HOT_PIXEL_END },
-    [ANDROID_HOT_PIXEL_INFO]       = { ANDROID_HOT_PIXEL_INFO_START,
-                                       ANDROID_HOT_PIXEL_INFO_END },
     [ANDROID_JPEG]                 = { ANDROID_JPEG_START,
                                        ANDROID_JPEG_END },
     [ANDROID_LENS]                 = { ANDROID_LENS_START,
@@ -251,11 +248,7 @@
         ANDROID_HOT_PIXEL_START] = {
     [ ANDROID_HOT_PIXEL_MODE - ANDROID_HOT_PIXEL_START ] =
     { "mode",                          TYPE_BYTE   },
-};
-
-static tag_info_t android_hot_pixel_info[ANDROID_HOT_PIXEL_INFO_END -
-        ANDROID_HOT_PIXEL_INFO_START] = {
-    [ ANDROID_HOT_PIXEL_INFO_MAP - ANDROID_HOT_PIXEL_INFO_START ] =
+    [ ANDROID_HOT_PIXEL_MAP - ANDROID_HOT_PIXEL_START ] =
     { "map",                           TYPE_INT32  },
 };
 
@@ -379,6 +372,14 @@
     { "pipelineMaxDepth",              TYPE_BYTE   },
     [ ANDROID_REQUEST_PARTIAL_RESULT_COUNT - ANDROID_REQUEST_START ] =
     { "partialResultCount",            TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_CAPABILITIES - ANDROID_REQUEST_START ] =
+    { "availableCapabilities",         TYPE_BYTE   },
+    [ ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS - ANDROID_REQUEST_START ] =
+    { "availableRequestKeys",          TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_RESULT_KEYS - ANDROID_REQUEST_START ] =
+    { "availableResultKeys",           TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS - ANDROID_REQUEST_START ] =
+    { "availableCharacteristicsKeys",  TYPE_INT32  },
 };
 
 static tag_info_t android_scaler[ANDROID_SCALER_END -
@@ -402,6 +403,15 @@
     { "availableRawMinDurations",      TYPE_INT64  },
     [ ANDROID_SCALER_AVAILABLE_RAW_SIZES - ANDROID_SCALER_START ] =
     { "availableRawSizes",             TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP - ANDROID_SCALER_START ] =
+    { "availableInputOutputFormatsMap",
+                                        TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS - ANDROID_SCALER_START ] =
+    { "availableStreamConfigurations", TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableMinFrameDurations",    TYPE_INT64  },
+    [ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableStallDurations",       TYPE_INT64  },
 };
 
 static tag_info_t android_sensor[ANDROID_SENSOR_END -
@@ -417,38 +427,27 @@
                 },
     [ ANDROID_SENSOR_BLACK_LEVEL_PATTERN - ANDROID_SENSOR_START ] =
     { "blackLevelPattern",             TYPE_INT32  },
-    [ ANDROID_SENSOR_CALIBRATION_TRANSFORM1 - ANDROID_SENSOR_START ] =
-    { "calibrationTransform1",         TYPE_RATIONAL
-                },
-    [ ANDROID_SENSOR_CALIBRATION_TRANSFORM2 - ANDROID_SENSOR_START ] =
-    { "calibrationTransform2",         TYPE_RATIONAL
-                },
-    [ ANDROID_SENSOR_COLOR_TRANSFORM1 - ANDROID_SENSOR_START ] =
-    { "colorTransform1",               TYPE_RATIONAL
-                },
-    [ ANDROID_SENSOR_COLOR_TRANSFORM2 - ANDROID_SENSOR_START ] =
-    { "colorTransform2",               TYPE_RATIONAL
-                },
-    [ ANDROID_SENSOR_FORWARD_MATRIX1 - ANDROID_SENSOR_START ] =
-    { "forwardMatrix1",                TYPE_RATIONAL
-                },
-    [ ANDROID_SENSOR_FORWARD_MATRIX2 - ANDROID_SENSOR_START ] =
-    { "forwardMatrix2",                TYPE_RATIONAL
-                },
     [ ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY - ANDROID_SENSOR_START ] =
     { "maxAnalogSensitivity",          TYPE_INT32  },
     [ ANDROID_SENSOR_ORIENTATION - ANDROID_SENSOR_START ] =
     { "orientation",                   TYPE_INT32  },
     [ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS - ANDROID_SENSOR_START ] =
     { "profileHueSatMapDimensions",    TYPE_INT32  },
-    [ ANDROID_SENSOR_REFERENCE_ILLUMINANT1 - ANDROID_SENSOR_START ] =
-    { "referenceIlluminant1",          TYPE_BYTE   },
-    [ ANDROID_SENSOR_REFERENCE_ILLUMINANT2 - ANDROID_SENSOR_START ] =
-    { "referenceIlluminant2",          TYPE_BYTE   },
     [ ANDROID_SENSOR_TIMESTAMP - ANDROID_SENSOR_START ] =
     { "timestamp",                     TYPE_INT64  },
     [ ANDROID_SENSOR_TEMPERATURE - ANDROID_SENSOR_START ] =
     { "temperature",                   TYPE_FLOAT  },
+    [ ANDROID_SENSOR_REFERENCE_ILLUMINANT - ANDROID_SENSOR_START ] =
+    { "referenceIlluminant",           TYPE_BYTE   },
+    [ ANDROID_SENSOR_CALIBRATION_TRANSFORM - ANDROID_SENSOR_START ] =
+    { "calibrationTransform",          TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_COLOR_TRANSFORM - ANDROID_SENSOR_START ] =
+    { "colorTransform",                TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_FORWARD_MATRIX - ANDROID_SENSOR_START ] =
+    { "forwardMatrix",                 TYPE_RATIONAL
+                },
     [ ANDROID_SENSOR_NEUTRAL_COLOR_POINT - ANDROID_SENSOR_START ] =
     { "neutralColorPoint",             TYPE_RATIONAL
                 },
@@ -593,7 +592,6 @@
     android_flash_info,
     android_geometric,
     android_hot_pixel,
-    android_hot_pixel_info,
     android_jpeg,
     android_lens,
     android_lens_info,
@@ -1324,8 +1322,7 @@
             }
             break;
         }
-
-        case ANDROID_HOT_PIXEL_INFO_MAP: {
+        case ANDROID_HOT_PIXEL_MAP: {
             break;
         }
 
@@ -1585,6 +1582,46 @@
         case ANDROID_REQUEST_PARTIAL_RESULT_COUNT: {
             break;
         }
+        case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+            switch (value) {
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE:
+                    msg = "BACKWARD_COMPATIBLE";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OPTIONAL:
+                    msg = "OPTIONAL";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR:
+                    msg = "MANUAL_SENSOR";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_GCAM:
+                    msg = "GCAM";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ZSL:
+                    msg = "ZSL";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DNG:
+                    msg = "DNG";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
+            break;
+        }
 
         case ANDROID_SCALER_CROP_REGION: {
             break;
@@ -1645,6 +1682,30 @@
         case ANDROID_SCALER_AVAILABLE_RAW_SIZES: {
             break;
         }
+        case ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: {
+            switch (value) {
+                case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT:
+                    msg = "OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT:
+                    msg = "INPUT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS: {
+            break;
+        }
 
         case ANDROID_SENSOR_EXPOSURE_TIME: {
             break;
@@ -1661,24 +1722,6 @@
         case ANDROID_SENSOR_BLACK_LEVEL_PATTERN: {
             break;
         }
-        case ANDROID_SENSOR_CALIBRATION_TRANSFORM1: {
-            break;
-        }
-        case ANDROID_SENSOR_CALIBRATION_TRANSFORM2: {
-            break;
-        }
-        case ANDROID_SENSOR_COLOR_TRANSFORM1: {
-            break;
-        }
-        case ANDROID_SENSOR_COLOR_TRANSFORM2: {
-            break;
-        }
-        case ANDROID_SENSOR_FORWARD_MATRIX1: {
-            break;
-        }
-        case ANDROID_SENSOR_FORWARD_MATRIX2: {
-            break;
-        }
         case ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY: {
             break;
         }
@@ -1688,81 +1731,87 @@
         case ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS: {
             break;
         }
-        case ANDROID_SENSOR_REFERENCE_ILLUMINANT1: {
+        case ANDROID_SENSOR_TIMESTAMP: {
+            break;
+        }
+        case ANDROID_SENSOR_TEMPERATURE: {
+            break;
+        }
+        case ANDROID_SENSOR_REFERENCE_ILLUMINANT: {
             switch (value) {
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAYLIGHT:
                     msg = "DAYLIGHT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_FLUORESCENT:
                     msg = "FLUORESCENT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_TUNGSTEN:
                     msg = "TUNGSTEN";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_FLASH:
                     msg = "FLASH";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_FINE_WEATHER:
                     msg = "FINE_WEATHER";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_CLOUDY_WEATHER:
                     msg = "CLOUDY_WEATHER";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_SHADE:
                     msg = "SHADE";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAYLIGHT_FLUORESCENT:
                     msg = "DAYLIGHT_FLUORESCENT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_DAY_WHITE_FLUORESCENT:
                     msg = "DAY_WHITE_FLUORESCENT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_COOL_WHITE_FLUORESCENT:
                     msg = "COOL_WHITE_FLUORESCENT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_WHITE_FLUORESCENT:
                     msg = "WHITE_FLUORESCENT";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_A:
                     msg = "STANDARD_A";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_B:
                     msg = "STANDARD_B";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_STANDARD_C:
                     msg = "STANDARD_C";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_D55:
                     msg = "D55";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_D65:
                     msg = "D65";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_D75:
                     msg = "D75";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_D50:
                     msg = "D50";
                     ret = 0;
                     break;
-                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN:
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT_ISO_STUDIO_TUNGSTEN:
                     msg = "ISO_STUDIO_TUNGSTEN";
                     ret = 0;
                     break;
@@ -1771,13 +1820,13 @@
             }
             break;
         }
-        case ANDROID_SENSOR_REFERENCE_ILLUMINANT2: {
+        case ANDROID_SENSOR_CALIBRATION_TRANSFORM: {
             break;
         }
-        case ANDROID_SENSOR_TIMESTAMP: {
+        case ANDROID_SENSOR_COLOR_TRANSFORM: {
             break;
         }
-        case ANDROID_SENSOR_TEMPERATURE: {
+        case ANDROID_SENSOR_FORWARD_MATRIX: {
             break;
         }
         case ANDROID_SENSOR_NEUTRAL_COLOR_POINT: {