Camera: Enhance support for monochrome cameras

- Add new Color Filter Arrangement enum values (MONO, NIR).
- Clarify docs for Bayer-pattern-related metadata.
- Add DngCreator support for monochrome camera RAW (usage sketch below).
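
A minimal app-side sketch of the DngCreator path (illustration only, not part of
this change). It assumes the usual RAW capture plumbing is already in place and
that characteristics, result and rawSensorImage all come from the same capture;
the method name and log tag are made up for the example.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.DngCreator;
    import android.hardware.camera2.TotalCaptureResult;
    import android.media.Image;
    import android.util.Log;
    import java.io.IOException;
    import java.io.OutputStream;

    /** Writes a DNG from a RAW_SENSOR image; works for Bayer and MONO/NIR sensors. */
    static void writeDng(CameraCharacteristics characteristics, TotalCaptureResult result,
            Image rawSensorImage, OutputStream out) throws IOException {
        Integer cfa = characteristics.get(
                CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
        boolean isMonochrome = cfa != null
                && (cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO
                        || cfa == CameraMetadata.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR);
        Log.d("DngSample", "Writing a " + (isMonochrome ? "LinearRaw" : "CFA") + " DNG");

        // The same call works for both sensor types; for MONO/NIR sensors the
        // generated file uses the LinearRaw photometric interpretation and omits
        // the CFA pattern and color matrix tags.
        try (DngCreator dng = new DngCreator(characteristics, result)) {
            dng.writeImage(out, rawSensorImage);
        }
    }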

Test: Camera CTS
Test: Capture a monochrome DNG image and inspect with Lightroom
Bug: 70216652
Change-Id: I329f224e3763dd5c777815a3cbb9dd7bd198c038
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 444ca87..7148b12 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -28,8 +28,8 @@
 import android.hardware.camera2.utils.TypeReference;
 import android.util.Rational;
 
-import java.util.Arrays;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -2668,7 +2668,8 @@
     /**
      * <p>The arrangement of color filters on sensor;
      * represents the colors in the top-left 2x2 section of
-     * the sensor, in reading order.</p>
+     * the sensor, in reading order, for a Bayer camera, or the
+     * light spectrum it captures for a MONOCHROME camera.</p>
      * <p><b>Possible values:</b>
      * <ul>
      *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB RGGB}</li>
@@ -2676,6 +2677,8 @@
      *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG GBRG}</li>
      *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR BGGR}</li>
      *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB RGB}</li>
+     *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO MONO}</li>
+     *   <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR NIR}</li>
      * </ul></p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Full capability</b> -
@@ -2688,6 +2691,8 @@
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR
      * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB
+     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO
+     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR
      */
     @PublicKey
     public static final Key<Integer> SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =
@@ -2919,6 +2924,8 @@
      * <p>Some devices may choose to provide a second set of calibration
      * information for improved quality, including
      * {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2} and its corresponding matrices.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Possible values:</b>
      * <ul>
      *   <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT DAYLIGHT}</li>
@@ -2981,6 +2988,8 @@
      * <p>If this key is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
      * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
      * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} will also be present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Range of valid values:</b><br>
      * Any value listed in {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3006,6 +3015,8 @@
      * colorspace) into this camera device's native sensor color
      * space under the first reference illuminant
      * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3029,6 +3040,8 @@
      * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3053,6 +3066,8 @@
      * and the CIE XYZ colorspace when calculating this transform will
      * match the standard white point for the first reference illuminant
      * (i.e. no chromatic adaptation will be applied by this transform).</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3079,6 +3094,8 @@
      * (i.e. no chromatic adaptation will be applied by this transform).</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3101,6 +3118,8 @@
      * this matrix is chosen so that the standard white point for this reference
      * illuminant in the reference sensor colorspace is mapped to D50 in the
      * CIE XYZ colorspace.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3125,6 +3144,8 @@
      * CIE XYZ colorspace.</p>
      * <p>This matrix will only be present if the second reference
      * illuminant is present.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      * <p><b>Permission {@link android.Manifest.permission#CAMERA } is needed to access this property</b></p>
      *
@@ -3153,6 +3174,7 @@
      * level values. For raw capture in particular, it is recommended to use
      * pixels from {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} to calculate black
      * level values for each frame.</p>
+     * <p>For a MONOCHROME camera device, all of the 2x2 channels must have the same values.</p>
      * <p><b>Range of valid values:</b><br>
      * &gt;= 0 for each.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
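
Because the color calibration keys documented above may be absent on a MONOCHROME
camera starting in Android Q even when RAW is supported, app-side RAW processing
should null-check them. A small sketch (illustration only; the helper name is made
up):

    import android.hardware.camera2.CameraCharacteristics;

    /** Returns whether the device publishes color calibration data for RAW processing. */
    static boolean hasColorCalibration(CameraCharacteristics characteristics) {
        // These keys are no longer guaranteed on a MONOCHROME camera, even with
        // the RAW capability, so treat each of them as optional.
        return characteristics.get(CameraCharacteristics.SENSOR_REFERENCE_ILLUMINANT1) != null
                && characteristics.get(CameraCharacteristics.SENSOR_COLOR_TRANSFORM1) != null
                && characteristics.get(CameraCharacteristics.SENSOR_CALIBRATION_TRANSFORM1) != null
                && characteristics.get(CameraCharacteristics.SENSOR_FORWARD_MATRIX1) != null;
    }
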
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index ac00f14..dc6cffc 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -357,7 +357,7 @@
      * </p>
      *
      * <p>MONOCHROME-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}
-     * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME})
+     * includes {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME MONOCHROME}) devices
      * supporting {@link android.graphics.ImageFormat#Y8 Y8} support substituting {@code YUV}
      * streams with {@code Y8} in all guaranteed stream combinations for the device's hardware level
      * and capabilities.</p>
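
To make the Y8 substitution above concrete, a sketch of creating a Y8 reader in
place of a YUV_420_888 one (illustration only; the size and maxImages values are
placeholders, and the caller is assumed to have verified Y8 support via the
device's stream configuration map):

    import android.graphics.ImageFormat;
    import android.media.ImageReader;
    import android.util.Size;

    /** Creates a Y8 reader; usable wherever a YUV_420_888 stream is guaranteed on
     *  a MONOCHROME device that supports Y8. */
    static ImageReader createY8Reader(Size size) {
        return ImageReader.newInstance(size.getWidth(), size.getHeight(),
                ImageFormat.Y8, /*maxImages*/ 2);
    }
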
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index ffc2264..402472a 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -880,11 +880,15 @@
 
     /**
      * <p>The camera device is a monochrome camera that doesn't contain a color filter array,
-     * and the pixel values on U and V planes are all 128.</p>
+     * and for a YUV_420_888 stream, the pixel values on the U and V planes are all 128.</p>
      * <p>A MONOCHROME camera must support the guaranteed stream combinations required for
      * its device level and capabilities. Additionally, if the monochrome camera device
      * supports Y8 format, all mandatory stream combination requirements related to {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888} apply
-     * to {@link android.graphics.ImageFormat#Y8 Y8} as well.</p>
+     * to {@link android.graphics.ImageFormat#Y8 Y8} as well. There are no
+     * mandatory stream combination requirements with regard to
+     * {@link android.graphics.ImageFormat#Y8 Y8} for Bayer camera devices.</p>
+     * <p>Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME
+     * camera will be either MONO or NIR.</p>
      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
      */
     public static final int REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12;
@@ -937,6 +941,23 @@
      */
     public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4;
 
+    /**
+     * <p>The sensor doesn't have any Bayer color filter.
+     * Such a sensor captures visible light in monochrome. The exact weighting and
+     * wavelengths captured are not specified, but generally only include the visible
+     * frequencies. This value implies a MONOCHROME camera.</p>
+     * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+     */
+    public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5;
+
+    /**
+     * <p>The sensor has a near-infrared filter capturing light with wavelengths between
+     * roughly 750nm and 1400nm, and the same filter covers the whole sensor array. This
+     * value implies a MONOCHROME camera.</p>
+     * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+     */
+    public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6;
+
     //
     // Enumeration values for CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
     //
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 2744e91..8ebaf2f 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -24,8 +24,8 @@
 import android.hardware.camera2.impl.SyntheticKey;
 import android.hardware.camera2.params.OutputConfiguration;
 import android.hardware.camera2.utils.HashCodeHelpers;
-import android.hardware.camera2.utils.TypeReference;
 import android.hardware.camera2.utils.SurfaceUtils;
+import android.hardware.camera2.utils.TypeReference;
 import android.os.Parcel;
 import android.os.Parcelable;
 import android.util.ArraySet;
@@ -2947,8 +2947,8 @@
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
@@ -3011,8 +3011,8 @@
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 2b67f3e..3d70c51 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -3417,6 +3417,8 @@
      * used to interpolate between the provided color transforms when
      * processing raw sensor data.</p>
      * <p>The order of the values is R, G, B; where R is in the lowest index.</p>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      */
     @PublicKey
@@ -3442,6 +3444,8 @@
      * that channel.</p>
      * <p>A more detailed description of the noise model can be found in the
      * Adobe DNG specification for the NoiseProfile tag.</p>
+     * <p>For a MONOCHROME camera, there is only one color channel, so the noise model
+     * coefficients will only contain one S and one O.</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
      *
      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
@@ -3482,6 +3486,8 @@
      * <li>R &gt; 1.20 will require strong software correction to produce
      * a usuable image (&gt;20% divergence).</li>
      * </ul>
+     * <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+     * the camera device has RAW capability.</p>
      * <p><b>Range of valid values:</b><br></p>
      * <p>&gt;= 0</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3592,6 +3598,7 @@
      * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
      * nth value given corresponds to the black level offset for the nth
      * color channel listed in the CFA.</p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values.</p>
      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is available or the
      * camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
      * <p><b>Range of valid values:</b><br>
@@ -3852,6 +3859,17 @@
      * <p>As a visualization only, inverting the full-color map to recover an
      * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:</p>
+     * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1,  1.3, 1.3, 1.3, 1.3,
+     *   1.2, 1.2, 1.2, 1.2,  1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.2, 1.2, 1.2,
+     *   1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2,  1.3, 1.3, 1.3, 1.3 ]
+     * </code></pre>
      * <p><b>Range of valid values:</b><br>
      * Each gain factor is &gt;= 1</p>
      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3894,13 +3912,13 @@
      * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
      * The map is assumed to be bilinearly interpolated between the sample points.</p>
-     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
-     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+     * <p>For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is
+     * the green channel for the even rows of a Bayer pattern, and Godd is for the odd rows.
      * The shading map is stored in a fully interleaved format, and its size
      * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
      * <p>The shading map will generally have on the order of 30-40 rows and columns,
      * and will be smaller than 64x64.</p>
-     * <p>As an example, given a very small map defined as:</p>
+     * <p>As an example, given a very small map for a Bayer camera defined as:</p>
      * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
      * android.statistics.lensShadingMap =
      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
@@ -3920,6 +3938,17 @@
      * image of a gray wall (using bicubic interpolation for visual quality)
      * as captured by the sensor gives:</p>
      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+     * <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
+     * shading map for such a camera is defined as:</p>
+     * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+     * android.statistics.lensShadingMap =
+     * [ 1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.1, 1.1, 1.1, 1.1,  1.3, 1.3, 1.3, 1.3,
+     *   1.2, 1.2, 1.2, 1.2,  1.1, 1.1, 1.1, 1.1,
+     *     1.0, 1.0, 1.0, 1.0,  1.2, 1.2, 1.2, 1.2,
+     *   1.3, 1.3, 1.3, 1.3,  1.2, 1.2, 1.2, 1.2,
+     *     1.2, 1.2, 1.2, 1.2,  1.3, 1.3, 1.3, 1.3 ]
+     * </code></pre>
      * <p>Note that the RAW image data might be subject to lens shading
      * correction not reported on this map. Query
      * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has subject
@@ -4250,8 +4279,8 @@
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
@@ -4314,8 +4343,8 @@
      * of points can be less than max (that is, the request doesn't have to
      * always provide a curve with number of points equivalent to
      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
-     * <p>For devices with MONOCHROME capability, only red channel is used. Green and blue channels
-     * are ignored.</p>
+     * <p>For devices with MONOCHROME capability, all three channels must have the same set of
+     * control points.</p>
      * <p>A few examples, and their corresponding graphical mappings; these
      * only specify the red channel and the precision is limited to 4
      * digits, for conciseness.</p>
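
A sketch of reading the noise model described above (illustration only; the
method name and log tag are made up):

    import android.hardware.camera2.CaptureResult;
    import android.util.Log;
    import android.util.Pair;

    /** Logs the per-channel noise model coefficients from a capture result. */
    static void dumpNoiseProfile(CaptureResult result) {
        Pair<Double, Double>[] profile = result.get(CaptureResult.SENSOR_NOISE_PROFILE);
        if (profile == null) {
            return; // The key is optional and may be absent.
        }
        // One (S, O) pair per color channel: several entries for a Bayer CFA,
        // a single entry for a MONOCHROME camera.
        for (Pair<Double, Double> p : profile) {
            Log.d("NoiseProfile", "S=" + p.first + ", O=" + p.second);
        }
    }
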
diff --git a/core/java/android/hardware/camera2/params/BlackLevelPattern.java b/core/java/android/hardware/camera2/params/BlackLevelPattern.java
index 6d6c094..283977f 100644
--- a/core/java/android/hardware/camera2/params/BlackLevelPattern.java
+++ b/core/java/android/hardware/camera2/params/BlackLevelPattern.java
@@ -16,13 +16,17 @@
 
 package android.hardware.camera2.params;
 
-import java.util.Arrays;
-
 import static com.android.internal.util.Preconditions.checkNotNull;
 
+import java.util.Arrays;
+
 /**
  * Immutable class to store a 4-element vector of integers corresponding to a 2x2 pattern
  * of color channel offsets used for the black level offsets of each color channel.
+ *
+ * For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 4 elements of the pattern will have the same value.
  */
 public final class BlackLevelPattern {
 
@@ -133,6 +137,12 @@
      * {@link android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}).
      * </p>
      *
+     * <p>A {@link
+     * android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+     * MONOCHROME} camera only has one channel. As a result, the returned string will contain 4
+     * identical values.
+     * </p>
+     *
      * @return string representation of {@link BlackLevelPattern}
      *
      * @see android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
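
A sketch of reading the black level pattern on a MONOCHROME device (illustration
only; it assumes SENSOR_BLACK_LEVEL_PATTERN is present, and the method name is
made up):

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.params.BlackLevelPattern;

    /** Returns the single black level of a MONOCHROME sensor. */
    static int monochromeBlackLevel(CameraCharacteristics characteristics) {
        BlackLevelPattern pattern =
                characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
        // Any index works here: on a MONOCHROME device all four 2x2 offsets are equal.
        return pattern.getOffsetForIndex(/*column*/ 0, /*row*/ 0);
    }
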
diff --git a/core/java/android/hardware/camera2/params/TonemapCurve.java b/core/java/android/hardware/camera2/params/TonemapCurve.java
index 71e68a5..90e6355 100644
--- a/core/java/android/hardware/camera2/params/TonemapCurve.java
+++ b/core/java/android/hardware/camera2/params/TonemapCurve.java
@@ -34,6 +34,10 @@
  * use as the tonemapping/contrast/gamma curve when {@link CaptureRequest#TONEMAP_MODE} is
  * set to {@link CameraMetadata#TONEMAP_MODE_CONTRAST_CURVE}.</p>
  *
+ * <p>For a camera device with
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME
+ * MONOCHROME} capability, all 3 channels will contain the same set of control points.</p>
+ *
  * <p>The total number of points {@code (Pin, Pout)} for each color channel can be no more than
  * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS}.</p>
  *
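
A sketch of supplying a tonemap curve that satisfies the MONOCHROME constraint
above by reusing one set of control points for all three channels (illustration
only; the curve values and method name are made up):

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.TonemapCurve;

    /** Applies a simple contrast curve; all three channels share the same points. */
    static void applySharedCurve(CaptureRequest.Builder builder) {
        float[] curve = {
                0.0f, 0.0f,   // (Pin, Pout) control points
                0.5f, 0.7f,
                1.0f, 1.0f };
        builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
        builder.set(CaptureRequest.TONEMAP_CURVE, new TonemapCurve(curve, curve, curve));
    }
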
diff --git a/core/jni/android_hardware_camera2_DngCreator.cpp b/core/jni/android_hardware_camera2_DngCreator.cpp
index c977437..29051f1 100644
--- a/core/jni/android_hardware_camera2_DngCreator.cpp
+++ b/core/jni/android_hardware_camera2_DngCreator.cpp
@@ -892,6 +892,13 @@
             cfaOut[3] = 0;
             break;
         }
+        // MONO and NIR are degenerate cases of the RGGB pattern: only the Red
+        // channel will be used.
+        case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
+        case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
+            cfaOut[0] = 0;
+            break;
+        }
         default: {
             return BAD_VALUE;
         }
@@ -1063,6 +1070,8 @@
 
     uint32_t preWidth = 0;
     uint32_t preHeight = 0;
+    uint8_t colorFilter = 0;
+    bool isBayer = true;
     {
         // Check dimensions
         camera_metadata_entry entry =
@@ -1083,10 +1092,25 @@
                     "either the preCorrectionActiveArraySize or the pixelArraySize.");
             return nullptr;
         }
+
+        camera_metadata_entry colorFilterEntry =
+                characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+        colorFilter = colorFilterEntry.data.u8[0];
+        camera_metadata_entry capabilitiesEntry =
+                characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+        size_t capsCount = capabilitiesEntry.count;
+        uint8_t* caps = capabilitiesEntry.data.u8;
+        if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
+                != caps+capsCount) {
+            isBayer = false;
+        } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
+                colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
+            jniThrowException(env, "java/lang/AssertionError",
+                    "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
+            return nullptr;
+        }
     }
 
-
-
     writer->addIfd(TIFF_IFD_0);
 
     status_t err = OK;
@@ -1094,9 +1118,12 @@
     const uint32_t samplesPerPixel = 1;
     const uint32_t bitsPerSample = BITS_PER_SAMPLE;
 
-    OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
+    OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
     uint8_t cfaPlaneColor[3] = {0, 1, 2};
-    uint8_t cfaEnum = -1;
+    camera_metadata_entry cfaEntry =
+            characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+    BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
+    uint8_t cfaEnum = cfaEntry.data.u8[0];
 
     // TODO: Greensplit.
     // TODO: Add remaining non-essential tags
@@ -1141,12 +1168,20 @@
 
     {
         // Set photometric interpretation
-        uint16_t interpretation = 32803; // CFA
+        uint16_t interpretation = isBayer ? 32803 /* CFA */ :
+                34892 /* Linear Raw */;
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
                 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
     }
 
     {
+        uint16_t repeatDim[2] = {2, 2};
+        if (!isBayer) {
+            repeatDim[0] = repeatDim[1] = 1;
+        }
+        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
+                TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
+
         // Set blacklevel tags, using dynamic black level if available
         camera_metadata_entry entry =
                 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
@@ -1165,14 +1200,9 @@
                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
                 blackLevelRational[i * 2 + 1] = 1;
             }
-
         }
-        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational,
-                TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
-
-        uint16_t repeatDim[2] = {2, 2};
-        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
-                TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
+        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
+                blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
     }
 
     {
@@ -1189,21 +1219,15 @@
                 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
     }
 
-    {
+    // CFA pattern tags are not necessary for monochrome cameras.
+    if (isBayer) {
         // Set CFA pattern dimensions
         uint16_t repeatDim[2] = {2, 2};
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
                 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
-    }
 
-    {
         // Set CFA pattern
-        camera_metadata_entry entry =
-                        characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
-        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
-
         const int cfaLength = 4;
-        cfaEnum = entry.data.u8[0];
         uint8_t cfa[cfaLength];
         if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
@@ -1214,15 +1238,11 @@
                 env, TAG_CFAPATTERN, writer);
 
         opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
-    }
 
-    {
         // Set CFA plane color
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
                 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
-    }
 
-    {
         // Set CFA layout
         uint16_t cfaLayout = 1;
         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
@@ -1442,7 +1462,7 @@
     }
 
     bool singleIlluminant = false;
-    {
+    if (isBayer) {
         // Set calibration illuminants
         camera_metadata_entry entry1 =
             characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
@@ -1464,7 +1484,7 @@
         }
     }
 
-    {
+    if (isBayer) {
         // Set color transforms
         camera_metadata_entry entry1 =
             characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
@@ -1497,7 +1517,7 @@
         }
     }
 
-    {
+    if (isBayer) {
         // Set calibration transforms
         camera_metadata_entry entry1 =
             characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
@@ -1531,7 +1551,7 @@
         }
     }
 
-    {
+    if (isBayer) {
         // Set forward transforms
         camera_metadata_entry entry1 =
             characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
@@ -1565,7 +1585,7 @@
         }
     }
 
-    {
+    if (isBayer) {
         // Set camera neutral
         camera_metadata_entry entry =
             results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
@@ -1632,8 +1652,8 @@
         camera_metadata_entry entry =
             results.find(ANDROID_SENSOR_NOISE_PROFILE);
 
-        const status_t numPlaneColors = 3;
-        const status_t numCfaChannels = 4;
+        const status_t numPlaneColors = isBayer ? 3 : 1;
+        const status_t numCfaChannels = isBayer ? 4 : 1;
 
         uint8_t cfaOut[numCfaChannels];
         if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
@@ -1710,42 +1730,44 @@
             }
         }
 
+        // The hot pixel map is specific to Bayer cameras per the DNG spec.
+        if (isBayer) {
+            // Set up bad pixel correction list
+            camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
 
-        // Set up bad pixel correction list
-        camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
-
-        if ((entry3.count % 2) != 0) {
-            ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
-                    __FUNCTION__);
-            jniThrowRuntimeException(env, "failed to add hotpixel map.");
-            return nullptr;
-        }
-
-        // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
-        std::vector<uint32_t> v;
-        for (size_t i = 0; i < entry3.count; i += 2) {
-            int32_t x = entry3.data.i32[i];
-            int32_t y = entry3.data.i32[i + 1];
-            x -= static_cast<int32_t>(xmin);
-            y -= static_cast<int32_t>(ymin);
-            if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
-                    static_cast<uint32_t>(y) >= height) {
-                continue;
-            }
-            v.push_back(x);
-            v.push_back(y);
-        }
-        const uint32_t* badPixels = &v[0];
-        uint32_t badPixelCount = v.size();
-
-        if (badPixelCount > 0) {
-            err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
-
-            if (err != OK) {
-                ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
+            if ((entry3.count % 2) != 0) {
+                ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
+                        __FUNCTION__);
                 jniThrowRuntimeException(env, "failed to add hotpixel map.");
                 return nullptr;
             }
+
+            // Adjust the bad pixel coordinates to be relative to the origin of the
+            // active area DNG tag
+            std::vector<uint32_t> v;
+            for (size_t i = 0; i < entry3.count; i += 2) {
+                int32_t x = entry3.data.i32[i];
+                int32_t y = entry3.data.i32[i + 1];
+                x -= static_cast<int32_t>(xmin);
+                y -= static_cast<int32_t>(ymin);
+                if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
+                        static_cast<uint32_t>(y) >= height) {
+                    continue;
+                }
+                v.push_back(x);
+                v.push_back(y);
+            }
+            const uint32_t* badPixels = &v[0];
+            uint32_t badPixelCount = v.size();
+
+            if (badPixelCount > 0) {
+                err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
+
+                if (err != OK) {
+                    ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
+                    jniThrowRuntimeException(env, "failed to add hotpixel map.");
+                    return nullptr;
+                }
+            }
         }
 
         if (builder.getCount() > 0) {
@@ -1960,10 +1982,12 @@
         tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
         tagsToMove.add(TAG_SAMPLESPERPIXEL);
         tagsToMove.add(TAG_PLANARCONFIGURATION);
-        tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
-        tagsToMove.add(TAG_CFAPATTERN);
-        tagsToMove.add(TAG_CFAPLANECOLOR);
-        tagsToMove.add(TAG_CFALAYOUT);
+        if (isBayer) {
+            tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
+            tagsToMove.add(TAG_CFAPATTERN);
+            tagsToMove.add(TAG_CFAPLANECOLOR);
+            tagsToMove.add(TAG_CFALAYOUT);
+        }
         tagsToMove.add(TAG_XRESOLUTION);
         tagsToMove.add(TAG_YRESOLUTION);
         tagsToMove.add(TAG_RESOLUTIONUNIT);