Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2013 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Eino-Ville Talvala | 2f1a2e4 | 2013-07-25 17:12:05 -0700 | [diff] [blame] | 17 | package android.hardware.camera2; |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 18 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 19 | import android.hardware.camera2.impl.CameraMetadataNative; |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 20 | import android.os.Parcel; |
| 21 | import android.os.Parcelable; |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 22 | import android.view.Surface; |
| 23 | |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 24 | import java.util.HashSet; |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 25 | import java.util.Objects; |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 26 | |
| 27 | |
| 28 | /** |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 29 | * <p>An immutable package of settings and outputs needed to capture a single |
| 30 | * image from the camera device.</p> |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 31 | * |
| 32 | * <p>Contains the configuration for the capture hardware (sensor, lens, flash), |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 33 | * the processing pipeline, the control algorithms, and the output buffers. Also |
| 34 | * contains the list of target Surfaces to send image data to for this |
| 35 | * capture.</p> |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 36 | * |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 37 | * <p>CaptureRequests can be created by using a {@link Builder} instance, |
| 38 | * obtained by calling {@link CameraDevice#createCaptureRequest}</p> |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 39 | * |
| 40 | * <p>CaptureRequests are given to {@link CameraDevice#capture} or |
| 41 | * {@link CameraDevice#setRepeatingRequest} to capture images from a camera.</p> |
| 42 | * |
| 43 | * <p>Each request can specify a different subset of target Surfaces for the |
| 44 | * camera to send the captured data to. All the surfaces used in a request must |
| 45 | * be part of the surface list given to the last call to |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 46 | * {@link CameraDevice#configureOutputs}, when the request is submitted to the |
| 47 | * camera device.</p> |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 48 | * |
| 49 | * <p>For example, a request meant for repeating preview might only include the |
| 50 | * Surface for the preview SurfaceView or SurfaceTexture, while a |
 * high-resolution still capture would also include a Surface from an
 * ImageReader configured for high-resolution JPEG images.</p>
| 53 | * |
| 54 | * @see CameraDevice#capture |
| 55 | * @see CameraDevice#setRepeatingRequest |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 56 | * @see CameraDevice#createCaptureRequest |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 57 | */ |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 58 | public final class CaptureRequest extends CameraMetadata implements Parcelable { |
| 59 | |
    // Output Surfaces this request targets; cloned per-request by the copy constructor.
    private final HashSet<Surface> mSurfaceSet;
    // Native-backed storage for all capture settings key/value pairs.
    private final CameraMetadataNative mSettings;

    // Arbitrary app-supplied object, set via Builder.setTag and read via getTag;
    // not final because the Builder mutates it in place before build().
    private Object mUserTag;
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 64 | |
| 65 | /** |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 66 | * Construct empty request. |
| 67 | * |
| 68 | * Used by Binder to unparcel this object only. |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 69 | */ |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 70 | private CaptureRequest() { |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 71 | mSettings = new CameraMetadataNative(); |
| 72 | mSurfaceSet = new HashSet<Surface>(); |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 73 | } |
| 74 | |
| 75 | /** |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 76 | * Clone from source capture request. |
| 77 | * |
| 78 | * Used by the Builder to create an immutable copy. |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 79 | */ |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 80 | @SuppressWarnings("unchecked") |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 81 | private CaptureRequest(CaptureRequest source) { |
| 82 | mSettings = new CameraMetadataNative(source.mSettings); |
| 83 | mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone(); |
Eino-Ville Talvala | 7b01c5c | 2013-10-08 19:34:29 -0700 | [diff] [blame] | 84 | mUserTag = source.mUserTag; |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 85 | } |
| 86 | |
| 87 | /** |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 88 | * Take ownership of passed-in settings. |
| 89 | * |
| 90 | * Used by the Builder to create a mutable CaptureRequest. |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 91 | */ |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 92 | private CaptureRequest(CameraMetadataNative settings) { |
| 93 | mSettings = settings; |
| 94 | mSurfaceSet = new HashSet<Surface>(); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 95 | } |
| 96 | |
    /**
     * Get a capture request field value, delegating to the backing native
     * metadata store.
     *
     * @param key The metadata field to read.
     * @return The value of that key, or {@code null} if the field is not set.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T get(Key<T> key) {
        return mSettings.get(key);
    }
| 102 | |
    /**
     * Retrieve the tag for this request, if any.
     *
     * <p>This tag is not used for anything by the camera device, but can be
     * used by an application to easily identify a CaptureRequest when it is
     * returned by
     * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}.
     * </p>
     *
     * @return the last tag Object set on this request, or {@code null} if
     *         no tag has been set.
     * @see Builder#setTag
     */
    public Object getTag() {
        return mUserTag;
    }
| 119 | |
Igor Murashkin | 6bbf9dc | 2013-09-05 12:22:00 -0700 | [diff] [blame] | 120 | /** |
| 121 | * Determine whether this CaptureRequest is equal to another CaptureRequest. |
| 122 | * |
 * <p>A request is considered equal to another if its set of key/value pairs is
 * equal, its list of output surfaces is equal, and the user tag is equal.</p>
| 125 | * |
| 126 | * @param other Another instance of CaptureRequest. |
| 127 | * |
| 128 | * @return True if the requests are the same, false otherwise. |
| 129 | */ |
| 130 | @Override |
| 131 | public boolean equals(Object other) { |
| 132 | return other instanceof CaptureRequest |
| 133 | && equals((CaptureRequest)other); |
| 134 | } |
| 135 | |
| 136 | private boolean equals(CaptureRequest other) { |
| 137 | return other != null |
| 138 | && Objects.equals(mUserTag, other.mUserTag) |
| 139 | && mSurfaceSet.equals(other.mSurfaceSet) |
| 140 | && mSettings.equals(other.mSettings); |
| 141 | } |
| 142 | |
| 143 | @Override |
| 144 | public int hashCode() { |
| 145 | return mSettings.hashCode(); |
| 146 | } |
| 147 | |
    /**
     * Parcelable glue: creates an empty CaptureRequest and populates it from
     * the parcel via {@link #readFromParcel}.
     */
    public static final Parcelable.Creator<CaptureRequest> CREATOR =
            new Parcelable.Creator<CaptureRequest>() {
        @Override
        public CaptureRequest createFromParcel(Parcel in) {
            CaptureRequest request = new CaptureRequest();
            request.readFromParcel(in);

            return request;
        }

        @Override
        public CaptureRequest[] newArray(int size) {
            return new CaptureRequest[size];
        }
    };
| 163 | |
| 164 | /** |
| 165 | * Expand this object from a Parcel. |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 166 | * Hidden since this breaks the immutability of CaptureRequest, but is |
| 167 | * needed to receive CaptureRequests with aidl. |
| 168 | * |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 169 | * @param in The parcel from which the object should be read |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 170 | * @hide |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 171 | */ |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 172 | public void readFromParcel(Parcel in) { |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 173 | mSettings.readFromParcel(in); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 174 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 175 | mSurfaceSet.clear(); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 176 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 177 | Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader()); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 178 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 179 | if (parcelableArray == null) { |
| 180 | return; |
| 181 | } |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 182 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 183 | for (Parcelable p : parcelableArray) { |
| 184 | Surface s = (Surface) p; |
| 185 | mSurfaceSet.add(s); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 186 | } |
| 187 | } |
| 188 | |
    /**
     * {@inheritDoc}
     *
     * <p>Always returns 0: no special content flags are reported for this
     * parcelable.</p>
     */
    @Override
    public int describeContents() {
        return 0;
    }
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 193 | |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 194 | @Override |
| 195 | public void writeToParcel(Parcel dest, int flags) { |
| 196 | mSettings.writeToParcel(dest, flags); |
| 197 | dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags); |
| 198 | } |
| 199 | |
| 200 | /** |
| 201 | * A builder for capture requests. |
| 202 | * |
| 203 | * <p>To obtain a builder instance, use the |
| 204 | * {@link CameraDevice#createCaptureRequest} method, which initializes the |
| 205 | * request fields to one of the templates defined in {@link CameraDevice}. |
| 206 | * |
| 207 | * @see CameraDevice#createCaptureRequest |
| 208 | * @see #TEMPLATE_PREVIEW |
| 209 | * @see #TEMPLATE_RECORD |
| 210 | * @see #TEMPLATE_STILL_CAPTURE |
| 211 | * @see #TEMPLATE_VIDEO_SNAPSHOT |
| 212 | * @see #TEMPLATE_MANUAL |
| 213 | */ |
| 214 | public final static class Builder { |
| 215 | |
Igor Murashkin | 7a36a0f | 2013-09-10 18:13:09 -0700 | [diff] [blame] | 216 | private final CaptureRequest mRequest; |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 217 | |
| 218 | /** |
| 219 | * Initialize the builder using the template; the request takes |
| 220 | * ownership of the template. |
| 221 | * |
| 222 | * @hide |
| 223 | */ |
| 224 | public Builder(CameraMetadataNative template) { |
| 225 | mRequest = new CaptureRequest(template); |
Igor Murashkin | 7072550 | 2013-06-25 20:27:06 +0000 | [diff] [blame] | 226 | } |
Eino-Ville Talvala | 70c2207 | 2013-08-27 12:09:04 -0700 | [diff] [blame] | 227 | |
| 228 | /** |
| 229 | * <p>Add a surface to the list of targets for this request</p> |
| 230 | * |
| 231 | * <p>The Surface added must be one of the surfaces included in the most |
| 232 | * recent call to {@link CameraDevice#configureOutputs}, when the |
| 233 | * request is given to the camera device.</p> |
| 234 | * |
| 235 | * <p>Adding a target more than once has no effect.</p> |
| 236 | * |
| 237 | * @param outputTarget Surface to use as an output target for this request |
| 238 | */ |
| 239 | public void addTarget(Surface outputTarget) { |
| 240 | mRequest.mSurfaceSet.add(outputTarget); |
| 241 | } |
| 242 | |
| 243 | /** |
| 244 | * <p>Remove a surface from the list of targets for this request.</p> |
| 245 | * |
| 246 | * <p>Removing a target that is not currently added has no effect.</p> |
| 247 | * |
| 248 | * @param outputTarget Surface to use as an output target for this request |
| 249 | */ |
| 250 | public void removeTarget(Surface outputTarget) { |
| 251 | mRequest.mSurfaceSet.remove(outputTarget); |
| 252 | } |
| 253 | |
| 254 | /** |
| 255 | * Set a capture request field to a value. The field definitions can be |
| 256 | * found in {@link CaptureRequest}. |
| 257 | * |
| 258 | * @param key The metadata field to write. |
| 259 | * @param value The value to set the field to, which must be of a matching |
| 260 | * type to the key. |
| 261 | */ |
| 262 | public <T> void set(Key<T> key, T value) { |
| 263 | mRequest.mSettings.set(key, value); |
| 264 | } |
| 265 | |
| 266 | /** |
| 267 | * Get a capture request field value. The field definitions can be |
| 268 | * found in {@link CaptureRequest}. |
| 269 | * |
| 270 | * @throws IllegalArgumentException if the key was not valid |
| 271 | * |
| 272 | * @param key The metadata field to read. |
| 273 | * @return The value of that key, or {@code null} if the field is not set. |
| 274 | */ |
| 275 | public <T> T get(Key<T> key) { |
| 276 | return mRequest.mSettings.get(key); |
| 277 | } |
| 278 | |
| 279 | /** |
| 280 | * Set a tag for this request. |
| 281 | * |
| 282 | * <p>This tag is not used for anything by the camera device, but can be |
| 283 | * used by an application to easily identify a CaptureRequest when it is |
| 284 | * returned by |
| 285 | * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted} |
| 286 | * |
| 287 | * @param tag an arbitrary Object to store with this request |
| 288 | * @see CaptureRequest#getTag |
| 289 | */ |
| 290 | public void setTag(Object tag) { |
| 291 | mRequest.mUserTag = tag; |
| 292 | } |
| 293 | |
| 294 | /** |
| 295 | * Build a request using the current target Surfaces and settings. |
| 296 | * |
| 297 | * @return A new capture request instance, ready for submission to the |
| 298 | * camera device. |
| 299 | */ |
| 300 | public CaptureRequest build() { |
| 301 | return new CaptureRequest(mRequest); |
| 302 | } |
| 303 | |
| 304 | |
| 305 | /** |
| 306 | * @hide |
| 307 | */ |
| 308 | public boolean isEmpty() { |
| 309 | return mRequest.mSettings.isEmpty(); |
| 310 | } |
| 311 | |
Eino-Ville Talvala | b267554 | 2012-12-12 13:29:45 -0800 | [diff] [blame] | 312 | } |
| 313 | |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 314 | /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ |
| 315 | * The key entries below this point are generated from metadata |
| 316 | * definitions in /system/media/camera/docs. Do not modify by hand or |
| 317 | * modify the comment blocks at the start or end. |
| 318 | *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/ |
| 319 | |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 320 | |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 321 | /** |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 322 | * <p>The mode control selects how the image data is converted from the |
| 323 | * sensor's native color into linear sRGB color.</p> |
| 324 | * <p>When auto-white balance is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this |
| 325 | * control is overridden by the AWB routine. When AWB is disabled, the |
| 326 | * application controls how the color mapping is performed.</p> |
| 327 | * <p>We define the expected processing pipeline below. For consistency |
| 328 | * across devices, this is always the case with TRANSFORM_MATRIX.</p> |
| 329 | * <p>When either FULL or HIGH_QUALITY is used, the camera device may |
| 330 | * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and |
| 331 | * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the |
| 332 | * camera device (in the results) and be roughly correct.</p> |
| 333 | * <p>Switching to TRANSFORM_MATRIX and using the data provided from |
| 334 | * FAST or HIGH_QUALITY will yield a picture with the same white point |
| 335 | * as what was produced by the camera device in the earlier frame.</p> |
| 336 | * <p>The expected processing pipeline is as follows:</p> |
| 337 | * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p> |
| 338 | * <p>The white balance is encoded by two values, a 4-channel white-balance |
| 339 | * gain vector (applied in the Bayer domain), and a 3x3 color transform |
| 340 | * matrix (applied after demosaic).</p> |
| 341 | * <p>The 4-channel white-balance gains are defined as:</p> |
| 342 | * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ] |
| 343 | * </code></pre> |
| 344 | * <p>where <code>G_even</code> is the gain for green pixels on even rows of the |
| 345 | * output, and <code>G_odd</code> is the gain for green pixels on the odd rows. |
| 346 | * These may be identical for a given camera device implementation; if |
| 347 | * the camera device does not support a separate gain for even/odd green |
| 348 | * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to |
| 349 | * <code>G_even</code> in the output result metadata.</p> |
| 350 | * <p>The matrices for color transforms are defined as a 9-entry vector:</p> |
| 351 | * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ] |
| 352 | * </code></pre> |
| 353 | * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>, |
| 354 | * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p> |
| 355 | * <p>with colors as follows:</p> |
| 356 | * <pre><code>r' = I0r + I1g + I2b |
| 357 | * g' = I3r + I4g + I5b |
| 358 | * b' = I6r + I7g + I8b |
| 359 | * </code></pre> |
| 360 | * <p>Both the input and output value ranges must match. Overflow/underflow |
| 361 | * values are clipped to fit within the range.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 362 | * |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 363 | * @see CaptureRequest#COLOR_CORRECTION_GAINS |
| 364 | * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 365 | * @see CaptureRequest#CONTROL_AWB_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 366 | * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX |
| 367 | * @see #COLOR_CORRECTION_MODE_FAST |
| 368 | * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY |
| 369 | */ |
    public static final Key<Integer> COLOR_CORRECTION_MODE =
            new Key<Integer>("android.colorCorrection.mode", int.class); // generated entry; edit the metadata definitions, not this file
| 372 | |
| 373 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 374 | * <p>A color transform matrix to use to transform |
| 375 | * from sensor RGB color space to output linear sRGB color space</p> |
Zhijun He | 49a3ca9 | 2014-02-05 13:48:09 -0800 | [diff] [blame] | 376 | * <p>This matrix is either set by the camera device when the request |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 377 | * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 378 | * directly by the application in the request when the |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 379 | * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p> |
Zhijun He | 49a3ca9 | 2014-02-05 13:48:09 -0800 | [diff] [blame] | 380 | * <p>In the latter case, the camera device may round the matrix to account |
| 381 | * for precision issues; the final rounded matrix should be reported back |
| 382 | * in this matrix result metadata. The transform should keep the magnitude |
| 383 | * of the output color values within <code>[0, 1.0]</code> (assuming input color |
| 384 | * values is within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 385 | * |
| 386 | * @see CaptureRequest#COLOR_CORRECTION_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 387 | */ |
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class); // generated entry; edit the metadata definitions, not this file
| 390 | |
| 391 | /** |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 392 | * <p>Gains applying to Bayer raw color channels for |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 393 | * white-balance</p> |
| 394 | * <p>The 4-channel white-balance gains are defined in |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 395 | * the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain |
| 396 | * for green pixels on even rows of the output, and <code>G_odd</code> |
 * is the gain for green pixels on the odd rows. If a HAL
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 398 | * does not support a separate gain for even/odd green channels, |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 399 | * it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to |
| 400 | * <code>G_even</code> in the output result metadata.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 401 | * <p>This array is either set by HAL when the request |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 402 | * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or |
Igor Murashkin | d5ff06a | 2013-08-20 15:15:06 -0700 | [diff] [blame] | 403 | * directly by the application in the request when the |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 404 | * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p> |
Igor Murashkin | 7d2a5c5 | 2014-01-17 15:07:52 -0800 | [diff] [blame] | 405 | * <p>The output should be the gains actually applied by the HAL to |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 406 | * the current frame.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 407 | * |
| 408 | * @see CaptureRequest#COLOR_CORRECTION_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 409 | */ |
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class); // generated entry; edit the metadata definitions, not this file
| 412 | |
| 413 | /** |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 414 | * <p>The desired setting for the camera device's auto-exposure |
| 415 | * algorithm's antibanding compensation.</p> |
| 416 | * <p>Some kinds of lighting fixtures, such as some fluorescent |
| 417 | * lights, flicker at the rate of the power supply frequency |
| 418 | * (60Hz or 50Hz, depending on country). While this is |
| 419 | * typically not noticeable to a person, it can be visible to |
| 420 | * a camera device. If a camera sets its exposure time to the |
| 421 | * wrong value, the flicker may become visible in the |
| 422 | * viewfinder as flicker or in a final captured image, as a |
| 423 | * set of variable-brightness bands across the image.</p> |
| 424 | * <p>Therefore, the auto-exposure routines of camera devices |
| 425 | * include antibanding routines that ensure that the chosen |
| 426 | * exposure value will not cause such banding. The choice of |
| 427 | * exposure time depends on the rate of flicker, which the |
| 428 | * camera device can detect automatically, or the expected |
| 429 | * rate can be selected by the application using this |
| 430 | * control.</p> |
| 431 | * <p>A given camera device may not support all of the possible |
| 432 | * options for the antibanding mode. The |
| 433 | * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains |
| 434 | * the available modes for a given camera device.</p> |
| 435 | * <p>The default mode is AUTO, which must be supported by all |
| 436 | * camera devices.</p> |
| 437 | * <p>If manual exposure control is enabled (by setting |
| 438 | * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF), |
| 439 | * then this setting has no effect, and the application must |
| 440 | * ensure it selects exposure times that do not cause banding |
| 441 | * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist |
| 442 | * the application in this.</p> |
| 443 | * |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 444 | * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES |
Zhijun He | 399f05d | 2014-01-15 11:31:30 -0800 | [diff] [blame] | 445 | * @see CaptureRequest#CONTROL_AE_MODE |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 446 | * @see CaptureRequest#CONTROL_MODE |
| 447 | * @see CaptureResult#STATISTICS_SCENE_FLICKER |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 448 | * @see #CONTROL_AE_ANTIBANDING_MODE_OFF |
| 449 | * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ |
| 450 | * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ |
| 451 | * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO |
| 452 | */ |
    public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
            new Key<Integer>("android.control.aeAntibandingMode", int.class); // generated entry; edit the metadata definitions, not this file
| 455 | |
| 456 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 457 | * <p>Adjustment to AE target image |
| 458 | * brightness</p> |
| 459 | * <p>For example, if EV step is 0.333, '6' will mean an |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 460 | * exposure compensation of +2 EV; -3 will mean an exposure |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 461 | * compensation of -1</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 462 | */ |
    public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
            new Key<Integer>("android.control.aeExposureCompensation", int.class); // generated entry; edit the metadata definitions, not this file
| 465 | |
| 466 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 467 | * <p>Whether AE is currently locked to its latest |
Zhijun He | 49a3ca9 | 2014-02-05 13:48:09 -0800 | [diff] [blame] | 468 | * calculated values.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 469 | * <p>Note that even when AE is locked, the flash may be |
Zhijun He | 49a3ca9 | 2014-02-05 13:48:09 -0800 | [diff] [blame] | 470 | * fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH / ON_ALWAYS_FLASH / |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 471 | * ON_AUTO_FLASH_REDEYE.</p> |
Zhijun He | 49a3ca9 | 2014-02-05 13:48:09 -0800 | [diff] [blame] | 472 | * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) |
| 473 | * when AE is already locked, the camera device will not change the exposure time |
| 474 | * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}) |
| 475 | * parameters. The flash may be fired if the android.control.aeMode |
| 476 | * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the |
| 477 | * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.</p> |
| 478 | * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p> |
| 479 | * |
| 480 | * @see CaptureRequest#CONTROL_AE_MODE |
| 481 | * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER |
| 482 | * @see CaptureResult#CONTROL_AE_STATE |
| 483 | * @see CaptureRequest#SENSOR_EXPOSURE_TIME |
| 484 | * @see CaptureRequest#SENSOR_SENSITIVITY |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 485 | */ |
    public static final Key<Boolean> CONTROL_AE_LOCK =
            new Key<Boolean>("android.control.aeLock", boolean.class); // generated entry; edit the metadata definitions, not this file
| 488 | |
| 489 | /** |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 490 | * <p>The desired mode for the camera device's |
| 491 | * auto-exposure routine.</p> |
| 492 | * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is |
| 493 | * AUTO.</p> |
| 494 | * <p>When set to any of the ON modes, the camera device's |
| 495 | * auto-exposure routine is enabled, overriding the |
| 496 | * application's selected exposure time, sensor sensitivity, |
| 497 | * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, |
| 498 | * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and |
| 499 | * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes |
| 500 | * is selected, the camera device's flash unit controls are |
| 501 | * also overridden.</p> |
| 502 | * <p>The FLASH modes are only available if the camera device |
| 503 | * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p> |
| 504 | * <p>If flash TORCH mode is desired, this field must be set to |
| 505 | * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p> |
| 506 | * <p>When set to any of the ON modes, the values chosen by the |
| 507 | * camera device auto-exposure routine for the overridden |
| 508 | * fields for a given capture will be available in its |
| 509 | * CaptureResult.</p> |
| 510 | * |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 511 | * @see CaptureRequest#CONTROL_MODE |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 512 | * @see CameraCharacteristics#FLASH_INFO_AVAILABLE |
| 513 | * @see CaptureRequest#FLASH_MODE |
Igor Murashkin | aef3b7e | 2014-01-15 13:20:37 -0800 | [diff] [blame] | 514 | * @see CaptureRequest#SENSOR_EXPOSURE_TIME |
| 515 | * @see CaptureRequest#SENSOR_FRAME_DURATION |
Zhijun He | 399f05d | 2014-01-15 11:31:30 -0800 | [diff] [blame] | 516 | * @see CaptureRequest#SENSOR_SENSITIVITY |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 517 | * @see #CONTROL_AE_MODE_OFF |
| 518 | * @see #CONTROL_AE_MODE_ON |
| 519 | * @see #CONTROL_AE_MODE_ON_AUTO_FLASH |
| 520 | * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH |
| 521 | * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE |
| 522 | */ |
| 523 | public static final Key<Integer> CONTROL_AE_MODE = |
| 524 | new Key<Integer>("android.control.aeMode", int.class); |
| 525 | |
| 526 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 527 | * <p>List of areas to use for |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 528 | * metering.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 529 | * <p>Each area is a rectangle plus weight: xmin, ymin, |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 530 | * xmax, ymax, weight. The rectangle is defined to be inclusive of the |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 531 | * specified coordinates.</p> |
| 532 | * <p>The coordinate system is based on the active pixel array, |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 533 | * with (0,0) being the top-left pixel in the active pixel array, and |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 534 | * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, |
| 535 | * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 536 | * bottom-right pixel in the active pixel array. The weight |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 537 | * should be nonnegative.</p> |
| 538 | * <p>If all regions have 0 weight, then no specific metering area |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 539 | * needs to be used by the HAL. If the metering region is |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 540 | * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 541 | * should ignore the sections outside the region and output the |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 542 | * used sections in the frame metadata.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 543 | * |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 544 | * @see CaptureRequest#SCALER_CROP_REGION |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 545 | * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 546 | */ |
| 547 | public static final Key<int[]> CONTROL_AE_REGIONS = |
| 548 | new Key<int[]>("android.control.aeRegions", int[].class); |
| 549 | |
| 550 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 551 | * <p>Range over which fps can be adjusted to |
| 552 | * maintain exposure</p> |
| 553 | * <p>Only constrains AE algorithm, not manual control |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 554 | * of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}</p> |
| 555 | * |
| 556 | * @see CaptureRequest#SENSOR_EXPOSURE_TIME |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 557 | */ |
| 558 | public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE = |
| 559 | new Key<int[]>("android.control.aeTargetFpsRange", int[].class); |
| 560 | |
| 561 | /** |
Zhijun He | 228f4f9 | 2014-01-16 17:22:05 -0800 | [diff] [blame] | 562 | * <p>Whether the camera device will trigger a precapture |
| 563 | * metering sequence when it processes this request.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 564 | * <p>This entry is normally set to IDLE, or is not |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 565 | * included at all in the request settings. When included and |
Zhijun He | 228f4f9 | 2014-01-16 17:22:05 -0800 | [diff] [blame] | 566 | * set to START, the camera device will trigger the autoexposure |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 567 | * precapture metering sequence.</p> |
| 568 | * <p>The effect of AE precapture trigger depends on the current |
Zhijun He | 228f4f9 | 2014-01-16 17:22:05 -0800 | [diff] [blame] | 569 | * AE mode and state; see {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture |
| 570 | * state transition details.</p> |
| 571 | * |
| 572 | * @see CaptureResult#CONTROL_AE_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 573 | * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE |
| 574 | * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START |
| 575 | */ |
| 576 | public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER = |
| 577 | new Key<Integer>("android.control.aePrecaptureTrigger", int.class); |
| 578 | |
| 579 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 580 | * <p>Whether AF is currently enabled, and what |
| 581 | * mode it is set to</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 582 | * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p> |
Zhijun He | 78146ec | 2014-01-14 18:12:13 -0800 | [diff] [blame] | 583 | * <p>If the lens is controlled by the camera device auto-focus algorithm, |
| 584 | * the camera device will report the current AF status in android.control.afState |
| 585 | * in result metadata.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 586 | * |
| 587 | * @see CaptureRequest#CONTROL_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 588 | * @see #CONTROL_AF_MODE_OFF |
| 589 | * @see #CONTROL_AF_MODE_AUTO |
| 590 | * @see #CONTROL_AF_MODE_MACRO |
| 591 | * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO |
| 592 | * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE |
| 593 | * @see #CONTROL_AF_MODE_EDOF |
| 594 | */ |
| 595 | public static final Key<Integer> CONTROL_AF_MODE = |
| 596 | new Key<Integer>("android.control.afMode", int.class); |
| 597 | |
| 598 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 599 | * <p>List of areas to use for focus |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 600 | * estimation.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 601 | * <p>Each area is a rectangle plus weight: xmin, ymin, |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 602 | * xmax, ymax, weight. The rectangle is defined to be inclusive of the |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 603 | * specified coordinates.</p> |
| 604 | * <p>The coordinate system is based on the active pixel array, |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 605 | * with (0,0) being the top-left pixel in the active pixel array, and |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 606 | * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, |
| 607 | * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 608 | * bottom-right pixel in the active pixel array. The weight |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 609 | * should be nonnegative.</p> |
| 610 | * <p>If all regions have 0 weight, then no specific focus area |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 611 | * needs to be used by the HAL. If the focusing region is |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 612 | * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 613 | * should ignore the sections outside the region and output the |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 614 | * used sections in the frame metadata.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 615 | * |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 616 | * @see CaptureRequest#SCALER_CROP_REGION |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 617 | * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 618 | */ |
| 619 | public static final Key<int[]> CONTROL_AF_REGIONS = |
| 620 | new Key<int[]>("android.control.afRegions", int[].class); |
| 621 | |
| 622 | /** |
Zhijun He | 228f4f9 | 2014-01-16 17:22:05 -0800 | [diff] [blame] | 623 | * <p>Whether the camera device will trigger autofocus for this request.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 624 | * <p>This entry is normally set to IDLE, or is not |
| 625 | * included at all in the request settings.</p> |
Zhijun He | 228f4f9 | 2014-01-16 17:22:05 -0800 | [diff] [blame] | 626 | * <p>When included and set to START, the camera device will trigger the |
| 627 | * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p> |
| 628 | * <p>When set to CANCEL, the camera device will cancel any active trigger, |
| 629 | * and return to its initial AF state.</p> |
| 630 | * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what that means for each AF mode.</p> |
| 631 | * |
| 632 | * @see CaptureResult#CONTROL_AF_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 633 | * @see #CONTROL_AF_TRIGGER_IDLE |
| 634 | * @see #CONTROL_AF_TRIGGER_START |
| 635 | * @see #CONTROL_AF_TRIGGER_CANCEL |
| 636 | */ |
| 637 | public static final Key<Integer> CONTROL_AF_TRIGGER = |
| 638 | new Key<Integer>("android.control.afTrigger", int.class); |
| 639 | |
| 640 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 641 | * <p>Whether AWB is currently locked to its |
Zhijun He | 2d5e897 | 2014-02-07 16:13:46 -0800 | [diff] [blame] | 642 | * latest calculated values.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 643 | * <p>Note that AWB lock is only meaningful for AUTO |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 644 | * mode; in other modes, AWB is already fixed to a specific |
Zhijun He | 2d5e897 | 2014-02-07 16:13:46 -0800 | [diff] [blame] | 645 | * setting.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 646 | */ |
| 647 | public static final Key<Boolean> CONTROL_AWB_LOCK = |
| 648 | new Key<Boolean>("android.control.awbLock", boolean.class); |
| 649 | |
| 650 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 651 | * <p>Whether AWB is currently setting the color |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 652 | * transform fields, and what its illumination target |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 653 | * is</p> |
Zhijun He | 399f05d | 2014-01-15 11:31:30 -0800 | [diff] [blame] | 654 | * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p> |
| 655 | * <p>When set to the ON mode, the camera device's auto white balance |
| 656 | * routine is enabled, overriding the application's selected |
| 657 | * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and |
| 658 | * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p> |
| 659 | * <p>When set to the OFF mode, the camera device's auto white balance |
| 660 | * routine is disabled. The applicantion manually controls the white |
| 661 | * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, android.colorCorrection.gains |
| 662 | * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p> |
| 663 | * <p>When set to any other modes, the camera device's auto white balance |
| 664 | * routine is disabled. The camera device uses each particular illumination |
| 665 | * target for white balance adjustment.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 666 | * |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 667 | * @see CaptureRequest#COLOR_CORRECTION_GAINS |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 668 | * @see CaptureRequest#COLOR_CORRECTION_MODE |
| 669 | * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 670 | * @see CaptureRequest#CONTROL_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 671 | * @see #CONTROL_AWB_MODE_OFF |
| 672 | * @see #CONTROL_AWB_MODE_AUTO |
| 673 | * @see #CONTROL_AWB_MODE_INCANDESCENT |
| 674 | * @see #CONTROL_AWB_MODE_FLUORESCENT |
| 675 | * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT |
| 676 | * @see #CONTROL_AWB_MODE_DAYLIGHT |
| 677 | * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT |
| 678 | * @see #CONTROL_AWB_MODE_TWILIGHT |
| 679 | * @see #CONTROL_AWB_MODE_SHADE |
| 680 | */ |
| 681 | public static final Key<Integer> CONTROL_AWB_MODE = |
| 682 | new Key<Integer>("android.control.awbMode", int.class); |
| 683 | |
| 684 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 685 | * <p>List of areas to use for illuminant |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 686 | * estimation.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 687 | * <p>Only used in AUTO mode.</p> |
| 688 | * <p>Each area is a rectangle plus weight: xmin, ymin, |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 689 | * xmax, ymax, weight. The rectangle is defined to be inclusive of the |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 690 | * specified coordinates.</p> |
| 691 | * <p>The coordinate system is based on the active pixel array, |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 692 | * with (0,0) being the top-left pixel in the active pixel array, and |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 693 | * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1, |
| 694 | * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the |
Timothy Knight | 2629f27 | 2013-09-03 17:23:23 -0700 | [diff] [blame] | 695 | * bottom-right pixel in the active pixel array. The weight |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 696 | * should be nonnegative.</p> |
| 697 | * <p>If all regions have 0 weight, then no specific metering area |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 698 | * needs to be used by the HAL. If the metering region is |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 699 | * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 700 | * should ignore the sections outside the region and output the |
Ruben Brunk | f59521d | 2014-02-03 17:14:33 -0800 | [diff] [blame] | 701 | * used sections in the frame metadata.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 702 | * |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 703 | * @see CaptureRequest#SCALER_CROP_REGION |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 704 | * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 705 | */ |
| 706 | public static final Key<int[]> CONTROL_AWB_REGIONS = |
| 707 | new Key<int[]>("android.control.awbRegions", int[].class); |
| 708 | |
| 709 | /** |
Zhijun He | 66d065a | 2014-01-16 18:18:50 -0800 | [diff] [blame] | 710 | * <p>Information to the camera device 3A (auto-exposure, |
| 711 | * auto-focus, auto-white balance) routines about the purpose |
| 712 | * of this capture, to help the camera device to decide optimal 3A |
| 713 | * strategy.</p> |
| 714 | * <p>This control is only effective if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> |
| 715 | * and any 3A routine is active.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 716 | * |
| 717 | * @see CaptureRequest#CONTROL_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 718 | * @see #CONTROL_CAPTURE_INTENT_CUSTOM |
| 719 | * @see #CONTROL_CAPTURE_INTENT_PREVIEW |
| 720 | * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE |
| 721 | * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD |
| 722 | * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT |
| 723 | * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG |
| 724 | */ |
| 725 | public static final Key<Integer> CONTROL_CAPTURE_INTENT = |
| 726 | new Key<Integer>("android.control.captureIntent", int.class); |
| 727 | |
| 728 | /** |
Ruben Brunk | 5f1dcfe | 2014-01-17 16:42:51 -0800 | [diff] [blame] | 729 | * <p>A special color effect to apply.</p> |
| 730 | * <p>When this mode is set, a color effect will be applied |
| 731 | * to images produced by the camera device. The interpretation |
| 732 | * and implementation of these color effects is left to the |
| 733 | * implementor of the camera device, and should not be |
| 734 | * depended on to be consistent (or present) across all |
| 735 | * devices.</p> |
| 736 | * <p>A color effect will only be applied if |
| 737 | * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 738 | * |
| 739 | * @see CaptureRequest#CONTROL_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 740 | * @see #CONTROL_EFFECT_MODE_OFF |
| 741 | * @see #CONTROL_EFFECT_MODE_MONO |
| 742 | * @see #CONTROL_EFFECT_MODE_NEGATIVE |
| 743 | * @see #CONTROL_EFFECT_MODE_SOLARIZE |
| 744 | * @see #CONTROL_EFFECT_MODE_SEPIA |
| 745 | * @see #CONTROL_EFFECT_MODE_POSTERIZE |
| 746 | * @see #CONTROL_EFFECT_MODE_WHITEBOARD |
| 747 | * @see #CONTROL_EFFECT_MODE_BLACKBOARD |
| 748 | * @see #CONTROL_EFFECT_MODE_AQUA |
| 749 | */ |
| 750 | public static final Key<Integer> CONTROL_EFFECT_MODE = |
| 751 | new Key<Integer>("android.control.effectMode", int.class); |
| 752 | |
| 753 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 754 | * <p>Overall mode of 3A control |
| 755 | * routines</p> |
Zhijun He | f353742 | 2013-12-16 16:56:35 -0800 | [diff] [blame] | 756 | * <p>High-level 3A control. When set to OFF, all 3A control |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 757 | * by the camera device is disabled. The application must set the fields for |
Zhijun He | f353742 | 2013-12-16 16:56:35 -0800 | [diff] [blame] | 758 | * capture parameters itself.</p> |
| 759 | * <p>When set to AUTO, the individual algorithm controls in |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 760 | * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p> |
Zhijun He | f353742 | 2013-12-16 16:56:35 -0800 | [diff] [blame] | 761 | * <p>When set to USE_SCENE_MODE, the individual controls in |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 762 | * android.control.* are mostly disabled, and the camera device implements |
Zhijun He | f353742 | 2013-12-16 16:56:35 -0800 | [diff] [blame] | 763 | * one of the scene mode settings (such as ACTION, SUNSET, or PARTY) |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 764 | * as it wishes. The camera device scene mode 3A settings are provided by |
Zhijun He | f353742 | 2013-12-16 16:56:35 -0800 | [diff] [blame] | 765 | * android.control.sceneModeOverrides.</p> |
Zhijun He | 2d5e897 | 2014-02-07 16:13:46 -0800 | [diff] [blame] | 766 | * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference |
| 767 | * is that this frame will not be used by camera device background 3A statistics |
| 768 | * update, as if this frame is never captured. This mode can be used in the scenario |
| 769 | * where the application doesn't want a 3A manual control capture to affect |
| 770 | * the subsequent auto 3A capture results.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 771 | * |
| 772 | * @see CaptureRequest#CONTROL_AF_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 773 | * @see #CONTROL_MODE_OFF |
| 774 | * @see #CONTROL_MODE_AUTO |
| 775 | * @see #CONTROL_MODE_USE_SCENE_MODE |
Zhijun He | 2d5e897 | 2014-02-07 16:13:46 -0800 | [diff] [blame] | 776 | * @see #CONTROL_MODE_OFF_KEEP_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 777 | */ |
| 778 | public static final Key<Integer> CONTROL_MODE = |
| 779 | new Key<Integer>("android.control.mode", int.class); |
| 780 | |
| 781 | /** |
Ruben Brunk | e667936 | 2014-01-17 17:05:54 -0800 | [diff] [blame] | 782 | * <p>A camera mode optimized for conditions typical in a particular |
| 783 | * capture setting.</p> |
| 784 | * <p>This is the mode that that is active when |
| 785 | * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, |
| 786 | * these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, |
| 787 | * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.</p> |
| 788 | * <p>The interpretation and implementation of these scene modes is left |
| 789 | * to the implementor of the camera device. Their behavior will not be |
| 790 | * consistent across all devices, and any given device may only implement |
| 791 | * a subset of these modes.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 792 | * |
Ruben Brunk | e667936 | 2014-01-17 17:05:54 -0800 | [diff] [blame] | 793 | * @see CaptureRequest#CONTROL_AE_MODE |
| 794 | * @see CaptureRequest#CONTROL_AF_MODE |
| 795 | * @see CaptureRequest#CONTROL_AWB_MODE |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 796 | * @see CaptureRequest#CONTROL_MODE |
Ruben Brunk | e667936 | 2014-01-17 17:05:54 -0800 | [diff] [blame] | 797 | * @see #CONTROL_SCENE_MODE_DISABLED |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 798 | * @see #CONTROL_SCENE_MODE_FACE_PRIORITY |
| 799 | * @see #CONTROL_SCENE_MODE_ACTION |
| 800 | * @see #CONTROL_SCENE_MODE_PORTRAIT |
| 801 | * @see #CONTROL_SCENE_MODE_LANDSCAPE |
| 802 | * @see #CONTROL_SCENE_MODE_NIGHT |
| 803 | * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT |
| 804 | * @see #CONTROL_SCENE_MODE_THEATRE |
| 805 | * @see #CONTROL_SCENE_MODE_BEACH |
| 806 | * @see #CONTROL_SCENE_MODE_SNOW |
| 807 | * @see #CONTROL_SCENE_MODE_SUNSET |
| 808 | * @see #CONTROL_SCENE_MODE_STEADYPHOTO |
| 809 | * @see #CONTROL_SCENE_MODE_FIREWORKS |
| 810 | * @see #CONTROL_SCENE_MODE_SPORTS |
| 811 | * @see #CONTROL_SCENE_MODE_PARTY |
| 812 | * @see #CONTROL_SCENE_MODE_CANDLELIGHT |
| 813 | * @see #CONTROL_SCENE_MODE_BARCODE |
| 814 | */ |
| 815 | public static final Key<Integer> CONTROL_SCENE_MODE = |
| 816 | new Key<Integer>("android.control.sceneMode", int.class); |
| 817 | |
| 818 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 819 | * <p>Whether video stabilization is |
| 820 | * active</p> |
| 821 | * <p>If enabled, video stabilization can modify the |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 822 | * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 823 | * stabilized</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 824 | * |
| 825 | * @see CaptureRequest#SCALER_CROP_REGION |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 826 | */ |
| 827 | public static final Key<Boolean> CONTROL_VIDEO_STABILIZATION_MODE = |
| 828 | new Key<Boolean>("android.control.videoStabilizationMode", boolean.class); |
| 829 | |
| 830 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 831 | * <p>Operation mode for edge |
| 832 | * enhancement</p> |
Zhijun He | 2807936 | 2013-12-17 10:35:40 -0800 | [diff] [blame] | 833 | * <p>Edge/sharpness/detail enhancement. OFF means no |
| 834 | * enhancement will be applied by the HAL.</p> |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 835 | * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement |
Zhijun He | 2807936 | 2013-12-17 10:35:40 -0800 | [diff] [blame] | 836 | * will be applied. HIGH_QUALITY mode indicates that the |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 837 | * camera device will use the highest-quality enhancement algorithms, |
| 838 | * even if it slows down capture rate. FAST means the camera device will |
Zhijun He | 2807936 | 2013-12-17 10:35:40 -0800 | [diff] [blame] | 839 | * not slow down capture rate when applying edge enhancement.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 840 | * @see #EDGE_MODE_OFF |
| 841 | * @see #EDGE_MODE_FAST |
| 842 | * @see #EDGE_MODE_HIGH_QUALITY |
| 843 | */ |
| 844 | public static final Key<Integer> EDGE_MODE = |
| 845 | new Key<Integer>("android.edge.mode", int.class); |
| 846 | |
| 847 | /** |
Zhijun He | 66d065a | 2014-01-16 18:18:50 -0800 | [diff] [blame] | 848 | * <p>The desired mode for for the camera device's flash control.</p> |
| 849 | * <p>This control is only effective when flash unit is available |
Zhijun He | 153ac10 | 2014-02-03 12:25:12 -0800 | [diff] [blame] | 850 | * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p> |
Zhijun He | 66d065a | 2014-01-16 18:18:50 -0800 | [diff] [blame] | 851 | * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF. |
| 852 | * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, |
| 853 | * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p> |
| 854 | * <p>When set to OFF, the camera device will not fire flash for this capture.</p> |
| 855 | * <p>When set to SINGLE, the camera device will fire flash regardless of the camera |
| 856 | * device's auto-exposure routine's result. When used in still capture case, this |
| 857 | * control should be used along with AE precapture metering sequence |
| 858 | * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}), otherwise, the image may be incorrectly exposed.</p> |
| 859 | * <p>When set to TORCH, the flash will be on continuously. This mode can be used |
| 860 | * for use cases such as preview, auto-focus assist, still capture, or video recording.</p> |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 861 | * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p> |
Zhijun He | 66d065a | 2014-01-16 18:18:50 -0800 | [diff] [blame] | 862 | * |
| 863 | * @see CaptureRequest#CONTROL_AE_MODE |
| 864 | * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER |
| 865 | * @see CameraCharacteristics#FLASH_INFO_AVAILABLE |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 866 | * @see CaptureResult#FLASH_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 867 | * @see #FLASH_MODE_OFF |
| 868 | * @see #FLASH_MODE_SINGLE |
| 869 | * @see #FLASH_MODE_TORCH |
| 870 | */ |
| 871 | public static final Key<Integer> FLASH_MODE = |
| 872 | new Key<Integer>("android.flash.mode", int.class); |
| 873 | |
| 874 | /** |
Ruben Brunk | eba1b3a | 2014-02-07 18:23:50 -0800 | [diff] [blame] | 875 | * <p>Set operational mode for hot pixel correction.</p> |
| 876 | * <p>Hotpixel correction interpolates out, or otherwise removes, pixels |
| 877 | * that do not accurately encode the incoming light (i.e. pixels that |
| 878 | * are stuck at an arbitrary value).</p> |
| 879 | * @see #HOT_PIXEL_MODE_OFF |
| 880 | * @see #HOT_PIXEL_MODE_FAST |
| 881 | * @see #HOT_PIXEL_MODE_HIGH_QUALITY |
| 882 | */ |
| 883 | public static final Key<Integer> HOT_PIXEL_MODE = |
| 884 | new Key<Integer>("android.hotPixel.mode", int.class); |
| 885 | |
| 886 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 887 | * <p>GPS coordinates to include in output JPEG |
| 888 | * EXIF</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 889 | */ |
| 890 | public static final Key<double[]> JPEG_GPS_COORDINATES = |
| 891 | new Key<double[]>("android.jpeg.gpsCoordinates", double[].class); |
| 892 | |
| 893 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 894 | * <p>32 characters describing GPS algorithm to |
| 895 | * include in EXIF</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 896 | */ |
| 897 | public static final Key<String> JPEG_GPS_PROCESSING_METHOD = |
| 898 | new Key<String>("android.jpeg.gpsProcessingMethod", String.class); |
| 899 | |
| 900 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 901 | * <p>Time GPS fix was made to include in |
| 902 | * EXIF</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 903 | */ |
| 904 | public static final Key<Long> JPEG_GPS_TIMESTAMP = |
| 905 | new Key<Long>("android.jpeg.gpsTimestamp", long.class); |
| 906 | |
| 907 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 908 | * <p>Orientation of JPEG image to |
| 909 | * write</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 910 | */ |
| 911 | public static final Key<Integer> JPEG_ORIENTATION = |
| 912 | new Key<Integer>("android.jpeg.orientation", int.class); |
| 913 | |
| 914 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 915 | * <p>Compression quality of the final JPEG |
| 916 | * image</p> |
| 917 | * <p>85-95 is typical usage range</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 918 | */ |
| 919 | public static final Key<Byte> JPEG_QUALITY = |
| 920 | new Key<Byte>("android.jpeg.quality", byte.class); |
| 921 | |
| 922 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 923 | * <p>Compression quality of JPEG |
| 924 | * thumbnail</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 925 | */ |
| 926 | public static final Key<Byte> JPEG_THUMBNAIL_QUALITY = |
| 927 | new Key<Byte>("android.jpeg.thumbnailQuality", byte.class); |
| 928 | |
| 929 | /** |
Zhijun He | 5a9ff37 | 2013-12-26 11:49:09 -0800 | [diff] [blame] | 930 | * <p>Resolution of embedded JPEG thumbnail</p> |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 931 | * <p>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail, |
| 932 | * but the captured JPEG will still be a valid image.</p> |
Zhijun He | 5a9ff37 | 2013-12-26 11:49:09 -0800 | [diff] [blame] | 933 | * <p>When a jpeg image capture is issued, the thumbnail size selected should have |
| 934 | * the same aspect ratio as the jpeg image.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 935 | */ |
| 936 | public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE = |
| 937 | new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class); |
| 938 | |
| 939 | /** |
Zhijun He | fb46c64 | 2014-01-14 17:57:23 -0800 | [diff] [blame] | 940 | * <p>The ratio of lens focal length to the effective |
| 941 | * aperture diameter.</p> |
| 942 | * <p>This will only be supported on the camera devices that |
| 943 | * have variable aperture lens. The aperture value can only be |
| 944 | * one of the values listed in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}.</p> |
| 945 | * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, |
| 946 | * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, |
| 947 | * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and android.sensor.frameDuration |
| 948 | * to achieve manual exposure control.</p> |
| 949 | * <p>The requested aperture value may take several frames to reach the |
| 950 | * requested value; the camera device will report the current (intermediate) |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 951 | * aperture size in capture result metadata while the aperture is changing. |
| 952 | * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p> |
Zhijun He | fb46c64 | 2014-01-14 17:57:23 -0800 | [diff] [blame] | 953 | * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of |
| 954 | * the ON modes, this will be overridden by the camera device |
| 955 | * auto-exposure algorithm, the overridden values are then provided |
| 956 | * back to the user in the corresponding result.</p> |
| 957 | * |
Zhijun He | 399f05d | 2014-01-15 11:31:30 -0800 | [diff] [blame] | 958 | * @see CaptureRequest#CONTROL_AE_MODE |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 959 | * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 960 | * @see CaptureResult#LENS_STATE |
Eino-Ville Talvala | 265b34c | 2014-01-16 16:18:52 -0800 | [diff] [blame] | 961 | * @see CaptureRequest#SENSOR_EXPOSURE_TIME |
| 962 | * @see CaptureRequest#SENSOR_SENSITIVITY |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 963 | */ |
| 964 | public static final Key<Float> LENS_APERTURE = |
| 965 | new Key<Float>("android.lens.aperture", float.class); |
| 966 | |
| 967 | /** |
Ruben Brunk | 855bae4 | 2014-01-17 10:30:32 -0800 | [diff] [blame] | 968 | * <p>State of lens neutral density filter(s).</p> |
| 969 | * <p>This will not be supported on most camera devices. On devices |
| 970 | * where this is supported, this may only be set to one of the |
| 971 | * values included in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}.</p> |
| 972 | * <p>Lens filters are typically used to lower the amount of light the |
| 973 | * sensor is exposed to (measured in steps of EV). As used here, an EV |
| 974 | * step is the standard logarithmic representation, which are |
| 975 | * non-negative, and inversely proportional to the amount of light |
| 976 | * hitting the sensor. For example, setting this to 0 would result |
| 977 | * in no reduction of the incoming light, and setting this to 2 would |
| 978 | * mean that the filter is set to reduce incoming light by two stops |
| 979 | * (allowing 1/4 of the prior amount of light to the sensor).</p> |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 980 | * <p>It may take several frames before the lens filter density changes |
| 981 | * to the requested value. While the filter density is still changing, |
| 982 | * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p> |
Ruben Brunk | 855bae4 | 2014-01-17 10:30:32 -0800 | [diff] [blame] | 983 | * |
| 984 | * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 985 | * @see CaptureResult#LENS_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 986 | */ |
| 987 | public static final Key<Float> LENS_FILTER_DENSITY = |
| 988 | new Key<Float>("android.lens.filterDensity", float.class); |
| 989 | |
| 990 | /** |
Ruben Brunk | a20f4c2 | 2014-01-17 15:21:13 -0800 | [diff] [blame] | 991 | * <p>The current lens focal length; used for optical zoom.</p> |
| 992 | * <p>This setting controls the physical focal length of the camera |
| 993 | * device's lens. Changing the focal length changes the field of |
| 994 | * view of the camera device, and is usually used for optical zoom.</p> |
| 995 | * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this |
| 996 | * setting won't be applied instantaneously, and it may take several |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 997 | * frames before the lens can change to the requested focal length. |
Ruben Brunk | a20f4c2 | 2014-01-17 15:21:13 -0800 | [diff] [blame] | 998 | * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will |
| 999 | * be set to MOVING.</p> |
| 1000 | * <p>This is expected not to be supported on most devices.</p> |
| 1001 | * |
| 1002 | * @see CaptureRequest#LENS_APERTURE |
| 1003 | * @see CaptureRequest#LENS_FOCUS_DISTANCE |
| 1004 | * @see CaptureResult#LENS_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1005 | */ |
| 1006 | public static final Key<Float> LENS_FOCAL_LENGTH = |
| 1007 | new Key<Float>("android.lens.focalLength", float.class); |
| 1008 | |
| 1009 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1010 | * <p>Distance to plane of sharpest focus, |
| 1011 | * measured from frontmost surface of the lens</p> |
Zhijun He | ca1b73a | 2014-02-03 12:39:53 -0800 | [diff] [blame] | 1012 | * <p>0 means infinity focus. Used value will be clamped |
| 1013 | * to [0, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}].</p> |
| 1014 | * <p>Like {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, this setting won't be applied |
| 1015 | * instantaneously, and it may take several frames before the lens |
| 1016 | * can move to the requested focus distance. While the lens is still moving, |
| 1017 | * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p> |
| 1018 | * |
| 1019 | * @see CaptureRequest#LENS_FOCAL_LENGTH |
| 1020 | * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE |
| 1021 | * @see CaptureResult#LENS_STATE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1022 | */ |
| 1023 | public static final Key<Float> LENS_FOCUS_DISTANCE = |
| 1024 | new Key<Float>("android.lens.focusDistance", float.class); |
| 1025 | |
| 1026 | /** |
Ruben Brunk | 00849b3 | 2014-01-17 18:30:23 -0800 | [diff] [blame] | 1027 | * <p>Sets whether the camera device uses optical image stabilization (OIS) |
| 1028 | * when capturing images.</p> |
| 1029 | * <p>OIS is used to compensate for motion blur due to small movements of |
| 1030 | * the camera during capture. Unlike digital image stabilization, OIS makes |
| 1031 | * use of mechanical elements to stabilize the camera sensor, and thus |
| 1032 | * allows for longer exposure times before camera shake becomes |
| 1033 | * apparent.</p> |
| 1034 | * <p>This is not expected to be supported on most devices.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1035 | * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF |
| 1036 | * @see #LENS_OPTICAL_STABILIZATION_MODE_ON |
| 1037 | */ |
| 1038 | public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE = |
| 1039 | new Key<Integer>("android.lens.opticalStabilizationMode", int.class); |
| 1040 | |
| 1041 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1042 | * <p>Mode of operation for the noise reduction |
| 1043 | * algorithm</p> |
Zhijun He | 2807936 | 2013-12-17 10:35:40 -0800 | [diff] [blame] | 1044 | * <p>Noise filtering control. OFF means no noise reduction |
| 1045 | * will be applied by the HAL.</p> |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 1046 | * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering |
| 1047 | * will be applied. HIGH_QUALITY mode indicates that the camera device |
| 1048 | * will use the highest-quality noise filtering algorithms, |
| 1049 | * even if it slows down capture rate. FAST means the camera device should not |
Zhijun He | 2807936 | 2013-12-17 10:35:40 -0800 | [diff] [blame] | 1050 | * slow down capture rate when applying noise filtering.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1051 | * @see #NOISE_REDUCTION_MODE_OFF |
| 1052 | * @see #NOISE_REDUCTION_MODE_FAST |
| 1053 | * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY |
| 1054 | */ |
| 1055 | public static final Key<Integer> NOISE_REDUCTION_MODE = |
| 1056 | new Key<Integer>("android.noiseReduction.mode", int.class); |
| 1057 | |
| 1058 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1059 | * <p>An application-specified ID for the current |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1060 | * request. Must be maintained unchanged in output |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1061 | * frame</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1062 | * @hide |
| 1063 | */ |
| 1064 | public static final Key<Integer> REQUEST_ID = |
| 1065 | new Key<Integer>("android.request.id", int.class); |
| 1066 | |
| 1067 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1068 | * <p>(x, y, width, height).</p> |
| 1069 | * <p>A rectangle with the top-level corner of (x,y) and size |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1070 | * (width, height). The region of the sensor that is used for |
| 1071 | * output. Each stream must use this rectangle to produce its |
| 1072 | * output, cropping to a smaller region if necessary to |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1073 | * maintain the stream's aspect ratio.</p> |
| 1074 | * <p>HAL2.x uses only (x, y, width)</p> |
| 1075 | * <p>Any additional per-stream cropping must be done to |
| 1076 | * maximize the final pixel area of the stream.</p> |
| 1077 | * <p>For example, if the crop region is set to a 4:3 aspect |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1078 | * ratio, then 4:3 streams should use the exact crop |
| 1079 | * region. 16:9 streams should further crop vertically |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1080 | * (letterbox).</p> |
| 1081 | * <p>Conversely, if the crop region is set to a 16:9, then 4:3 |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1082 | * outputs should crop horizontally (pillarbox), and 16:9 |
| 1083 | * streams should match exactly. These additional crops must |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1084 | * be centered within the crop region.</p> |
| 1085 | * <p>The output streams must maintain square pixels at all |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1086 | * times, no matter what the relative aspect ratios of the |
| 1087 | * crop region and the stream are. Negative values for |
| 1088 | * corner are allowed for raw output if full pixel array is |
| 1089 | * larger than active pixel array. Width and height may be |
| 1090 | * rounded to nearest larger supportable width, especially |
| 1091 | * for raw output, where only a few fixed scales may be |
| 1092 | * possible. The width and height of the crop region cannot |
| 1093 | * be set to be smaller than floor( activeArraySize.width / |
| 1094 | * android.scaler.maxDigitalZoom ) and floor( |
| 1095 | * activeArraySize.height / android.scaler.maxDigitalZoom), |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1096 | * respectively.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1097 | */ |
| 1098 | public static final Key<android.graphics.Rect> SCALER_CROP_REGION = |
| 1099 | new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class); |
| 1100 | |
| 1101 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1102 | * <p>Duration each pixel is exposed to |
| 1103 | * light.</p> |
| 1104 | * <p>If the sensor can't expose this exact duration, it should shorten the |
| 1105 | * duration exposed to the nearest possible value (rather than expose longer).</p> |
| 1106 | * <p>1/10000 - 30 sec range. No bulb mode</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1107 | */ |
| 1108 | public static final Key<Long> SENSOR_EXPOSURE_TIME = |
| 1109 | new Key<Long>("android.sensor.exposureTime", long.class); |
| 1110 | |
| 1111 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1112 | * <p>Duration from start of frame exposure to |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1113 | * start of next frame exposure.</p> |
| 1114 | * <p>The maximum frame rate that can be supported by a camera subsystem is |
| 1115 | * a function of many factors:</p> |
| 1116 | * <ul> |
| 1117 | * <li>Requested resolutions of output image streams</li> |
| 1118 | * <li>Availability of binning / skipping modes on the imager</li> |
| 1119 | * <li>The bandwidth of the imager interface</li> |
| 1120 | * <li>The bandwidth of the various ISP processing blocks</li> |
| 1121 | * </ul> |
| 1122 | * <p>Since these factors can vary greatly between different ISPs and |
| 1123 | * sensors, the camera abstraction tries to represent the bandwidth |
| 1124 | * restrictions with as simple a model as possible.</p> |
| 1125 | * <p>The model presented has the following characteristics:</p> |
| 1126 | * <ul> |
| 1127 | * <li>The image sensor is always configured to output the smallest |
| 1128 | * resolution possible given the application's requested output stream |
| 1129 | * sizes. The smallest resolution is defined as being at least as large |
| 1130 | * as the largest requested output stream size; the camera pipeline must |
| 1131 | * never digitally upsample sensor data when the crop region covers the |
| 1132 | * whole sensor. In general, this means that if only small output stream |
| 1133 | * resolutions are configured, the sensor can provide a higher frame |
| 1134 | * rate.</li> |
| 1135 | * <li>Since any request may use any or all the currently configured |
| 1136 | * output streams, the sensor and ISP must be configured to support |
| 1137 | * scaling a single capture to all the streams at the same time. This |
| 1138 | * means the camera pipeline must be ready to produce the largest |
| 1139 | * requested output size without any delay. Therefore, the overall |
| 1140 | * frame rate of a given configured stream set is governed only by the |
| 1141 | * largest requested stream resolution.</li> |
| 1142 | * <li>Using more than one output stream in a request does not affect the |
| 1143 | * frame duration.</li> |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1144 | * <li>Certain format-streams may need to do additional background processing |
| 1145 | * before data is consumed/produced by that stream. These processors |
| 1146 | * can run concurrently to the rest of the camera pipeline, but |
| 1147 | * cannot process more than 1 capture at a time.</li> |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1148 | * </ul> |
| 1149 | * <p>The necessary information for the application, given the model above, |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1150 | * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field. |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1151 | * These are used to determine the maximum frame rate / minimum frame |
| 1152 | * duration that is possible for a given stream configuration.</p> |
| 1153 | * <p>Specifically, the application can use the following rules to |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1154 | * determine the minimum frame duration it can request from the camera |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1155 | * device:</p> |
| 1156 | * <ol> |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1157 | * <li>Let the set of currently configured input/output streams |
| 1158 | * be called <code>S</code>.</li> |
| 1159 | * <li>Find the minimum frame durations for each stream in <code>S</code>, by |
| 1160 | * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with |
| 1161 | * its respective size/format). Let this set of frame durations be called |
| 1162 | * <code>F</code>.</li> |
| 1163 | * <li>For any given request <code>R</code>, the minimum frame duration allowed |
| 1164 | * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams |
| 1165 | * used in <code>R</code> be called <code>S_r</code>.</li> |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1166 | * </ol> |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1167 | * <p>If none of the streams in <code>S_r</code> have a stall time (listed in |
| 1168 | * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in |
| 1169 | * <code>F</code> determines the steady state frame rate that the application will |
| 1170 | * get if it uses <code>R</code> as a repeating request. Let this special kind |
| 1171 | * of request be called <code>Rsimple</code>.</p> |
| 1172 | * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved |
| 1173 | * by a single capture of a new request <code>Rstall</code> (which has at least |
| 1174 | * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the |
| 1175 | * same minimum frame duration this will not cause a frame rate loss |
| 1176 | * if all buffers from the previous <code>Rstall</code> have already been |
| 1177 | * delivered.</p> |
| 1178 | * <p>For more details about stalling, see |
| 1179 | * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p> |
Igor Murashkin | 143aa0b | 2014-01-17 15:02:34 -0800 | [diff] [blame] | 1180 | * |
Igor Murashkin | a23ffb5 | 2014-02-07 18:52:34 -0800 | [diff] [blame] | 1181 | * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS |
| 1182 | * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1183 | */ |
| 1184 | public static final Key<Long> SENSOR_FRAME_DURATION = |
| 1185 | new Key<Long>("android.sensor.frameDuration", long.class); |
| 1186 | |
| 1187 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1188 | * <p>Gain applied to image data. Must be |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1189 | * implemented through analog gain only if set to values |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1190 | * below 'maximum analog sensitivity'.</p> |
| 1191 | * <p>If the sensor can't apply this exact gain, it should lessen the |
| 1192 | * gain to the nearest possible value (rather than gain more).</p> |
| 1193 | * <p>ISO 12232:2006 REI method</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1194 | */ |
| 1195 | public static final Key<Integer> SENSOR_SENSITIVITY = |
| 1196 | new Key<Integer>("android.sensor.sensitivity", int.class); |
| 1197 | |
| 1198 | /** |
Igor Murashkin | c127f05 | 2014-01-17 18:06:02 -0800 | [diff] [blame] | 1199 | * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern |
| 1200 | * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p> |
| 1201 | * <p>Each color channel is treated as an unsigned 32-bit integer. |
| 1202 | * The camera device then uses the most significant X bits |
| 1203 | * that correspond to how many bits are in its Bayer raw sensor |
| 1204 | * output.</p> |
| 1205 | * <p>For example, a sensor with RAW10 Bayer output would use the |
| 1206 | * 10 most significant bits from each color channel.</p> |
| 1207 | * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> |
| 1208 | * |
| 1209 | * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE |
| 1210 | */ |
| 1211 | public static final Key<int[]> SENSOR_TEST_PATTERN_DATA = |
| 1212 | new Key<int[]>("android.sensor.testPatternData", int[].class); |
| 1213 | |
| 1214 | /** |
| 1215 | * <p>When enabled, the sensor sends a test pattern instead of |
| 1216 | * doing a real exposure from the camera.</p> |
| 1217 | * <p>When a test pattern is enabled, all manual sensor controls specified |
| 1218 | * by android.sensor.* should be ignored. All other controls should |
| 1219 | * work as normal.</p> |
| 1220 | * <p>For example, if manual flash is enabled, flash firing should still |
| 1221 | * occur (and that the test pattern remain unmodified, since the flash |
| 1222 | * would not actually affect it).</p> |
| 1223 | * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> |
| 1224 | * @see #SENSOR_TEST_PATTERN_MODE_OFF |
| 1225 | * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR |
| 1226 | * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS |
| 1227 | * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY |
| 1228 | * @see #SENSOR_TEST_PATTERN_MODE_PN9 |
| 1229 | * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1 |
| 1230 | */ |
| 1231 | public static final Key<Integer> SENSOR_TEST_PATTERN_MODE = |
| 1232 | new Key<Integer>("android.sensor.testPatternMode", int.class); |
| 1233 | |
| 1234 | /** |
Zhijun He | ba93fe6 | 2014-01-17 16:43:05 -0800 | [diff] [blame] | 1235 | * <p>Quality of lens shading correction applied |
| 1236 | * to the image data.</p> |
| 1237 | * <p>When set to OFF mode, no lens shading correction will be applied by the |
| 1238 | * camera device, and an identity lens shading map data will be provided |
| 1239 | * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for lens |
| 1240 | * shading map with size specified as <code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]</code>, |
| 1241 | * the output {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} for this case will be an identity map |
| 1242 | * shown below:</p> |
| 1243 | * <pre><code>[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1244 | * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1245 | * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1246 | * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1247 | * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1248 | * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] |
| 1249 | * </code></pre> |
| 1250 | * <p>When set to other modes, lens shading correction will be applied by the |
| 1251 | * camera device. Applications can request lens shading map data by setting |
| 1252 | * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide |
| 1253 | * lens shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}, with size specified |
| 1254 | * by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}.</p> |
| 1255 | * |
| 1256 | * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE |
| 1257 | * @see CaptureResult#STATISTICS_LENS_SHADING_MAP |
| 1258 | * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE |
| 1259 | * @see #SHADING_MODE_OFF |
| 1260 | * @see #SHADING_MODE_FAST |
| 1261 | * @see #SHADING_MODE_HIGH_QUALITY |
| 1262 | * @hide |
| 1263 | */ |
| 1264 | public static final Key<Integer> SHADING_MODE = |
| 1265 | new Key<Integer>("android.shading.mode", int.class); |
| 1266 | |
| 1267 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1268 | * <p>State of the face detector |
| 1269 | * unit</p> |
| 1270 | * <p>Whether face detection is enabled, and whether it |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1271 | * should output just the basic fields or the full set of |
| 1272 | * fields. Value must be one of the |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1273 | * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}.</p> |
| 1274 | * |
| 1275 | * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1276 | * @see #STATISTICS_FACE_DETECT_MODE_OFF |
| 1277 | * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE |
| 1278 | * @see #STATISTICS_FACE_DETECT_MODE_FULL |
| 1279 | */ |
| 1280 | public static final Key<Integer> STATISTICS_FACE_DETECT_MODE = |
| 1281 | new Key<Integer>("android.statistics.faceDetectMode", int.class); |
| 1282 | |
| 1283 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1284 | * <p>Whether the HAL needs to output the lens |
| 1285 | * shading map in output result metadata</p> |
| 1286 | * <p>When set to ON, |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1287 | * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in |
Igor Murashkin | 7a9b30e | 2013-12-11 13:31:38 -0800 | [diff] [blame] | 1288 | * the output result metadata.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1289 | * |
| 1290 | * @see CaptureResult#STATISTICS_LENS_SHADING_MAP |
Eino-Ville Talvala | d96748b | 2013-09-12 11:11:27 -0700 | [diff] [blame] | 1291 | * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF |
| 1292 | * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON |
| 1293 | */ |
| 1294 | public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE = |
| 1295 | new Key<Integer>("android.statistics.lensShadingMapMode", int.class); |
| 1296 | |
| 1297 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1298 | * <p>Tonemapping / contrast / gamma curve for the blue |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1299 | * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is |
| 1300 | * CONTRAST_CURVE.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1301 | * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p> |
| 1302 | * |
Igor Murashkin | 3242f4f | 2014-01-15 12:27:41 -0800 | [diff] [blame] | 1303 | * @see CaptureRequest#TONEMAP_CURVE_RED |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 1304 | * @see CaptureRequest#TONEMAP_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1305 | */ |
Zhijun He | 3ffd705 | 2013-08-19 15:45:08 -0700 | [diff] [blame] | 1306 | public static final Key<float[]> TONEMAP_CURVE_BLUE = |
| 1307 | new Key<float[]>("android.tonemap.curveBlue", float[].class); |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1308 | |
| 1309 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1310 | * <p>Tonemapping / contrast / gamma curve for the green |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1311 | * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is |
| 1312 | * CONTRAST_CURVE.</p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1313 | * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p> |
| 1314 | * |
Igor Murashkin | 3242f4f | 2014-01-15 12:27:41 -0800 | [diff] [blame] | 1315 | * @see CaptureRequest#TONEMAP_CURVE_RED |
Zhijun He | 5f2a47f | 2014-01-16 15:44:41 -0800 | [diff] [blame] | 1316 | * @see CaptureRequest#TONEMAP_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1317 | */ |
Zhijun He | 3ffd705 | 2013-08-19 15:45:08 -0700 | [diff] [blame] | 1318 | public static final Key<float[]> TONEMAP_CURVE_GREEN = |
| 1319 | new Key<float[]>("android.tonemap.curveGreen", float[].class); |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1320 | |
| 1321 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1322 | * <p>Tonemapping / contrast / gamma curve for the red |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1323 | * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is |
| 1324 | * CONTRAST_CURVE.</p> |
| 1325 | * <p>Each channel's curve is defined by an array of control points:</p> |
| 1326 | * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = |
| 1327 | * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ] |
| 1328 | * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre> |
| 1329 | * <p>These are sorted in order of increasing <code>Pin</code>; it is always |
| 1330 | * guaranteed that input values 0.0 and 1.0 are included in the list to |
| 1331 | * define a complete mapping. For input values between control points, |
| 1332 | * the camera device must linearly interpolate between the control |
| 1333 | * points.</p> |
| 1334 | * <p>Each curve can have an independent number of points, and the number |
| 1335 | * of points can be less than max (that is, the request doesn't have to |
| 1336 | * always provide a curve with number of points equivalent to |
| 1337 | * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p> |
| 1338 | * <p>A few examples, and their corresponding graphical mappings; these |
| 1339 | * only specify the red channel and the precision is limited to 4 |
| 1340 | * digits, for conciseness.</p> |
| 1341 | * <p>Linear mapping:</p> |
| 1342 | * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 0, 1.0, 1.0 ] |
| 1343 | * </code></pre> |
| 1344 | * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p> |
| 1345 | * <p>Invert mapping:</p> |
| 1346 | * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 1.0, 1.0, 0 ] |
| 1347 | * </code></pre> |
| 1348 | * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p> |
| 1349 | * <p>Gamma 1/2.2 mapping, with 16 control points:</p> |
| 1350 | * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ |
| 1351 | * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812, |
| 1352 | * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072, |
| 1353 | * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685, |
| 1354 | * 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ] |
| 1355 | * </code></pre> |
| 1356 | * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p> |
| 1357 | * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p> |
| 1358 | * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ |
| 1359 | * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845, |
| 1360 | * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130, |
| 1361 | * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721, |
| 1362 | * 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ] |
| 1363 | * </code></pre> |
| 1364 | * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p> |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1365 | * |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1366 | * @see CaptureRequest#TONEMAP_CURVE_RED |
| 1367 | * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS |
Eino-Ville Talvala | 0da8bf5 | 2014-01-08 16:18:35 -0800 | [diff] [blame] | 1368 | * @see CaptureRequest#TONEMAP_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1369 | */ |
| 1370 | public static final Key<float[]> TONEMAP_CURVE_RED = |
| 1371 | new Key<float[]>("android.tonemap.curveRed", float[].class); |
| 1372 | |
| 1373 | /** |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1374 | * <p>High-level global contrast/gamma/tonemapping control.</p> |
| 1375 | * <p>When switching to an application-defined contrast curve by setting |
| 1376 | * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined |
| 1377 | * per-channel with a set of <code>(in, out)</code> points that specify the |
| 1378 | * mapping from input high-bit-depth pixel value to the output |
| 1379 | * low-bit-depth value. Since the actual pixel ranges of both input |
| 1380 | * and output may change depending on the camera pipeline, the values |
| 1381 | * are specified by normalized floating-point numbers.</p> |
| 1382 | * <p>More-complex color mapping operations such as 3D color look-up |
| 1383 | * tables, selective chroma enhancement, or other non-linear color |
| 1384 | * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is |
| 1385 | * CONTRAST_CURVE.</p> |
| 1386 | * <p>When using either FAST or HIGH_QUALITY, the camera device will |
| 1387 | * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed}, |
| 1388 | * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, and {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}. |
| 1389 | * These values are always available, and as close as possible to the |
| 1390 | * actually used nonlinear/nonglobal transforms.</p> |
| 1391 | * <p>If a request is sent with TRANSFORM_MATRIX with the camera device's |
| 1392 | * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be |
| 1393 | * roughly the same.</p> |
Igor Murashkin | 3242f4f | 2014-01-15 12:27:41 -0800 | [diff] [blame] | 1394 | * |
Igor Murashkin | e006093 | 2014-01-17 17:24:11 -0800 | [diff] [blame] | 1395 | * @see CaptureRequest#TONEMAP_CURVE_BLUE |
| 1396 | * @see CaptureRequest#TONEMAP_CURVE_GREEN |
| 1397 | * @see CaptureRequest#TONEMAP_CURVE_RED |
| 1398 | * @see CaptureRequest#TONEMAP_MODE |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1399 | * @see #TONEMAP_MODE_CONTRAST_CURVE |
| 1400 | * @see #TONEMAP_MODE_FAST |
| 1401 | * @see #TONEMAP_MODE_HIGH_QUALITY |
| 1402 | */ |
| 1403 | public static final Key<Integer> TONEMAP_MODE = |
| 1404 | new Key<Integer>("android.tonemap.mode", int.class); |
| 1405 | |
| 1406 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1407 | * <p>This LED is nominally used to indicate to the user |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1408 | * that the camera is powered on and may be streaming images back to the |
| 1409 | * Application Processor. In certain rare circumstances, the OS may |
| 1410 | * disable this when video is processed locally and not transmitted to |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1411 | * any untrusted applications.</p> |
| 1412 | * <p>In particular, the LED <em>must</em> always be on when the data could be |
| 1413 | * transmitted off the device. The LED <em>should</em> always be on whenever |
| 1414 | * data is stored locally on the device.</p> |
| 1415 | * <p>The LED <em>may</em> be off if a trusted application is using the data that |
| 1416 | * doesn't violate the above rules.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1417 | * @hide |
| 1418 | */ |
| 1419 | public static final Key<Boolean> LED_TRANSMIT = |
| 1420 | new Key<Boolean>("android.led.transmit", boolean.class); |
| 1421 | |
| 1422 | /** |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1423 | * <p>Whether black-level compensation is locked |
Eino-Ville Talvala | 0956af5 | 2013-12-26 13:19:10 -0800 | [diff] [blame] | 1424 | * to its current values, or is free to vary.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1425 | * <p>When set to ON, the values used for black-level |
Eino-Ville Talvala | 0956af5 | 2013-12-26 13:19:10 -0800 | [diff] [blame] | 1426 | * compensation will not change until the lock is set to |
| 1427 | * OFF.</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1428 | * <p>Since changes to certain capture parameters (such as |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1429 | * exposure time) may require resetting of black level |
Eino-Ville Talvala | 0956af5 | 2013-12-26 13:19:10 -0800 | [diff] [blame] | 1430 | * compensation, the camera device must report whether setting |
| 1431 | * the black level lock was successful in the output result |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1432 | * metadata.</p> |
| 1433 | * <p>For example, if a sequence of requests is as follows:</p> |
| 1434 | * <ul> |
| 1435 | * <li>Request 1: Exposure = 10ms, Black level lock = OFF</li> |
| 1436 | * <li>Request 2: Exposure = 10ms, Black level lock = ON</li> |
| 1437 | * <li>Request 3: Exposure = 10ms, Black level lock = ON</li> |
| 1438 | * <li>Request 4: Exposure = 20ms, Black level lock = ON</li> |
| 1439 | * <li>Request 5: Exposure = 20ms, Black level lock = ON</li> |
| 1440 | * <li>Request 6: Exposure = 20ms, Black level lock = ON</li> |
| 1441 | * </ul> |
Eino-Ville Talvala | 0956af5 | 2013-12-26 13:19:10 -0800 | [diff] [blame] | 1442 | * <p>And the exposure change in Request 4 requires the camera |
| 1443 | * device to reset the black level offsets, then the output |
| 1444 | * result metadata is expected to be:</p> |
Igor Murashkin | ace5bf0 | 2013-12-10 17:36:40 -0800 | [diff] [blame] | 1445 | * <ul> |
| 1446 | * <li>Result 1: Exposure = 10ms, Black level lock = OFF</li> |
| 1447 | * <li>Result 2: Exposure = 10ms, Black level lock = ON</li> |
| 1448 | * <li>Result 3: Exposure = 10ms, Black level lock = ON</li> |
| 1449 | * <li>Result 4: Exposure = 20ms, Black level lock = OFF</li> |
| 1450 | * <li>Result 5: Exposure = 20ms, Black level lock = ON</li> |
| 1451 | * <li>Result 6: Exposure = 20ms, Black level lock = ON</li> |
| 1452 | * </ul> |
Eino-Ville Talvala | 0956af5 | 2013-12-26 13:19:10 -0800 | [diff] [blame] | 1453 | * <p>This indicates to the application that on frame 4, black |
| 1454 | * levels were reset due to exposure value changes, and pixel |
| 1455 | * values may not be consistent across captures.</p> |
| 1456 | * <p>The camera device will maintain the lock to the extent |
| 1457 | * possible, only overriding the lock to OFF when changes to |
| 1458 | * other request parameters require a black level recalculation |
| 1459 | * or reset.</p> |
Eino-Ville Talvala | 5a32b20c | 2013-08-08 12:38:36 -0700 | [diff] [blame] | 1460 | */ |
| 1461 | public static final Key<Boolean> BLACK_LEVEL_LOCK = |
| 1462 | new Key<Boolean>("android.blackLevel.lock", boolean.class); |
| 1463 | |
| 1464 | /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ |
| 1465 | * End generated code |
| 1466 | *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/ |
| 1467 | } |