Merge "Minor optimizations to task loading/screenshot"
diff --git a/cmds/statsd/src/atoms.proto b/cmds/statsd/src/atoms.proto
index 48b85c9..ba93feb 100644
--- a/cmds/statsd/src/atoms.proto
+++ b/cmds/statsd/src/atoms.proto
@@ -102,8 +102,7 @@
/*
* *****************************************************************************
- * Below are all of the individual atoms that are logged by Android via statsd
- * and Westworld.
+ * Below are all of the individual atoms that are logged by Android via statsd.
*
* RULES:
* - The field ids for each atom must start at 1, and count upwards by 1.
diff --git a/cmds/statsd/src/atoms_copy.proto b/cmds/statsd/src/atoms_copy.proto
index 598c2bc..58e225a 100644
--- a/cmds/statsd/src/atoms_copy.proto
+++ b/cmds/statsd/src/atoms_copy.proto
@@ -104,8 +104,7 @@
/*
* *****************************************************************************
- * Below are all of the individual atoms that are logged by Android via statsd
- * and Westworld.
+ * Below are all of the individual atoms that are logged by Android via statsd.
*
* RULES:
* - The field ids for each atom must start at 1, and count upwards by 1.
diff --git a/core/java/android/bluetooth/BluetoothHidDevice.java b/core/java/android/bluetooth/BluetoothHidDevice.java
index e3d763a..6692e13 100644
--- a/core/java/android/bluetooth/BluetoothHidDevice.java
+++ b/core/java/android/bluetooth/BluetoothHidDevice.java
@@ -350,13 +350,22 @@
* application can be registered at a time. When no longer used, the application
* should be unregistered using
* {@link #unregisterApp(BluetoothHidDeviceAppConfiguration)}.
+ * The registration status should be tracked by the application by handling the callback from
+ * BluetoothHidDeviceCallback#onAppStatusChanged. The app registration status is not related
+ * to the return value of this method.
*
* @param sdp {@link BluetoothHidDeviceAppSdpSettings} object of HID Device SDP record.
+ * The HID Device SDP record is required.
* @param inQos {@link BluetoothHidDeviceAppQosSettings} object of Incoming QoS Settings.
+ * The Incoming QoS Settings are optional. Use null or a default
+ * BluetoothHidDeviceAppQosSettings.Builder for default values.
* @param outQos {@link BluetoothHidDeviceAppQosSettings} object of Outgoing QoS Settings.
+ * The Outgoing QoS Settings are optional. Use null or a default
+ * BluetoothHidDeviceAppQosSettings.Builder for default values.
* @param callback {@link BluetoothHidDeviceCallback} object to which callback messages will be
* sent.
- * @return
+ * The BluetoothHidDeviceCallback object is required.
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean registerApp(BluetoothHidDeviceAppSdpSettings sdp,
BluetoothHidDeviceAppQosSettings inQos, BluetoothHidDeviceAppQosSettings outQos,
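A hedged illustration of the registration contract described above (the hidDevice proxy, the sdp/inQos/outQos settings objects, and TAG are assumed to already exist in the application; this is a sketch, not reference usage):

```java
// Sketch: the return value only indicates the command was sent; the actual
// registration state arrives asynchronously via onAppStatusChanged.
BluetoothHidDeviceCallback callback = new BluetoothHidDeviceCallback() {
    @Override
    public void onAppStatusChanged(BluetoothDevice pluggedDevice,
            BluetoothHidDeviceAppConfiguration config, boolean registered) {
        Log.d(TAG, "app registered=" + registered);
    }
};
boolean sent = hidDevice.registerApp(sdp, inQos, outQos, callback);
Log.d(TAG, "registerApp command sent=" + sent);
```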
@@ -394,12 +403,15 @@
* {@link #registerApp
* (BluetoothHidDeviceAppQosSettings, BluetoothHidDeviceAppQosSettings,
* BluetoothHidDeviceAppQosSettings, BluetoothHidDeviceCallback)}
+ * The registration status should be tracked by the application by handling the callback from
+ * BluetoothHidDeviceCallback#onAppStatusChanged. The app registration status is not related
+ * to the return value of this method.
*
* @param config {@link BluetoothHidDeviceAppConfiguration} object as obtained from {@link
* BluetoothHidDeviceCallback#onAppStatusChanged(BluetoothDevice,
* BluetoothHidDeviceAppConfiguration,
* boolean)}
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean unregisterApp(BluetoothHidDeviceAppConfiguration config) {
Log.v(TAG, "unregisterApp()");
@@ -426,7 +438,7 @@
* @param id Report Id, as defined in descriptor. Can be 0 in case Report Id are not defined in
* descriptor.
* @param data Report data, not including Report Id.
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean sendReport(BluetoothDevice device, int id, byte[] data) {
boolean result = false;
@@ -452,7 +464,7 @@
* @param type Report Type, as in request.
* @param id Report Id, as in request.
* @param data Report data, not including Report Id.
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean replyReport(BluetoothDevice device, byte type, byte id, byte[] data) {
Log.v(TAG, "replyReport(): device=" + device + " type=" + type + " id=" + id);
@@ -478,7 +490,7 @@
* from {@link BluetoothHidDeviceCallback#onSetReport(BluetoothDevice, byte, byte, byte[])}.
*
* @param error Error to be sent for SET_REPORT via HANDSHAKE.
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean reportError(BluetoothDevice device, byte error) {
Log.v(TAG, "reportError(): device=" + device + " error=" + error);
@@ -524,10 +536,13 @@
}
/**
- * Initiates connection to host which currently has Virtual Cable
- * established with device.
+ * Initiates a connection to the host which is currently paired with this device.
+ * If the application is not registered, #connect(BluetoothDevice) will fail.
+ * The connection state should be tracked by the application by handling the callback from
+ * BluetoothHidDeviceCallback#onConnectionStateChanged. The connection state is not related
+ * to the return value of this method.
*
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean connect(BluetoothDevice device) {
Log.v(TAG, "connect(): device=" + device);
@@ -550,8 +565,11 @@
/**
* Disconnects from currently connected host.
+ * The connection state should be tracked by the application by handling the callback from
+ * BluetoothHidDeviceCallback#onConnectionStateChanged. The connection state is not related
+ * to the return value of this method.
*
- * @return
+ * @return true if the command is successfully sent; otherwise false.
*/
public boolean disconnect(BluetoothDevice device) {
Log.v(TAG, "disconnect(): device=" + device);
diff --git a/core/java/android/bluetooth/BluetoothHidDeviceAppQosSettings.java b/core/java/android/bluetooth/BluetoothHidDeviceAppQosSettings.java
index ccc3ef4..881ae98 100644
--- a/core/java/android/bluetooth/BluetoothHidDeviceAppQosSettings.java
+++ b/core/java/android/bluetooth/BluetoothHidDeviceAppQosSettings.java
@@ -45,6 +45,21 @@
public static final int MAX = (int) 0xffffffff;
+ /**
+ * Create a BluetoothHidDeviceAppQosSettings object for the Bluetooth L2CAP channel.
+ * The QoS Settings are optional.
+ * It is recommended to use BluetoothHidDeviceAppQosSettings.Builder.
+ * {@see <a href="https://www.bluetooth.com/specifications/profiles-overview">
+ * https://www.bluetooth.com/specifications/profiles-overview
+ * </a>
+ * Bluetooth HID Specification v1.1.1 Section 5.2 and Appendix D }
+ * @param serviceType L2CAP service type
+ * @param tokenRate L2CAP token rate
+ * @param tokenBucketSize L2CAP token bucket size
+ * @param peakBandwidth L2CAP peak bandwidth
+ * @param latency L2CAP latency
+ * @param delayVariation L2CAP delay variation
+ */
public BluetoothHidDeviceAppQosSettings(int serviceType, int tokenRate, int tokenBucketSize,
int peakBandwidth, int latency, int delayVariation) {
this.serviceType = serviceType;
@@ -59,7 +74,12 @@
public boolean equals(Object o) {
if (o instanceof BluetoothHidDeviceAppQosSettings) {
BluetoothHidDeviceAppQosSettings qos = (BluetoothHidDeviceAppQosSettings) o;
- return false;
+ return this.serviceType == qos.serviceType
+ && this.tokenRate == qos.tokenRate
+ && this.tokenBucketSize == qos.tokenBucketSize
+ && this.peakBandwidth == qos.peakBandwidth
+ && this.latency == qos.latency
+ && this.delayVariation == qos.delayVariation;
}
return false;
}
@@ -106,4 +126,85 @@
serviceType, tokenRate, tokenBucketSize, peakBandwidth, latency, delayVariation
};
}
+
+ /**
+ * A helper to build the BluetoothHidDeviceAppQosSettings object.
+ */
+ public static class Builder {
+ // Optional parameters - initialized to default values
+ private int mServiceType = SERVICE_BEST_EFFORT;
+ private int mTokenRate = 0;
+ private int mTokenBucketSize = 0;
+ private int mPeakBandwidth = 0;
+ private int mLatency = MAX;
+ private int mDelayVariation = MAX;
+
+ /**
+ * Set the service type.
+ * @param val service type. Should be one of {SERVICE_NO_TRAFFIC, SERVICE_BEST_EFFORT,
+ * SERVICE_GUARANTEED}, with SERVICE_BEST_EFFORT being the default.
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified service type.
+ */
+ public Builder serviceType(int val) {
+ mServiceType = val;
+ return this;
+ }
+
+ /**
+ * Set the token rate.
+ * @param val token rate
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified token rate.
+ */
+ public Builder tokenRate(int val) {
+ mTokenRate = val;
+ return this;
+ }
+
+ /**
+ * Set the token bucket size.
+ * @param val token bucket size
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified token bucket size.
+ */
+ public Builder tokenBucketSize(int val) {
+ mTokenBucketSize = val;
+ return this;
+ }
+
+ /**
+ * Set the peak bandwidth.
+ * @param val peak bandwidth
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified peak bandwidth.
+ */
+ public Builder peakBandwidth(int val) {
+ mPeakBandwidth = val;
+ return this;
+ }
+
+ /**
+ * Set the latency.
+ * @param val latency
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified latency.
+ */
+ public Builder latency(int val) {
+ mLatency = val;
+ return this;
+ }
+
+ /**
+ * Set the delay variation.
+ * @param val delay variation
+ * @return BluetoothHidDeviceAppQosSettings Builder with specified delay variation.
+ */
+ public Builder delayVariation(int val) {
+ mDelayVariation = val;
+ return this;
+ }
+
+ /**
+ * Build the BluetoothHidDeviceAppQosSettings object.
+ * @return BluetoothHidDeviceAppQosSettings object with current settings.
+ */
+ public BluetoothHidDeviceAppQosSettings build() {
+ return new BluetoothHidDeviceAppQosSettings(mServiceType, mTokenRate, mTokenBucketSize,
+ mPeakBandwidth, mLatency, mDelayVariation);
+ }
+ }
}
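A usage sketch for the Builder above; the numeric values are illustrative placeholders, not recommended QoS parameters:

```java
// Override a few fields; peakBandwidth and delayVariation keep their defaults.
BluetoothHidDeviceAppQosSettings inQos = new BluetoothHidDeviceAppQosSettings.Builder()
        .serviceType(BluetoothHidDeviceAppQosSettings.SERVICE_GUARANTEED)
        .tokenRate(800)       // illustrative value
        .tokenBucketSize(9)   // illustrative value
        .latency(10000)       // illustrative value
        .build();

// All defaults: SERVICE_BEST_EFFORT, zero token rate/bucket/bandwidth, MAX latency/variation.
BluetoothHidDeviceAppQosSettings defaultQos =
        new BluetoothHidDeviceAppQosSettings.Builder().build();
```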
diff --git a/core/java/android/bluetooth/BluetoothHidDeviceAppSdpSettings.java b/core/java/android/bluetooth/BluetoothHidDeviceAppSdpSettings.java
index f01c493..4669637 100644
--- a/core/java/android/bluetooth/BluetoothHidDeviceAppSdpSettings.java
+++ b/core/java/android/bluetooth/BluetoothHidDeviceAppSdpSettings.java
@@ -19,6 +19,8 @@
import android.os.Parcel;
import android.os.Parcelable;
+import java.util.Arrays;
+
/**
* Represents the Service Discovery Protocol (SDP) settings for a Bluetooth
* HID Device application.
@@ -39,6 +41,18 @@
public final byte subclass;
public final byte[] descriptors;
+ /**
+ * Create a BluetoothHidDeviceAppSdpSettings object for the Bluetooth SDP record.
+ * @param name Name of this Bluetooth HID device. Maximum length is 50 bytes.
+ * @param description Description for this Bluetooth HID device. Maximum length is 50 bytes.
+ * @param provider Provider of this Bluetooth HID device. Maximum length is 50 bytes.
+ * @param subclass Subclass of this Bluetooth HID device.
+ * See <a href="www.usb.org/developers/hidpage/HID1_11.pdf">
+ * www.usb.org/developers/hidpage/HID1_11.pdf Section 4.2</a>
+ * @param descriptors Descriptors of this Bluetooth HID device.
+ * See <a href="www.usb.org/developers/hidpage/HID1_11.pdf">
+ * www.usb.org/developers/hidpage/HID1_11.pdf Chapter 6</a>. Maximum length is 2048 bytes.
+ */
public BluetoothHidDeviceAppSdpSettings(String name, String description, String provider,
byte subclass, byte[] descriptors) {
this.name = name;
@@ -52,7 +66,11 @@
public boolean equals(Object o) {
if (o instanceof BluetoothHidDeviceAppSdpSettings) {
BluetoothHidDeviceAppSdpSettings sdp = (BluetoothHidDeviceAppSdpSettings) o;
- return false;
+ return this.name.equals(sdp.name)
+ && this.description.equals(sdp.description)
+ && this.provider.equals(sdp.provider)
+ && this.subclass == sdp.subclass
+ && Arrays.equals(this.descriptors, sdp.descriptors);
}
return false;
}
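For completeness, a hedged sketch of constructing the SDP settings documented above; the strings, subclass byte, and descriptor bytes are placeholders rather than a working HID descriptor:

```java
// Placeholder: a real application supplies its full HID report descriptor here
// (HID spec Chapter 6), up to 2048 bytes.
byte[] hidDescriptor = new byte[] { /* descriptor bytes */ };

BluetoothHidDeviceAppSdpSettings sdp = new BluetoothHidDeviceAppSdpSettings(
        "Example HID Device",   // name, at most 50 bytes
        "Example description",  // description, at most 50 bytes
        "Example provider",     // provider, at most 50 bytes
        (byte) 0x40,            // subclass; 0x40 (keyboard) is used purely as an example
        hidDescriptor);
```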
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 46ad3f0..3a3048e 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -1280,11 +1280,11 @@
* <ul>
* <li>Processed (but stalling): any non-RAW format with a stallDurations > 0.
* Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li>
- * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12 RAW12}.</li>
- * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
- * Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
- * {@link android.graphics.ImageFormat#NV21 NV21}, or
- * {@link android.graphics.ImageFormat#YV12 YV12}.</li>
+ * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10 RAW10}, or
+ * {@link android.graphics.ImageFormat#RAW12 RAW12}.</li>
+ * <li>Processed (but not-stalling): any non-RAW format without a stall duration. Typically
+ * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
+ * {@link android.graphics.ImageFormat#NV21 NV21}, or {@link android.graphics.ImageFormat#YV12 YV12}.</li>
* </ul>
* <p><b>Range of valid values:</b><br></p>
* <p>For processed (and stalling) format streams, >= 1.</p>
@@ -1376,8 +1376,7 @@
* CPU resources that will consume more power. The image format for this kind of an output stream can
* be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
* <p>A processed and stalling format is defined as any non-RAW format with a stallDurations
- * > 0. Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
- * stalling format.</p>
+ * > 0. Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a stalling format.</p>
* <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a
* processed format -- it will return a non-0 value for a stalling stream.</p>
* <p>LEGACY devices will support up to 1 processing/stalling stream.</p>
@@ -1535,8 +1534,7 @@
new Key<int[]>("android.request.availableRequestKeys", int[].class);
/**
- * <p>A list of all keys that the camera device has available
- * to use with {@link android.hardware.camera2.CaptureResult }.</p>
+ * <p>A list of all keys that the camera device has available to use with {@link android.hardware.camera2.CaptureResult }.</p>
* <p>Attempting to get a key from a CaptureResult that is not
* listed here will always return a <code>null</code> value. Getting a key from
* a CaptureResult that is listed here will generally never return a <code>null</code>
@@ -1561,8 +1559,7 @@
new Key<int[]>("android.request.availableResultKeys", int[].class);
/**
- * <p>A list of all keys that the camera device has available
- * to use with {@link android.hardware.camera2.CameraCharacteristics }.</p>
+ * <p>A list of all keys that the camera device has available to use with {@link android.hardware.camera2.CameraCharacteristics }.</p>
* <p>This entry follows the same rules as
* android.request.availableResultKeys (except that it applies for
* CameraCharacteristics instead of CaptureResult). See above for more
@@ -1843,8 +1840,6 @@
* <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
* android.scaler.availableStallDurations for more details about
* calculating the max frame rate.</p>
- * <p>(Keep in sync with
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p>
* <p><b>Units</b>: (format, width, height, ns) x n</p>
* <p>This key is available on all devices.</p>
*
@@ -1905,14 +1900,13 @@
* <ul>
* <li>{@link android.graphics.ImageFormat#YUV_420_888 }</li>
* <li>{@link android.graphics.ImageFormat#RAW10 }</li>
+ * <li>{@link android.graphics.ImageFormat#RAW12 }</li>
* </ul>
* <p>All other formats may or may not have an allowed stall duration on
* a per-capability basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
* for more details.</p>
* <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
* calculating the max frame rate (absent stalls).</p>
- * <p>(Keep up to date with
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } )</p>
* <p><b>Units</b>: (format, width, height, ns) x n</p>
* <p>This key is available on all devices.</p>
*
@@ -2195,9 +2189,9 @@
* the raw buffers produced by this sensor.</p>
* <p>If a camera device supports raw sensor formats, either this or
* {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} is the maximum dimensions for the raw
- * output formats listed in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} (this depends on
- * whether or not the image sensor returns buffers containing pixels that are not
- * part of the active array region for blacklevel calibration or other purposes).</p>
+ * output formats listed in {@link android.hardware.camera2.params.StreamConfigurationMap }
+ * (this depends on whether or not the image sensor returns buffers containing pixels that
+ * are not part of the active array region for blacklevel calibration or other purposes).</p>
* <p>Some parts of the full pixel array may not receive light from the scene,
* or be otherwise inactive. The {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} key
* defines the rectangle of active pixels that will be included in processed image
@@ -2205,7 +2199,6 @@
* <p><b>Units</b>: Pixels</p>
* <p>This key is available on all devices.</p>
*
- * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
* @see CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE
* @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
*/
@@ -2838,7 +2831,7 @@
* <p>See the individual level enums for full descriptions of the supported capabilities. The
* {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} entry describes the device's capabilities at a
* finer-grain level, if needed. In addition, many controls have their available settings or
- * ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics } entries.</p>
+ * ranges defined in individual entries from {@link android.hardware.camera2.CameraCharacteristics }.</p>
* <p>Some features are not part of any particular hardware level or capability and must be
* queried separately. These include:</p>
* <ul>
@@ -2973,7 +2966,6 @@
* <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
* android.scaler.availableStallDurations for more details about
* calculating the max frame rate.</p>
- * <p>(Keep in sync with {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p>
* <p><b>Units</b>: (format, width, height, ns) x n</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* <p><b>Limited capability</b> -
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 8c8c49f..4b57018 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -587,8 +587,8 @@
* then the list of resolutions for YUV_420_888 from {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } contains at
* least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20
* s.</p>
- * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10 }, {@link android.graphics.ImageFormat#RAW12 }, then those can also be captured at the same rate
- * as the maximum-size YUV_420_888 resolution is.</p>
+ * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10 }, {@link android.graphics.ImageFormat#RAW12 }, then those can also be
+ * captured at the same rate as the maximum-size YUV_420_888 resolution is.</p>
* <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
* as for the YUV_420_888 format also apply to the {@link android.graphics.ImageFormat#PRIVATE } format.</p>
* <p>In addition, the {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} field is guaranted to have a value between 0
@@ -610,25 +610,22 @@
* following:</p>
* <ul>
* <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li>
- * <li>{@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input format, that is,
- * YUV_420_888 is included in the lists of formats returned by
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input
+ * format, that is, YUV_420_888 is included in the lists of formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li>
* <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput }
* returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li>
* <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(YUV_420_888)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(YUV_420_888)}</li>
- * <li>Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate drop
- * relative to the sensor's maximum capture rate (at that resolution).</li>
+ * <li>Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate
+ * drop relative to the sensor's maximum capture rate (at that resolution).</li>
* <li>{@link android.graphics.ImageFormat#YUV_420_888 } will be reprocessable into both
* {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.</li>
* <li>The maximum available resolution for {@link android.graphics.ImageFormat#YUV_420_888 } streams (both input/output) will match the
* maximum available resolution of {@link android.graphics.ImageFormat#JPEG } streams.</li>
* <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li>
* <li>Only the below controls are effective for reprocessing requests and will be present
- * in capture results. The reprocess requests are from the original capture results that
- * are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 }
- * output buffers. All other controls in the reprocess requests will be ignored by the
- * camera device.<ul>
+ * in capture results. The reprocess requests are from the original capture results
+ * that are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 } output buffers. All other controls in the
+ * reprocess requests will be ignored by the camera device.<ul>
* <li>android.jpeg.*</li>
* <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li>
* <li>{@link CaptureRequest#EDGE_MODE android.edge.mode}</li>
@@ -654,13 +651,13 @@
* <p>The camera device can produce depth measurements from its field of view.</p>
* <p>This capability requires the camera device to support the following:</p>
* <ul>
- * <li>{@link android.graphics.ImageFormat#DEPTH16 } is supported as an output format.</li>
- * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is optionally supported as an
- * output format.</li>
- * <li>This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing},
- * will list the following calibration entries in both
- * {@link android.hardware.camera2.CameraCharacteristics } and
- * {@link android.hardware.camera2.CaptureResult }:<ul>
+ * <li>{@link android.graphics.ImageFormat#DEPTH16 } is supported as
+ * an output format.</li>
+ * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is
+ * optionally supported as an output format.</li>
+ * <li>This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing}, will
+ * list the following calibration metadata entries in both {@link android.hardware.camera2.CameraCharacteristics }
+ * and {@link android.hardware.camera2.CaptureResult }:<ul>
* <li>{@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}</li>
* <li>{@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}</li>
* <li>{@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}</li>
@@ -674,8 +671,7 @@
* </ul>
* <p>Generally, depth output operates at a slower frame rate than standard color capture,
* so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
- * should be accounted for (see
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }).
+ * should be accounted for (see {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }).
* On a device that supports both depth and color-based output, to enable smooth preview,
* using a repeating burst is recommended, where a depth-output target is only included
* once every N frames, where N is the ratio between preview output rate and depth output
@@ -692,23 +688,19 @@
public static final int REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8;
/**
- * <p>The device supports constrained high speed video recording (frame rate >=120fps)
- * use case. The camera device will support high speed capture session created by
- * {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }, which
- * only accepts high speed request lists created by
- * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }.</p>
- * <p>A camera device can still support high speed video streaming by advertising the high speed
- * FPS ranges in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}. For this case, all normal
- * capture request per frame control and synchronization requirements will apply to
- * the high speed fps ranges, the same as all other fps ranges. This capability describes
- * the capability of a specialized operating mode with many limitations (see below), which
- * is only targeted at high speed video recording.</p>
- * <p>The supported high speed video sizes and fps ranges are specified in
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.
- * To get desired output frame rates, the application is only allowed to select video size
- * and FPS range combinations provided by
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }.
- * The fps range can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.</p>
+ * <p>The device supports constrained high speed video recording (frame rate >=120fps) use
+ * case. The camera device will support high speed capture session created by {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }, which
+ * only accepts high speed request lists created by {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }.</p>
+ * <p>A camera device can still support high speed video streaming by advertising the high
+ * speed FPS ranges in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}. For this case, all
+ * normal capture request per frame control and synchronization requirements will apply
+ * to the high speed fps ranges, the same as all other fps ranges. This capability
+ * describes the capability of a specialized operating mode with many limitations (see
+ * below), which is only targeted at high speed video recording.</p>
+ * <p>The supported high speed video sizes and fps ranges are specified in {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.
+ * To get desired output frame rates, the application is only allowed to select video
+ * size and FPS range combinations provided by {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }. The
+ * fps range can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.</p>
* <p>In this capability, the camera device will override aeMode, awbMode, and afMode to
* ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
* controls will be overridden to be FAST. Therefore, no manual control of capture
@@ -743,19 +735,16 @@
* frame rate. If the destination surface is from preview window, the actual preview frame
* rate will be bounded by the screen refresh rate.</p>
* <p>The camera device will only support up to 2 high speed simultaneous output surfaces
- * (preview and recording surfaces)
- * in this mode. Above controls will be effective only if all of below conditions are true:</p>
+ * (preview and recording surfaces) in this mode. Above controls will be effective only
+ * if all of below conditions are true:</p>
* <ul>
* <li>The application creates a camera capture session with no more than 2 surfaces via
* {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }. The
- * targeted surfaces must be preview surface (either from
- * {@link android.view.SurfaceView } or {@link android.graphics.SurfaceTexture }) or
- * recording surface(either from {@link android.media.MediaRecorder#getSurface } or
- * {@link android.media.MediaCodec#createInputSurface }).</li>
+ * targeted surfaces must be preview surface (either from {@link android.view.SurfaceView } or {@link android.graphics.SurfaceTexture }) or recording
+ * surface(either from {@link android.media.MediaRecorder#getSurface } or {@link android.media.MediaCodec#createInputSurface }).</li>
* <li>The stream sizes are selected from the sizes reported by
* {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }.</li>
- * <li>The FPS ranges are selected from
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.</li>
+ * <li>The FPS ranges are selected from {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.</li>
* </ul>
* <p>When above conditions are NOT satistied,
* {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }
@@ -1038,8 +1027,7 @@
/**
* <p>This camera device is running in backward compatibility mode.</p>
- * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
- * documentation are supported.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are supported.</p>
* <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
* post-processing, arbitrary cropping regions, and has relaxed performance constraints.
* No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -1061,8 +1049,7 @@
* <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
* FULL-level capabilities.</p>
* <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
- * <code>LIMITED</code> tables in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
- * documentation are guaranteed to be supported.</p>
+ * <code>LIMITED</code> tables in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
* <p>The following additional capabilities are guaranteed to be supported:</p>
* <ul>
* <li><code>YUV_REPROCESSING</code> capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
@@ -2155,12 +2142,13 @@
public static final int EDGE_MODE_HIGH_QUALITY = 2;
/**
- * <p>Edge enhancement is applied at different levels for different output streams,
- * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) or below have
- * edge enhancement applied, while higher-resolution streams have no edge enhancement
- * applied. The level of edge enhancement for low-resolution streams is tuned so that
- * frame rate is not impacted, and the quality is equal to or better than FAST (since it
- * is only applied to lower-resolution outputs, quality may improve from FAST).</p>
+ * <p>Edge enhancement is applied at different
+ * levels for different output streams, based on resolution. Streams at maximum recording
+ * resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession })
+ * or below have edge enhancement applied, while higher-resolution streams have no edge
+ * enhancement applied. The level of edge enhancement for low-resolution streams is tuned
+ * so that frame rate is not impacted, and the quality is equal to or better than FAST
+ * (since it is only applied to lower-resolution outputs, quality may improve from FAST).</p>
* <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
* with YUV or PRIVATE reprocessing, where the application continuously captures
* high-resolution intermediate buffers into a circular buffer, from which a final image is
@@ -2287,12 +2275,12 @@
/**
* <p>Noise reduction is applied at different levels for different output streams,
- * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) or below have noise
- * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
- * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
- * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
- * is equal to or better than FAST (since it is only applied to lower-resolution outputs,
- * quality may improve from FAST).</p>
+ * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession })
+ * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
+ * supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of
+ * noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
+ * and the quality is equal to or better than FAST (since it is only applied to
+ * lower-resolution outputs, quality may improve from FAST).</p>
* <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
* with YUV or PRIVATE reprocessing, where the application continuously captures
* high-resolution intermediate buffers into a circular buffer, from which a final image is
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index c41fc02..0262ecb 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -680,7 +680,7 @@
* FAST or HIGH_QUALITY will yield a picture with the same white point
* as what was produced by the camera device in the earlier frame.</p>
* <p>The expected processing pipeline is as follows:</p>
- * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+ * <p><img alt="White balance processing pipeline" src="/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
* <p>The white balance is encoded by two values, a 4-channel white-balance
* gain vector (applied in the Bayer domain), and a 3x3 color transform
* matrix (applied after demosaic).</p>
@@ -1470,10 +1470,10 @@
* <p>When set to AUTO, the individual algorithm controls in
* android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
* <p>When set to USE_SCENE_MODE, the individual controls in
- * android.control.* are mostly disabled, and the camera device implements
- * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
- * as it wishes. The camera device scene mode 3A settings are provided by
- * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+ * android.control.* are mostly disabled, and the camera device
+ * implements one of the scene mode settings (such as ACTION,
+ * SUNSET, or PARTY) as it wishes. The camera device scene mode
+ * 3A settings are provided by {@link android.hardware.camera2.CaptureResult capture results}.</p>
* <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
* is that this frame will not be used by camera device background 3A statistics
* update, as if this frame is never captured. This mode can be used in the scenario
@@ -2268,45 +2268,35 @@
* can run concurrently to the rest of the camera pipeline, but
* cannot process more than 1 capture at a time.</li>
* </ul>
- * <p>The necessary information for the application, given the model above,
- * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using
+ * <p>The necessary information for the application, given the model above, is provided via
* {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
- * These are used to determine the maximum frame rate / minimum frame
- * duration that is possible for a given stream configuration.</p>
+ * These are used to determine the maximum frame rate / minimum frame duration that is
+ * possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
* determine the minimum frame duration it can request from the camera
* device:</p>
* <ol>
- * <li>Let the set of currently configured input/output streams
- * be called <code>S</code>.</li>
- * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
- * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
- * (with its respective size/format). Let this set of frame durations be
- * called <code>F</code>.</li>
- * <li>For any given request <code>R</code>, the minimum frame duration allowed
- * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
- * used in <code>R</code> be called <code>S_r</code>.</li>
+ * <li>Let the set of currently configured input/output streams be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking it up in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
+ * (with its respective size/format). Let this set of frame durations be called <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, the minimum frame duration allowed for <code>R</code> is the maximum
+ * out of all values in <code>F</code>. Let the streams used in <code>R</code> be called <code>S_r</code>.</li>
* </ol>
* <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
- * using its respective size/format), then the frame duration in <code>F</code>
- * determines the steady state frame rate that the application will get
- * if it uses <code>R</code> as a repeating request. Let this special kind of
- * request be called <code>Rsimple</code>.</p>
- * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
- * by a single capture of a new request <code>Rstall</code> (which has at least
- * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
- * same minimum frame duration this will not cause a frame rate loss
- * if all buffers from the previous <code>Rstall</code> have already been
- * delivered.</p>
- * <p>For more details about stalling, see
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
+ * using its respective size/format), then the frame duration in <code>F</code> determines the steady
+ * state frame rate that the application will get if it uses <code>R</code> as a repeating request. Let
+ * this special kind of request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
+ * new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
+ * <code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
+ * buffers from the previous <code>Rstall</code> have already been delivered.</p>
+ * <p>For more details about stalling, see {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
* OFF; otherwise the auto-exposure algorithm will override this value.</p>
* <p><b>Units</b>: Nanoseconds</p>
* <p><b>Range of valid values:</b><br>
- * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration},
- * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration
- * is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
+ * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}, {@link android.hardware.camera2.params.StreamConfigurationMap }.
+ * The duration is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* <p><b>Full capability</b> -
* Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
@@ -2315,7 +2305,6 @@
* @see CaptureRequest#CONTROL_AE_MODE
* @see CaptureRequest#CONTROL_MODE
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
- * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
* @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
*/
@PublicKey
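To make the frame-duration rules above concrete, a minimal app-side sketch (not part of this patch) of deriving the minimum frame duration for a request that targets a YUV preview stream and a JPEG stream; the characteristics object, formats, and sizes are assumptions for illustration:

```java
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

// Minimum frame durations F for the configured streams S (illustrative sizes).
long yuvMin  = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, new Size(1920, 1080));
long jpegMin = map.getOutputMinFrameDuration(ImageFormat.JPEG, new Size(4032, 3024));

// For a request R using both streams, the minimum allowed frame duration is max(F).
// JPEG is a stalling format, so occasional JPEG captures additionally incur
// map.getOutputStallDuration(ImageFormat.JPEG, size) on top of this.
long minFrameDurationNs = Math.max(yuvMin, jpegMin);
```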
@@ -2584,11 +2573,11 @@
* <p>Linear mapping:</p>
* <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
* </code></pre>
- * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
* <p>Invert mapping:</p>
* <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
* </code></pre>
- * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
* <p>Gamma 1/2.2 mapping, with 16 control points:</p>
* <pre><code>android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
@@ -2596,7 +2585,7 @@
* 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
* 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
* </code></pre>
- * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
* <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
* <pre><code>android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
@@ -2604,7 +2593,7 @@
* 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
* 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
* </code></pre>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p><b>Range of valid values:</b><br>
* 0-1 on both input and output coordinates, normalized
* as a floating-point value such that 0 == black and 1 == white.</p>
@@ -2646,11 +2635,11 @@
* <p>Linear mapping:</p>
* <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
* </code></pre>
- * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
* <p>Invert mapping:</p>
* <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
* </code></pre>
- * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
* <p>Gamma 1/2.2 mapping, with 16 control points:</p>
* <pre><code>curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
@@ -2658,7 +2647,7 @@
* (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
* (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
* </code></pre>
- * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
* <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
* <pre><code>curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
@@ -2666,7 +2655,7 @@
* (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
* (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
* </code></pre>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* <p><b>Full capability</b> -
* Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
@@ -2756,9 +2745,9 @@
* PRESET_CURVE</p>
* <p>The tonemap curve will be defined by specified standard.</p>
* <p>sRGB (approximated by 16 control points):</p>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p>Rec. 709 (approximated by 16 control points):</p>
- * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+ * <p><img alt="Rec. 709 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
* <p>Note that above figures show a 16 control points approximation of preset
* curves. Camera devices may apply a different approximation to the curve.</p>
* <p><b>Possible values:</b>
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 6d80c20..cfad098 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -390,7 +390,7 @@
* FAST or HIGH_QUALITY will yield a picture with the same white point
* as what was produced by the camera device in the earlier frame.</p>
* <p>The expected processing pipeline is as follows:</p>
- * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+ * <p><img alt="White balance processing pipeline" src="/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
* <p>The white balance is encoded by two values, a 4-channel white-balance
* gain vector (applied in the Bayer domain), and a 3x3 color transform
* matrix (applied after demosaic).</p>
@@ -1975,10 +1975,10 @@
* <p>When set to AUTO, the individual algorithm controls in
* android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
* <p>When set to USE_SCENE_MODE, the individual controls in
- * android.control.* are mostly disabled, and the camera device implements
- * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
- * as it wishes. The camera device scene mode 3A settings are provided by
- * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+ * android.control.* are mostly disabled, and the camera device
+ * implements one of the scene mode settings (such as ACTION,
+ * SUNSET, or PARTY) as it wishes. The camera device scene mode
+ * 3A settings are provided by {@link android.hardware.camera2.CaptureResult capture results}.</p>
* <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
* is that this frame will not be used by camera device background 3A statistics
* update, as if this frame is never captured. This mode can be used in the scenario
@@ -3108,45 +3108,35 @@
* can run concurrently to the rest of the camera pipeline, but
* cannot process more than 1 capture at a time.</li>
* </ul>
- * <p>The necessary information for the application, given the model above,
- * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using
+ * <p>The necessary information for the application, given the model above, is provided via
* {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
- * These are used to determine the maximum frame rate / minimum frame
- * duration that is possible for a given stream configuration.</p>
+ * These are used to determine the maximum frame rate / minimum frame duration that is
+ * possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
* determine the minimum frame duration it can request from the camera
* device:</p>
* <ol>
- * <li>Let the set of currently configured input/output streams
- * be called <code>S</code>.</li>
- * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
- * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
- * (with its respective size/format). Let this set of frame durations be
- * called <code>F</code>.</li>
- * <li>For any given request <code>R</code>, the minimum frame duration allowed
- * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
- * used in <code>R</code> be called <code>S_r</code>.</li>
+ * <li>Let the set of currently configured input/output streams be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking it up in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
+ * (with its respective size/format). Let this set of frame durations be called <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, the minimum frame duration allowed for <code>R</code> is the maximum
+ * out of all values in <code>F</code>. Let the streams used in <code>R</code> be called <code>S_r</code>.</li>
* </ol>
* <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
- * using its respective size/format), then the frame duration in <code>F</code>
- * determines the steady state frame rate that the application will get
- * if it uses <code>R</code> as a repeating request. Let this special kind of
- * request be called <code>Rsimple</code>.</p>
- * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
- * by a single capture of a new request <code>Rstall</code> (which has at least
- * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
- * same minimum frame duration this will not cause a frame rate loss
- * if all buffers from the previous <code>Rstall</code> have already been
- * delivered.</p>
- * <p>For more details about stalling, see
- * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
+ * using its respective size/format), then the frame duration in <code>F</code> determines the steady
+ * state frame rate that the application will get if it uses <code>R</code> as a repeating request. Let
+ * this special kind of request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
+ * new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
+ * <code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
+ * buffers from the previous <code>Rstall</code> have already been delivered.</p>
+ * <p>For more details about stalling, see {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
* OFF; otherwise the auto-exposure algorithm will override this value.</p>
* <p><b>Units</b>: Nanoseconds</p>
* <p><b>Range of valid values:</b><br>
- * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration},
- * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration
- * is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
+ * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}, {@link android.hardware.camera2.params.StreamConfigurationMap }.
+ * The duration is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* <p><b>Full capability</b> -
* Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
@@ -3155,7 +3145,6 @@
* @see CaptureRequest#CONTROL_AE_MODE
* @see CaptureRequest#CONTROL_MODE
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
- * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
* @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
*/
@PublicKey
@@ -3408,9 +3397,8 @@
* layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
* nth value given corresponds to the black level offset for the nth
* color channel listed in the CFA.</p>
- * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
- * available or the camera device advertises this key via
- * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
+ * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is available or the
+ * camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
* <p><b>Range of valid values:</b><br>
* >= 0 for each.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
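A minimal sketch of the availability check described in this hunk, assuming the key being documented is CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL and that `chars` (a CameraCharacteristics) and `result` (a completed CaptureResult) exist; the names are illustrative:

    // The key is only present if the device advertises it.
    if (chars.getAvailableCaptureResultKeys()
            .contains(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL)) {
        // One offset per color channel, in the CFA order given by
        // android.sensor.info.colorFilterArrangement.
        float[] blackLevels = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
    }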
@@ -3640,13 +3628,13 @@
* </code></pre>
* <p>The low-resolution scaling map images for each channel are
* (displayed using nearest-neighbor interpolation):</p>
- * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
- * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
- * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
- * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p><img alt="Red lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
* <p>As a visualization only, inverting the full-color map to recover an
* image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
- * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
* <p><b>Range of valid values:</b><br>
* Each gain factor is >= 1</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
@@ -3707,14 +3695,14 @@
* </code></pre>
* <p>The low-resolution scaling map images for each channel are
* (displayed using nearest-neighbor interpolation):</p>
- * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
- * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
- * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
- * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p><img alt="Red lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
* <p>As a visualization only, inverting the full-color map to recover an
* image of a gray wall (using bicubic interpolation for visual quality)
* as captured by the sensor gives:</p>
- * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
* <p>Note that the RAW image data might be subject to lens shading
* correction not reported on this map. Query
* {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has subject
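The shading map documented above is exposed to applications through android.hardware.camera2.params.LensShadingMap. A minimal sketch of requesting and reading it, assuming `builder` (a CaptureRequest.Builder) and `result` (a CaptureResult) exist; the names are illustrative:

    // Ask the device to report the low-resolution shading map.
    builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
            CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);

    LensShadingMap shadingMap =
            result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
    if (shadingMap != null) {
        // Each gain factor is >= 1; channels follow the R / G_even / G_odd / B layout.
        float redGainAtOrigin = shadingMap.getGainFactor(
                RggbChannelVector.RED, /*column=*/ 0, /*row=*/ 0);
    }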
@@ -3947,11 +3935,11 @@
* <p>Linear mapping:</p>
* <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
* </code></pre>
- * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
* <p>Invert mapping:</p>
* <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
* </code></pre>
- * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
* <p>Gamma 1/2.2 mapping, with 16 control points:</p>
* <pre><code>android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
@@ -3959,7 +3947,7 @@
* 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
* 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
* </code></pre>
- * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
* <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
* <pre><code>android.tonemap.curveRed = [
* 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
@@ -3967,7 +3955,7 @@
* 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
* 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
* </code></pre>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p><b>Range of valid values:</b><br>
* 0-1 on both input and output coordinates, normalized
* as a floating-point value such that 0 == black and 1 == white.</p>
@@ -4009,11 +3997,11 @@
* <p>Linear mapping:</p>
* <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
* </code></pre>
- * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
* <p>Invert mapping:</p>
* <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
* </code></pre>
- * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
* <p>Gamma 1/2.2 mapping, with 16 control points:</p>
* <pre><code>curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
@@ -4021,7 +4009,7 @@
* (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
* (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
* </code></pre>
- * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
* <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
* <pre><code>curveRed = [
* (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
@@ -4029,7 +4017,7 @@
* (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
* (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
* </code></pre>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* <p><b>Full capability</b> -
* Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
@@ -4119,9 +4107,9 @@
* PRESET_CURVE</p>
* <p>The tonemap curve will be defined by specified standard.</p>
* <p>sRGB (approximated by 16 control points):</p>
- * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
* <p>Rec. 709 (approximated by 16 control points):</p>
- * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+ * <p><img alt="Rec. 709 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
 * <p>Note that the above figures show a 16-control-point approximation of the preset
* curves. Camera devices may apply a different approximation to the curve.</p>
* <p><b>Possible values:</b>
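The curve and preset examples above map onto the public API as follows; a minimal sketch, assuming a CaptureRequest.Builder named `builder` (illustrative, not part of this change):

    // Linear mapping from the first example: control points are (Pin, Pout) pairs.
    float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f };
    TonemapCurve curve = new TonemapCurve(linear, linear, linear);
    builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
    builder.set(CaptureRequest.TONEMAP_CURVE, curve);

    // Or select one of the standard curves shown in the last hunk.
    builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
    builder.set(CaptureRequest.TONEMAP_PRESET_CURVE, CameraMetadata.TONEMAP_PRESET_CURVE_SRGB);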
diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java
index 935f5f3..6ebf3b9 100644
--- a/core/java/android/os/Build.java
+++ b/core/java/android/os/Build.java
@@ -103,7 +103,7 @@
/**
* A hardware serial number, if available. Alphanumeric only, case-insensitive.
- * For apps targeting SDK higher than {@link Build.VERSION_CODES#N_MR1} this
+ * For apps targeting SDK higher than {@link Build.VERSION_CODES#O_MR1} this
* field is set to {@link Build#UNKNOWN}.
*
* @deprecated Use {@link #getSerial()} instead.
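For callers migrating off the deprecated field, a minimal sketch of the replacement path (assuming the app either holds READ_PHONE_STATE or handles its absence):

    String serial;
    try {
        serial = Build.getSerial();   // API 26+; requires READ_PHONE_STATE
    } catch (SecurityException e) {
        serial = Build.UNKNOWN;       // permission not granted
    }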
diff --git a/core/java/android/os/Process.java b/core/java/android/os/Process.java
index b5d62e5..0874d93 100644
--- a/core/java/android/os/Process.java
+++ b/core/java/android/os/Process.java
@@ -151,6 +151,9 @@
*/
public static final int OTA_UPDATE_UID = 1061;
+ /** {@hide} */
+ public static final int NOBODY_UID = 9999;
+
/**
* Defines the start of a range of UIDs (and GIDs), going from this
* number to {@link #LAST_APPLICATION_UID} that are reserved for assigning
diff --git a/core/java/android/provider/SearchIndexableData.java b/core/java/android/provider/SearchIndexableData.java
index 5e0a76d..a60be53 100644
--- a/core/java/android/provider/SearchIndexableData.java
+++ b/core/java/android/provider/SearchIndexableData.java
@@ -56,6 +56,8 @@
/**
 * The key for the data. This is application specific. It should be unique per entry so that
 * the data can be retrieved by the key.
+ * <p/>
+ * This is required for indexing to work.
*/
public String key;
diff --git a/core/res/Android.mk b/core/res/Android.mk
index b066929..370a01f 100644
--- a/core/res/Android.mk
+++ b/core/res/Android.mk
@@ -29,6 +29,9 @@
# Framework doesn't need versioning since it IS the platform.
LOCAL_AAPT_FLAGS += --no-auto-version
+# Allow overlays to add resources
+LOCAL_AAPT_FLAGS += --auto-add-overlay
+
# Install this alongside the libraries.
LOCAL_MODULE_PATH := $(TARGET_OUT_JAVA_LIBRARIES)
diff --git a/docs/html/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png b/docs/html/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png
new file mode 100644
index 0000000..7578b48
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png
new file mode 100644
index 0000000..7b10f6b
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png
new file mode 100644
index 0000000..41972cf
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png
new file mode 100644
index 0000000..d26600b
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png
new file mode 100644
index 0000000..1e7208e
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png
new file mode 100644
index 0000000..ecef3ae
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png
new file mode 100644
index 0000000..a02fd89
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png
new file mode 100644
index 0000000..c309ac5
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png
new file mode 100644
index 0000000..414fad4
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png
new file mode 100644
index 0000000..c147a87
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png
Binary files differ
diff --git a/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png
new file mode 100644
index 0000000..4ce2125
--- /dev/null
+++ b/docs/html/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png
Binary files differ
diff --git a/services/core/java/com/android/server/am/ActiveServices.java b/services/core/java/com/android/server/am/ActiveServices.java
index 2131731..701c574 100644
--- a/services/core/java/com/android/server/am/ActiveServices.java
+++ b/services/core/java/com/android/server/am/ActiveServices.java
@@ -1043,8 +1043,8 @@
try {
if (AppGlobals.getPackageManager().checkPermission(
android.Manifest.permission.INSTANT_APP_FOREGROUND_SERVICE,
- r.appInfo.packageName,
- r.appInfo.uid) != PackageManager.PERMISSION_GRANTED) {
+ r.appInfo.packageName, UserHandle.getUserId(r.appInfo.uid))
+ != PackageManager.PERMISSION_GRANTED) {
throw new SecurityException("Instant app " + r.appInfo.packageName
+ " does not have permission to create foreground"
+ "services");
diff --git a/services/core/java/com/android/server/am/UserController.java b/services/core/java/com/android/server/am/UserController.java
index 2df5dc9..4e3d8d2 100644
--- a/services/core/java/com/android/server/am/UserController.java
+++ b/services/core/java/com/android/server/am/UserController.java
@@ -89,6 +89,7 @@
import com.android.internal.util.ArrayUtils;
import com.android.internal.util.Preconditions;
import com.android.internal.widget.LockPatternUtils;
+import com.android.server.FgThread;
import com.android.server.LocalServices;
import com.android.server.SystemServiceManager;
import com.android.server.pm.UserManagerService;
@@ -355,27 +356,35 @@
// Only keep marching forward if user is actually unlocked
if (!StorageManager.isUserKeyUnlocked(userId)) return;
synchronized (mLock) {
- // Bail if we ended up with a stale user
- if (mStartedUsers.get(uss.mHandle.getIdentifier()) != uss) return;
-
- // Do not proceed if unexpected state
- if (!uss.setState(STATE_RUNNING_LOCKED, STATE_RUNNING_UNLOCKING)) {
+ // Do not proceed if unexpected state or a stale user
+ if (mStartedUsers.get(userId) != uss || uss.state != STATE_RUNNING_LOCKED) {
return;
}
}
- mInjector.getUserManagerInternal().setUserState(userId, uss.state);
uss.mUnlockProgress.start();
// Prepare app storage before we go any further
uss.mUnlockProgress.setProgress(5,
mInjector.getContext().getString(R.string.android_start_title));
- mInjector.getUserManager().onBeforeUnlockUser(userId);
- uss.mUnlockProgress.setProgress(20);
- // Dispatch unlocked to system services; when fully dispatched,
- // that calls through to the next "unlocked" phase
- mHandler.obtainMessage(SYSTEM_USER_UNLOCK_MSG, userId, 0, uss)
- .sendToTarget();
+ // Call onBeforeUnlockUser on a worker thread that allows disk I/O
+ FgThread.getHandler().post(() -> {
+ mInjector.getUserManager().onBeforeUnlockUser(userId);
+ synchronized (mLock) {
+ // Do not proceed if unexpected state
+ if (!uss.setState(STATE_RUNNING_LOCKED, STATE_RUNNING_UNLOCKING)) {
+ return;
+ }
+ }
+ mInjector.getUserManagerInternal().setUserState(userId, uss.state);
+
+ uss.mUnlockProgress.setProgress(20);
+
+ // Dispatch unlocked to system services; when fully dispatched,
+ // that calls through to the next "unlocked" phase
+ mHandler.obtainMessage(SYSTEM_USER_UNLOCK_MSG, userId, 0, uss)
+ .sendToTarget();
+ });
}
/**
@@ -1819,7 +1828,10 @@
case SYSTEM_USER_UNLOCK_MSG:
final int userId = msg.arg1;
mInjector.getSystemServiceManager().unlockUser(userId);
- mInjector.loadUserRecents(userId);
+ // Loads recents on a worker thread that allows disk I/O
+ FgThread.getHandler().post(() -> {
+ mInjector.loadUserRecents(userId);
+ });
if (userId == UserHandle.USER_SYSTEM) {
mInjector.startPersistentApps(PackageManager.MATCH_DIRECT_BOOT_UNAWARE);
}
diff --git a/services/core/java/com/android/server/pm/PackageDexOptimizer.java b/services/core/java/com/android/server/pm/PackageDexOptimizer.java
index 29f48ee..00cfa31 100644
--- a/services/core/java/com/android/server/pm/PackageDexOptimizer.java
+++ b/services/core/java/com/android/server/pm/PackageDexOptimizer.java
@@ -154,7 +154,13 @@
targetInstructionSets : getAppDexInstructionSets(pkg.applicationInfo);
final String[] dexCodeInstructionSets = getDexCodeInstructionSets(instructionSets);
final List<String> paths = pkg.getAllCodePaths();
- final int sharedGid = UserHandle.getSharedAppGid(pkg.applicationInfo.uid);
+
+ int sharedGid = UserHandle.getSharedAppGid(pkg.applicationInfo.uid);
+ if (sharedGid == -1) {
+ Slog.wtf(TAG, "Well this is awkward; package " + pkg.applicationInfo.name + " had UID "
+ + pkg.applicationInfo.uid, new Throwable());
+ sharedGid = android.os.Process.NOBODY_UID;
+ }
// Get the class loader context dependencies.
// For each code path in the package, this array contains the class loader context that
diff --git a/services/robotests/Android.mk b/services/robotests/Android.mk
index f63d1e7..114929c 100644
--- a/services/robotests/Android.mk
+++ b/services/robotests/Android.mk
@@ -48,7 +48,7 @@
# TODO(b/69254249): Migrate to Robolectric 3.4.2
LOCAL_JAVA_LIBRARIES := \
junit \
- platform-robolectric-prebuilt
+ platform-robolectric-3.1.1-prebuilt
LOCAL_INSTRUMENTATION_FOR := FrameworksServicesLib
LOCAL_MODULE := FrameworksServicesRoboTests
diff --git a/tools/stats_log_api_gen/Android.bp b/tools/stats_log_api_gen/Android.bp
index a910c62..468864b 100644
--- a/tools/stats_log_api_gen/Android.bp
+++ b/tools/stats_log_api_gen/Android.bp
@@ -23,6 +23,10 @@
"Collation.cpp",
"main.cpp",
],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
shared_libs: [
"libstats_proto_host",
@@ -90,6 +94,10 @@
name: "libstatslog",
generated_sources: ["statslog.cpp"],
generated_headers: ["statslog.h"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
export_generated_headers: ["statslog.h"],
shared_libs: [
"liblog",
diff --git a/tools/stats_log_api_gen/main.cpp b/tools/stats_log_api_gen/main.cpp
index eb29150..c54ab18 100644
--- a/tools/stats_log_api_gen/main.cpp
+++ b/tools/stats_log_api_gen/main.cpp
@@ -96,8 +96,6 @@
static int
write_stats_log_cpp(FILE* out, const Atoms& atoms)
{
- int errorCount;
-
// Print prelude
fprintf(out, "// This file is autogenerated\n");
fprintf(out, "\n");
@@ -157,8 +155,6 @@
static int
write_stats_log_header(FILE* out, const Atoms& atoms)
{
- int errorCount;
-
// Print prelude
fprintf(out, "// This file is autogenerated\n");
fprintf(out, "\n");
@@ -229,8 +225,6 @@
static int
write_stats_log_java(FILE* out, const Atoms& atoms)
{
- int errorCount;
-
// Print prelude
fprintf(out, "// This file is autogenerated\n");
fprintf(out, "\n");
@@ -395,8 +389,6 @@
static int
write_stats_log_jni(FILE* out, const Atoms& atoms)
{
- int errorCount;
-
// Print prelude
fprintf(out, "// This file is autogenerated\n");
fprintf(out, "\n");