Merge "Force measurement in LinearLayout if any views are skipped"
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index c4b07cc..ff12d77 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -999,7 +999,7 @@
/**
* <p>Use specific scene mode. Enabling this disables
* control.aeMode, control.awbMode and control.afMode
- * controls; the HAL must ignore those settings while
+ * controls; the camera device will ignore those settings while
* USE_SCENE_MODE is active (except for FACE_PRIORITY
* scene mode). Other control entries are still active.
* This setting can only be used if availableSceneModes !=
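For context, a minimal sketch of the request an application would build to hand 3A over to a scene mode as described above, written against the public camera2 API. `cameraDevice` and `previewSurface` are placeholder arguments, and the chosen scene mode should first be checked against `android.control.availableSceneModes`.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.view.Surface;

class SceneModeExample {
    /** Build a preview request that hands 3A control over to a scene mode. */
    static CaptureRequest buildNightSceneRequest(CameraDevice cameraDevice,
            Surface previewSurface) throws CameraAccessException {
        CaptureRequest.Builder builder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.addTarget(previewSurface);

        // USE_SCENE_MODE overrides control.aeMode/awbMode/afMode; the camera
        // device ignores those settings while the scene mode is active
        // (except for the FACE_PRIORITY scene mode).
        builder.set(CaptureRequest.CONTROL_MODE,
                CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
        builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
        return builder.build();
    }
}
```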
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index dfde11b..c8668f5 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -390,7 +390,7 @@
/**
* <p>Gains applying to Bayer raw color channels for
- * white-balance</p>
+ * white-balance.</p>
* <p>The 4-channel white-balance gains are defined in
* the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
* for green pixels on even rows of the output, and <code>G_odd</code>
@@ -398,11 +398,11 @@
* does not support a separate gain for even/odd green channels,
* it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
* <code>G_even</code> in the output result metadata.</p>
- * <p>This array is either set by HAL when the request
+ * <p>This array is either set by the camera device when the request
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
* directly by the application in the request when the
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
- * <p>The output should be the gains actually applied by the HAL to
+ * <p>The output should be the gains actually applied by the camera device to
* the current frame.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_MODE
@@ -536,9 +536,9 @@
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
* <p>If all regions have 0 weight, then no specific metering area
- * needs to be used by the HAL. If the metering region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * needs to be used by the camera device. If the metering region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -579,13 +579,15 @@
/**
* <p>Whether AF is currently enabled, and what
* mode it is set to</p>
- * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
+ * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
+ * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} > 0</code>).</p>
* <p>If the lens is controlled by the camera device auto-focus algorithm,
* the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
* in result metadata.</p>
*
* @see CaptureResult#CONTROL_AF_STATE
* @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
* @see #CONTROL_AF_MODE_OFF
* @see #CONTROL_AF_MODE_AUTO
* @see #CONTROL_AF_MODE_MACRO
@@ -609,9 +611,9 @@
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
* <p>If all regions have 0 weight, then no specific focus area
- * needs to be used by the HAL. If the focusing region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * needs to be used by the camera device. If the focusing region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -651,14 +653,14 @@
/**
* <p>Whether AWB is currently setting the color
* transform fields, and what its illumination target
- * is</p>
+ * is.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
* <p>When set to the ON mode, the camera device's auto white balance
* routine is enabled, overriding the application's selected
* {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
* <p>When set to the OFF mode, the camera device's auto white balance
- * routine is disabled. The applicantion manually controls the white
+ * routine is disabled. The application manually controls the white
* balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
* and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
* <p>When set to any other modes, the camera device's auto white balance
@@ -695,10 +697,10 @@
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
- * <p>If all regions have 0 weight, then no specific metering area
- * needs to be used by the HAL. If the metering region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * <p>If all regions have 0 weight, then no specific auto-white balance (AWB) area
+ * needs to be used by the camera device. If the AWB region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -753,7 +755,7 @@
/**
* <p>Overall mode of 3A control
- * routines</p>
+ * routines.</p>
* <p>High-level 3A control. When set to OFF, all 3A control
* by the camera device is disabled. The application must set the fields for
* capture parameters itself.</p>
@@ -830,9 +832,9 @@
/**
* <p>Operation mode for edge
- * enhancement</p>
+ * enhancement.</p>
* <p>Edge/sharpness/detail enhancement. OFF means no
- * enhancement will be applied by the HAL.</p>
+ * enhancement will be applied by the camera device.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
* will be applied. HIGH_QUALITY mode indicates that the
* camera device will use the highest-quality enhancement algorithms,
@@ -1044,7 +1046,7 @@
* <p>Mode of operation for the noise reduction
* algorithm</p>
* <p>Noise filtering control. OFF means no noise reduction
- * will be applied by the HAL.</p>
+ * will be applied by the camera device.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
* will be applied. HIGH_QUALITY mode indicates that the camera device
* will use the highest-quality noise filtering algorithms,
@@ -1285,8 +1287,8 @@
new Key<Integer>("android.statistics.faceDetectMode", int.class);
/**
- * <p>Whether the HAL needs to output the lens
- * shading map in output result metadata</p>
+ * <p>Whether the camera device will output the lens
+ * shading map in output result metadata.</p>
* <p>When set to ON,
* {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in
* the output result metadata.</p>
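To illustrate the application-controlled path these docs describe — AWB off, gains and transform supplied directly, plus one explicit metering region — here is a minimal sketch against the released camera2 API. The typed `RggbChannelVector`/`ColorSpaceTransform`/`MeteringRectangle` wrappers and all numeric values are illustrative assumptions; at the time of this change the underlying keys were plain arrays.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.RggbChannelVector;
import android.view.Surface;

class ManualWbExample {
    /** Disable AWB and apply explicit white-balance gains [R G_even G_odd B]. */
    static CaptureRequest buildManualWbRequest(CameraDevice cameraDevice,
            Surface target) throws CameraAccessException {
        CaptureRequest.Builder builder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        builder.addTarget(target);

        // With AWB off and TRANSFORM_MATRIX mode, the application supplies the
        // gains and transform; the camera device applies them as-is.
        builder.set(CaptureRequest.CONTROL_AWB_MODE,
                CameraMetadata.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f)); // placeholder gains
        builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                new ColorSpaceTransform(new int[] {
                        1, 1, 0, 1, 0, 1,   // identity matrix expressed as
                        0, 1, 1, 1, 0, 1,   // numerator/denominator pairs
                        0, 1, 0, 1, 1, 1 }));

        // One AE metering region inside the active pixel array; the weight must
        // be non-negative, and sections outside the crop region are ignored.
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {
                new MeteringRectangle(0, 0, 640, 480, 1000) });
        return builder.build();
    }
}
```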
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 9981bf9..0749edd 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -143,7 +143,7 @@
/**
* <p>Gains applying to Bayer raw color channels for
- * white-balance</p>
+ * white-balance.</p>
* <p>The 4-channel white-balance gains are defined in
* the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
* for green pixels on even rows of the output, and <code>G_odd</code>
@@ -151,11 +151,11 @@
* does not support a separate gain for even/odd green channels,
* it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
* <code>G_even</code> in the output result metadata.</p>
- * <p>This array is either set by HAL when the request
+ * <p>This array is either set by the camera device when the request
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
* directly by the application in the request when the
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
- * <p>The output should be the gains actually applied by the HAL to
+ * <p>The output should be the gains actually applied by the camera device to
* the current frame.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_MODE
@@ -225,9 +225,9 @@
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
* <p>If all regions have 0 weight, then no specific metering area
- * needs to be used by the HAL. If the metering region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * needs to be used by the camera device. If the metering region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -437,13 +437,15 @@
/**
* <p>Whether AF is currently enabled, and what
* mode it is set to</p>
- * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
+ * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
+ * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} > 0</code>).</p>
* <p>If the lens is controlled by the camera device auto-focus algorithm,
* the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
* in result metadata.</p>
*
* @see CaptureResult#CONTROL_AF_STATE
* @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
* @see #CONTROL_AF_MODE_OFF
* @see #CONTROL_AF_MODE_AUTO
* @see #CONTROL_AF_MODE_MACRO
@@ -467,9 +469,9 @@
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
* <p>If all regions have 0 weight, then no specific focus area
- * needs to be used by the HAL. If the focusing region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * needs to be used by the camera device. If the focusing region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -889,14 +891,14 @@
/**
* <p>Whether AWB is currently setting the color
* transform fields, and what its illumination target
- * is</p>
+ * is.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
* <p>When set to the ON mode, the camera device's auto white balance
* routine is enabled, overriding the application's selected
* {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
* <p>When set to the OFF mode, the camera device's auto white balance
- * routine is disabled. The applicantion manually controls the white
+ * routine is disabled. The application manually controls the white
* balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
* and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
* <p>When set to any other modes, the camera device's auto white balance
@@ -933,10 +935,10 @@
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
* bottom-right pixel in the active pixel array. The weight
* should be nonnegative.</p>
- * <p>If all regions have 0 weight, then no specific metering area
- * needs to be used by the HAL. If the metering region is
- * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
- * should ignore the sections outside the region and output the
+ * <p>If all regions have 0 weight, then no specific auto-white balance (AWB) area
+ * needs to be used by the camera device. If the AWB region is
+ * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the camera device
+ * will ignore the sections outside the region and output the
* used sections in the frame metadata.</p>
*
* @see CaptureRequest#SCALER_CROP_REGION
@@ -1077,7 +1079,7 @@
/**
* <p>Overall mode of 3A control
- * routines</p>
+ * routines.</p>
* <p>High-level 3A control. When set to OFF, all 3A control
* by the camera device is disabled. The application must set the fields for
* capture parameters itself.</p>
@@ -1105,9 +1107,9 @@
/**
* <p>Operation mode for edge
- * enhancement</p>
+ * enhancement.</p>
* <p>Edge/sharpness/detail enhancement. OFF means no
- * enhancement will be applied by the HAL.</p>
+ * enhancement will be applied by the camera device.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
* will be applied. HIGH_QUALITY mode indicates that the
* camera device will use the highest-quality enhancement algorithms,
@@ -1385,7 +1387,7 @@
* <p>Mode of operation for the noise reduction
* algorithm</p>
* <p>Noise filtering control. OFF means no noise reduction
- * will be applied by the HAL.</p>
+ * will be applied by the camera device.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
* will be applied. HIGH_QUALITY mode indicates that the camera device
* will use the highest-quality noise filtering algorithms,
@@ -1411,7 +1413,7 @@
* before the FINAL buffer for frame 4. PARTIAL buffers may be returned
* in any order relative to other frames, but all PARTIAL buffers for a given
* capture must arrive before the FINAL buffer for that capture. This entry may
- * only be used by the HAL if quirks.usePartialResult is set to 1.</p>
+ * only be used by the camera device if quirks.usePartialResult is set to 1.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
* @hide
*/
@@ -1904,7 +1906,7 @@
/**
* <p>The best-fit color channel gains calculated
- * by the HAL's statistics units for the current output frame</p>
+ * by the camera device's statistics units for the current output frame.</p>
* <p>This may be different than the gains used for this frame,
* since statistics processing on data from a new frame
* typically completes after the transform has already been
@@ -1923,11 +1925,11 @@
/**
* <p>The best-fit color transform matrix estimate
- * calculated by the HAL's statistics units for the current
- * output frame</p>
- * <p>The HAL must provide the estimate from its
+ * calculated by the camera device's statistics units for the current
+ * output frame.</p>
+ * <p>The camera device will provide the estimate from its
* statistics unit on the white balance transforms to use
- * for the next frame. These are the values the HAL believes
+ * for the next frame. These are the values the camera device believes
* are the best fit for the current output frame. This may
* be different than the transform used for this frame, since
* statistics processing on data from a new frame typically
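On the result side, a minimal sketch of consuming the per-frame values these docs describe (AF state, the gains actually applied), assuming the released `CameraCaptureSession.CaptureCallback`; the capture-session wiring is omitted.

```java
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.RggbChannelVector;
import android.util.Log;

class ResultCallbackExample extends CameraCaptureSession.CaptureCallback {
    private static final String TAG = "ResultCallbackExample";

    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        // AF state is reported per-frame when the camera device controls the lens.
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        if (afState != null
                && afState == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED) {
            Log.d(TAG, "Lens focused and locked; still capture can be issued");
        }

        // The gains actually applied by the camera device to this frame.
        RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
        if (gains != null) {
            Log.d(TAG, "Applied WB gains: " + gains);
        }
    }
}
```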
diff --git a/core/java/com/android/internal/os/BatteryStatsHelper.java b/core/java/com/android/internal/os/BatteryStatsHelper.java
index 8a15c99..e0cf435 100644
--- a/core/java/com/android/internal/os/BatteryStatsHelper.java
+++ b/core/java/com/android/internal/os/BatteryStatsHelper.java
@@ -642,8 +642,8 @@
final long radioDataUptimeMs
= mStats.getMobileRadioActiveTime(mBatteryRealtime, mStatsType) / 1000;
- final double mobilePps = radioDataUptimeMs != 0
- ? mobileData / (double)radioDataUptimeMs
+ final double mobilePps = (mobileData != 0 && radioDataUptimeMs != 0)
+ ? (mobileData / (double)radioDataUptimeMs)
: (((double)MOBILE_BPS) / 8 / 2048);
return (MOBILE_POWER / mobilePps) / (60*60);
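A self-contained sketch of the guarded ratio in the hunk above: compute bytes per millisecond of radio-active time, and fall back to a nominal rate only when either measurement is zero, so a fresh stats session can neither divide by zero nor report an inflated cost. The constants here are placeholders, not the platform's values.

```java
class MobilePowerPerByte {
    private static final double MOBILE_BPS = 200_000; // placeholder link rate, bits/sec
    private static final double MOBILE_POWER = 200;   // placeholder radio-active draw, mA

    static double powerPerByte(long mobileBytes, long radioActiveTimeMs) {
        // Bytes per millisecond of radio-active time, or the nominal rate when
        // either side of the ratio is zero (no traffic or no uptime recorded yet).
        final double mobilePps = (mobileBytes != 0 && radioActiveTimeMs != 0)
                ? (mobileBytes / (double) radioActiveTimeMs)
                : (MOBILE_BPS / 8 / 2048);
        // Cost of moving one byte, mirroring the scaling in the change above.
        return (MOBILE_POWER / mobilePps) / (60 * 60);
    }
}
```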
diff --git a/core/java/com/android/internal/os/BatteryStatsImpl.java b/core/java/com/android/internal/os/BatteryStatsImpl.java
index c3e9862..40e8727 100644
--- a/core/java/com/android/internal/os/BatteryStatsImpl.java
+++ b/core/java/com/android/internal/os/BatteryStatsImpl.java
@@ -56,7 +56,6 @@
import com.android.internal.util.ArrayUtils;
import com.android.internal.util.FastPrintWriter;
import com.android.internal.util.JournaledFile;
-import com.google.android.collect.Sets;
import java.io.File;
import java.io.FileInputStream;
@@ -65,7 +64,6 @@
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -372,8 +370,10 @@
new HashMap<String, KernelWakelockStats>();
private final NetworkStatsFactory mNetworkStatsFactory = new NetworkStatsFactory();
- private NetworkStats mLastMobileSnapshot;
- private NetworkStats mLastWifiSnapshot;
+ private NetworkStats mCurMobileSnapshot = new NetworkStats(SystemClock.elapsedRealtime(), 50);
+ private NetworkStats mLastMobileSnapshot = new NetworkStats(SystemClock.elapsedRealtime(), 50);
+ private NetworkStats mCurWifiSnapshot = new NetworkStats(SystemClock.elapsedRealtime(), 50);
+ private NetworkStats mLastWifiSnapshot = new NetworkStats(SystemClock.elapsedRealtime(), 50);
private NetworkStats mTmpNetworkStats;
private final NetworkStats.Entry mTmpNetworkStatsEntry = new NetworkStats.Entry();
@@ -5608,6 +5608,7 @@
if (mMobileIfaces.length > 0) {
final NetworkStats snapshot;
+ final NetworkStats last = mCurMobileSnapshot;
try {
snapshot = mNetworkStatsFactory.readNetworkStatsDetail(UID_ALL,
mMobileIfaces, NetworkStats.TAG_NONE, mLastMobileSnapshot);
@@ -5616,15 +5617,12 @@
return;
}
- if (mLastMobileSnapshot == null) {
- mLastMobileSnapshot = snapshot;
- return;
- }
+ mCurMobileSnapshot = snapshot;
+ mLastMobileSnapshot = last;
- final NetworkStats delta = NetworkStats.subtract(snapshot, mLastMobileSnapshot,
+ final NetworkStats delta = NetworkStats.subtract(snapshot, last,
null, null, mTmpNetworkStats);
mTmpNetworkStats = delta;
- mLastMobileSnapshot = snapshot;
final int size = delta.size();
for (int i = 0; i < size; i++) {
@@ -5649,6 +5647,7 @@
if (mWifiIfaces.length > 0) {
final NetworkStats snapshot;
+ final NetworkStats last = mCurWifiSnapshot;
try {
snapshot = mNetworkStatsFactory.readNetworkStatsDetail(UID_ALL,
mWifiIfaces, NetworkStats.TAG_NONE, mLastWifiSnapshot);
@@ -5657,12 +5656,10 @@
return;
}
- if (mLastWifiSnapshot == null) {
- mLastWifiSnapshot = snapshot;
- return;
- }
+ mCurWifiSnapshot = snapshot;
+ mLastWifiSnapshot = last;
- final NetworkStats delta = NetworkStats.subtract(snapshot, mLastWifiSnapshot,
+ final NetworkStats delta = NetworkStats.subtract(snapshot, last,
null, null, mTmpNetworkStats);
mTmpNetworkStats = delta;
mLastWifiSnapshot = snapshot;
@@ -5671,6 +5668,13 @@
for (int i = 0; i < size; i++) {
final NetworkStats.Entry entry = delta.getValues(i, mTmpNetworkStatsEntry);
+ if (DEBUG) {
+ final NetworkStats.Entry cur = snapshot.getValues(i, null);
+ Slog.d(TAG, "Wifi uid " + entry.uid + ": delta rx=" + entry.rxBytes
+ + " tx=" + entry.txBytes + ", cur rx=" + cur.rxBytes
+ + " tx=" + cur.txBytes);
+ }
+
if (entry.rxBytes == 0 || entry.txBytes == 0) continue;
final Uid u = getUidStatsLocked(entry.uid);
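The network-stats refactor above replaces the null-initialized snapshots with pre-allocated current/last pairs, so the first reading produces a usable delta instead of being discarded. A generic sketch of that double-buffered snapshot pattern, using plain counters rather than the framework's `NetworkStats`:

```java
class SnapshotDelta {
    // Cumulative per-slot counters; empty rather than null so the very first
    // reading yields a full delta instead of being thrown away.
    private long[] lastSnapshot = new long[0];
    private long[] curSnapshot = new long[0];

    void update(long[] freshCumulativeCounters) {
        final long[] last = curSnapshot;       // previous reading becomes the baseline
        curSnapshot = freshCumulativeCounters; // newest reading
        lastSnapshot = last;

        for (int i = 0; i < curSnapshot.length; i++) {
            final long baseline = i < lastSnapshot.length ? lastSnapshot[i] : 0;
            final long delta = curSnapshot[i] - baseline;
            if (delta == 0) continue;
            // Attribute `delta` to slot i (the per-uid stats in the change above).
        }
    }
}
```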
diff --git a/services/core/java/com/android/server/power/PowerManagerService.java b/services/core/java/com/android/server/power/PowerManagerService.java
index 0ba55b6..f420988 100644
--- a/services/core/java/com/android/server/power/PowerManagerService.java
+++ b/services/core/java/com/android/server/power/PowerManagerService.java
@@ -1553,7 +1553,7 @@
return false;
}
if (!isBeingKeptAwakeLocked()) {
- if (!mIsPowered && !mDreamsEnabledByDefaultConfig) {
+ if (!mIsPowered && !mDreamsEnabledOnBatteryConfig) {
return false;
}
if (!mIsPowered
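A minimal sketch of the corrected gate, limited to the fragment this hunk touches: on battery, only the dreams-on-battery config permits napping/dreaming, which is what the switch from `mDreamsEnabledByDefaultConfig` to `mDreamsEnabledOnBatteryConfig` enforces. The class scaffolding is a placeholder, not PowerManagerService's real structure.

```java
class DreamGateSketch {
    boolean mIsPowered;
    boolean mDreamsEnabledOnBatteryConfig;

    /** Placeholder for PowerManagerService.isBeingKeptAwakeLocked(). */
    boolean isBeingKeptAwakeLocked() {
        return false;
    }

    /** Mirrors only the corrected condition from the hunk above. */
    boolean mayContinueDreaming() {
        if (!isBeingKeptAwakeLocked()) {
            // On battery, dreaming requires the dreams-on-battery setting; the
            // dreams-enabled-by-default setting governs the general default,
            // not the unplugged case.
            if (!mIsPowered && !mDreamsEnabledOnBatteryConfig) {
                return false;
            }
        }
        return true;
    }
}
```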