Merge "New orientation listener." into honeycomb
diff --git a/core/java/android/provider/Settings.java b/core/java/android/provider/Settings.java
index 4f21265..257f275 100644
--- a/core/java/android/provider/Settings.java
+++ b/core/java/android/provider/Settings.java
@@ -1680,6 +1680,16 @@
         public static final String POINTER_LOCATION = "pointer_location";
 
         /**
+         * Log raw orientation data from {@link WindowOrientationListener} for use with the
+         * orientationplot.py tool.
+         * 0 = no
+         * 1 = yes
+         * @hide
+         */
+        public static final String WINDOW_ORIENTATION_LISTENER_LOG =
+                "window_orientation_listener_log";
+
+        /**
          * Whether to play a sound for low-battery alerts.
          * @hide
          */
diff --git a/core/java/android/view/WindowOrientationListener.java b/core/java/android/view/WindowOrientationListener.java
index 6095a64..62d3e6a 100755
--- a/core/java/android/view/WindowOrientationListener.java
+++ b/core/java/android/view/WindowOrientationListener.java
@@ -23,6 +23,7 @@
 import android.hardware.SensorManager;
 import android.util.Config;
 import android.util.Log;
+import android.util.Slog;
 
 /**
  * A special helper class used by the WindowManager
@@ -33,17 +34,27 @@
  * "App/Activity/Screen Orientation" to ensure that all orientation
  * modes still work correctly.
  *
+ * You can also visualize the behavior of the WindowOrientationListener by
+ * enabling the window orientation listener log using the Development Settings
+ * in the Dev Tools application (Development.apk)
+ * and running frameworks/base/tools/orientationplot/orientationplot.py.
+ *
+ * More information about how to tune this algorithm in
+ * frameworks/base/tools/orientationplot/README.txt.
+ *
  * @hide
  */
 public abstract class WindowOrientationListener {
     private static final String TAG = "WindowOrientationListener";
     private static final boolean DEBUG = false;
     private static final boolean localLOGV = DEBUG || Config.DEBUG;
+
     private SensorManager mSensorManager;
-    private boolean mEnabled = false;
+    private boolean mEnabled;
     private int mRate;
     private Sensor mSensor;
     private SensorEventListenerImpl mSensorEventListener;
+    boolean mLogEnabled;
 
     /**
      * Creates a new WindowOrientationListener.
@@ -51,7 +62,7 @@
      * @param context for the WindowOrientationListener.
      */
     public WindowOrientationListener(Context context) {
-        this(context, SensorManager.SENSOR_DELAY_NORMAL);
+        this(context, SensorManager.SENSOR_DELAY_UI);
     }
     
     /**
@@ -63,9 +74,7 @@
      * value of {@link android.hardware.SensorManager#SENSOR_DELAY_NORMAL 
      * SENSOR_DELAY_NORMAL} for simple screen orientation change detection.
      *
-     * This constructor is private since no one uses it and making it public would complicate
-     * things, since the lowpass filtering code depends on the actual sampling period, and there's
-     * no way to get the period from SensorManager based on the rate constant.
+     * This constructor is private since no one uses it.
      */
     private WindowOrientationListener(Context context, int rate) {
         mSensorManager = (SensorManager)context.getSystemService(Context.SENSOR_SERVICE);
@@ -108,12 +117,11 @@
         }
     }
 
-    public void setAllow180Rotation(boolean allowed) {
-        if (mSensorEventListener != null) {
-            mSensorEventListener.setAllow180Rotation(allowed);
-        }
-    }
-
+    /**
+     * Gets the current orientation.
+     * @param lastRotation
+     * @return
+     */
     public int getCurrentRotation(int lastRotation) {
         if (mEnabled) {
             return mSensorEventListener.getCurrentRotation(lastRotation);
@@ -122,364 +130,6 @@
     }
 
     /**
-     * This class filters the raw accelerometer data and tries to detect actual changes in
-     * orientation. This is a very ill-defined problem so there are a lot of tweakable parameters,
-     * but here's the outline:
-     *
-     *  - Convert the acceleromter vector from cartesian to spherical coordinates. Since we're
-     * dealing with rotation of the device, this is the sensible coordinate system to work in. The
-     * zenith direction is the Z-axis, i.e. the direction the screen is facing. The radial distance
-     * is referred to as the magnitude below. The elevation angle is referred to as the "tilt"
-     * below. The azimuth angle is referred to as the "orientation" below (and the azimuth axis is
-     * the Y-axis). See http://en.wikipedia.org/wiki/Spherical_coordinate_system for reference.
-     *
-     *  - Low-pass filter the tilt and orientation angles to avoid "twitchy" behavior.
-     *
-     *  - When the orientation angle reaches a certain threshold, transition to the corresponding
-     * orientation. These thresholds have some hysteresis built-in to avoid oscillation.
-     *
-     *  - Use the magnitude to judge the accuracy of the data. Under ideal conditions, the magnitude
-     * should equal to that of gravity. When it differs significantly, we know the device is under
-     * external acceleration and we can't trust the data.
-     *
-     *  - Use the tilt angle to judge the accuracy of orientation data. When the tilt angle is high
-     * in magnitude, we distrust the orientation data, because when the device is nearly flat, small
-     * physical movements produce large changes in orientation angle.
-     *
-     * Details are explained below.
-     */
-    static class SensorEventListenerImpl implements SensorEventListener {
-        // We work with all angles in degrees in this class.
-        private static final float RADIANS_TO_DEGREES = (float) (180 / Math.PI);
-
-        // Indices into SensorEvent.values
-        private static final int _DATA_X = 0;
-        private static final int _DATA_Y = 1;
-        private static final int _DATA_Z = 2;
-
-        // Internal aliases for the four orientation states.  ROTATION_0 = default portrait mode,
-        // ROTATION_90 = right side of device facing the sky, etc.
-        private static final int ROTATION_0 = 0;
-        private static final int ROTATION_90 = 1;
-        private static final int ROTATION_270 = 2;
-        private static final int ROTATION_180 = 3;
-
-        // Mapping our internal aliases into actual Surface rotation values
-        private static final int[] INTERNAL_TO_SURFACE_ROTATION = new int[] {
-            Surface.ROTATION_0, Surface.ROTATION_90, Surface.ROTATION_270,
-            Surface.ROTATION_180};
-
-        // Mapping Surface rotation values to internal aliases.
-        private static final int[] SURFACE_TO_INTERNAL_ROTATION = new int[] {
-            ROTATION_0, ROTATION_90, ROTATION_180, ROTATION_270};
-
-        // Threshold ranges of orientation angle to transition into other orientation states.
-        // The first list is for transitions from ROTATION_0, ROTATION_90, ROTATION_270,
-        // and then ROTATION_180.
-        // ROTATE_TO defines the orientation each threshold range transitions to, and must be kept
-        // in sync with this.
-        // We generally transition about the halfway point between two states with a swing of 30
-        // degrees for hysteresis.
-        private static final int[][][] THRESHOLDS = new int[][][] {
-                {{60, 180}, {180, 300}},
-                {{0, 30}, {195, 315}, {315, 360}},
-                {{0, 45}, {45, 165}, {330, 360}},
-
-                // Handle situation where we are currently doing 180 rotation
-                // but that is no longer allowed.
-                {{0, 45}, {45, 135}, {135, 225}, {225, 315}, {315, 360}},
-        };
-        // See THRESHOLDS
-        private static final int[][] ROTATE_TO = new int[][] {
-                {ROTATION_90, ROTATION_270},
-                {ROTATION_0, ROTATION_270, ROTATION_0},
-                {ROTATION_0, ROTATION_90, ROTATION_0},
-                {ROTATION_0, ROTATION_90, ROTATION_0, ROTATION_270, ROTATION_0},
-        };
-
-        // Thresholds that allow all 4 orientations.
-        private static final int[][][] THRESHOLDS_WITH_180 = new int[][][] {
-            {{60, 165}, {165, 195}, {195, 300}},
-            {{0, 30}, {165, 195}, {195, 315}, {315, 360}},
-            {{0, 45}, {45, 165}, {165, 195}, {330, 360}},
-            {{0, 45}, {45, 135}, {225, 315}, {315, 360}},
-        };
-        // See THRESHOLDS_WITH_180
-        private static final int[][] ROTATE_TO_WITH_180 = new int[][] {
-            {ROTATION_90, ROTATION_180, ROTATION_270},
-            {ROTATION_0, ROTATION_180, ROTATION_90, ROTATION_0},
-            {ROTATION_0, ROTATION_270, ROTATION_180, ROTATION_0},
-            {ROTATION_0, ROTATION_90, ROTATION_270, ROTATION_0},
-        };
-
-        // Maximum absolute tilt angle at which to consider orientation data.  Beyond this (i.e.
-        // when screen is facing the sky or ground), we completely ignore orientation data.
-        private static final int MAX_TILT = 75;
-
-        // Additional limits on tilt angle to transition to each new orientation.  We ignore all
-        // data with tilt beyond MAX_TILT, but we can set stricter limits on transitions to a
-        // particular orientation here.
-        private static final int[] MAX_TRANSITION_TILT = new int[] {MAX_TILT, 65, 65, 40};
-
-        // Between this tilt angle and MAX_TILT, we'll allow orientation changes, but we'll filter
-        // with a higher time constant, making us less sensitive to change.  This primarily helps
-        // prevent momentary orientation changes when placing a device on a table from the side (or
-        // picking one up).
-        private static final int PARTIAL_TILT = 50;
-
-        // Maximum allowable deviation of the magnitude of the sensor vector from that of gravity,
-        // in m/s^2.  Beyond this, we assume the phone is under external forces and we can't trust
-        // the sensor data.  However, under constantly vibrating conditions (think car mount), we
-        // still want to pick up changes, so rather than ignore the data, we filter it with a very
-        // high time constant.
-        private static final float MAX_DEVIATION_FROM_GRAVITY = 1.5f;
-
-        // Minimum acceleration considered, in m/s^2. Below this threshold sensor noise will have
-        // significant impact on the calculations and in case of the vector (0, 0, 0) there is no
-        // defined rotation or tilt at all. Low or zero readings can happen when space travelling
-        // or free falling, but more commonly when shaking or getting bad readings from the sensor.
-        // The accelerometer is turned off when not used and polling it too soon after it is
-        // turned on may result in (0, 0, 0).
-        private static final float MIN_ABS_ACCELERATION = 1.5f;
-
-        // Actual sampling period corresponding to SensorManager.SENSOR_DELAY_NORMAL.  There's no
-        // way to get this information from SensorManager.
-        // Note the actual period is generally 3-30ms larger than this depending on the device, but
-        // that's not enough to significantly skew our results.
-        private static final int SAMPLING_PERIOD_MS = 200;
-
-        // The following time constants are all used in low-pass filtering the accelerometer output.
-        // See http://en.wikipedia.org/wiki/Low-pass_filter#Discrete-time_realization for
-        // background.
-
-        // When device is near-vertical (screen approximately facing the horizon)
-        private static final int DEFAULT_TIME_CONSTANT_MS = 100;
-        // When device is partially tilted towards the sky or ground
-        private static final int TILTED_TIME_CONSTANT_MS = 500;
-        // When device is under external acceleration, i.e. not just gravity.  We heavily distrust
-        // such readings.
-        private static final int ACCELERATING_TIME_CONSTANT_MS = 2000;
-
-        private static final float DEFAULT_LOWPASS_ALPHA =
-            computeLowpassAlpha(DEFAULT_TIME_CONSTANT_MS);
-        private static final float TILTED_LOWPASS_ALPHA =
-            computeLowpassAlpha(TILTED_TIME_CONSTANT_MS);
-        private static final float ACCELERATING_LOWPASS_ALPHA =
-            computeLowpassAlpha(ACCELERATING_TIME_CONSTANT_MS);
-
-        private boolean mAllow180Rotation = false;
-
-        private WindowOrientationListener mOrientationListener;
-        private int mRotation = ROTATION_0; // Current orientation state
-        private float mTiltAngle = 0; // low-pass filtered
-        private float mOrientationAngle = 0; // low-pass filtered
-
-        /*
-         * Each "distrust" counter represents our current level of distrust in the data based on
-         * a certain signal.  For each data point that is deemed unreliable based on that signal,
-         * the counter increases; otherwise, the counter decreases.  Exact rules vary.
-         */
-        private int mAccelerationDistrust = 0; // based on magnitude != gravity
-        private int mTiltDistrust = 0; // based on tilt close to +/- 90 degrees
-
-        public SensorEventListenerImpl(WindowOrientationListener orientationListener) {
-            mOrientationListener = orientationListener;
-        }
-
-        private static float computeLowpassAlpha(int timeConstantMs) {
-            return (float) SAMPLING_PERIOD_MS / (timeConstantMs + SAMPLING_PERIOD_MS);
-        }
-
-        void setAllow180Rotation(boolean allowed) {
-            mAllow180Rotation = allowed;
-        }
-
-        int getCurrentRotation(int lastRotation) {
-            if (mTiltDistrust > 0) {
-                // we really don't know the current orientation, so trust what's currently displayed
-                mRotation = SURFACE_TO_INTERNAL_ROTATION[lastRotation];
-            }
-            return INTERNAL_TO_SURFACE_ROTATION[mRotation];
-        }
-
-        private void calculateNewRotation(float orientation, float tiltAngle) {
-            if (localLOGV) Log.i(TAG, orientation + ", " + tiltAngle + ", " + mRotation);
-            final boolean allow180Rotation = mAllow180Rotation;
-            int thresholdRanges[][] = allow180Rotation
-                    ? THRESHOLDS_WITH_180[mRotation] : THRESHOLDS[mRotation];
-            int row = -1;
-            for (int i = 0; i < thresholdRanges.length; i++) {
-                if (orientation >= thresholdRanges[i][0] && orientation < thresholdRanges[i][1]) {
-                    row = i;
-                    break;
-                }
-            }
-            if (row == -1) return; // no matching transition
-
-            int rotation = allow180Rotation
-                    ? ROTATE_TO_WITH_180[mRotation][row] : ROTATE_TO[mRotation][row];
-            if (tiltAngle > MAX_TRANSITION_TILT[rotation]) {
-                // tilted too far flat to go to this rotation
-                return;
-            }
-
-            if (localLOGV) Log.i(TAG, "orientation " + orientation + " gives new rotation = "
-                    + rotation);
-            mRotation = rotation;
-            mOrientationListener.onOrientationChanged(INTERNAL_TO_SURFACE_ROTATION[mRotation]);
-        }
-
-        private float lowpassFilter(float newValue, float oldValue, float alpha) {
-            return alpha * newValue + (1 - alpha) * oldValue;
-        }
-
-        private float vectorMagnitude(float x, float y, float z) {
-            return (float) Math.sqrt(x*x + y*y + z*z);
-        }
-
-        /**
-         * Angle between upVector and the x-y plane (the plane of the screen), in [-90, 90].
-         * +/- 90 degrees = screen facing the sky or ground.
-         */
-        private float tiltAngle(float z, float magnitude) {
-            return (float) Math.asin(z / magnitude) * RADIANS_TO_DEGREES;
-        }
-
-        public void onSensorChanged(SensorEvent event) {
-            // the vector given in the SensorEvent points straight up (towards the sky) under ideal
-            // conditions (the phone is not accelerating).  i'll call this upVector elsewhere.
-            float x = event.values[_DATA_X];
-            float y = event.values[_DATA_Y];
-            float z = event.values[_DATA_Z];
-            float magnitude = vectorMagnitude(x, y, z);
-            float deviation = Math.abs(magnitude - SensorManager.STANDARD_GRAVITY);
-
-            handleAccelerationDistrust(deviation);
-            if (magnitude < MIN_ABS_ACCELERATION) {
-                return; // Ignore tilt and orientation when (0, 0, 0) or low reading
-            }
-
-            // only filter tilt when we're accelerating
-            float alpha = 1;
-            if (mAccelerationDistrust > 0) {
-                alpha = ACCELERATING_LOWPASS_ALPHA;
-            }
-            float newTiltAngle = tiltAngle(z, magnitude);
-            mTiltAngle = lowpassFilter(newTiltAngle, mTiltAngle, alpha);
-
-            float absoluteTilt = Math.abs(mTiltAngle);
-            checkFullyTilted(absoluteTilt);
-            if (mTiltDistrust > 0) {
-                return; // when fully tilted, ignore orientation entirely
-            }
-
-            float newOrientationAngle = computeNewOrientation(x, y);
-            filterOrientation(absoluteTilt, newOrientationAngle);
-            calculateNewRotation(mOrientationAngle, absoluteTilt);
-        }
-
-        /**
-         * When accelerating, increment distrust; otherwise, decrement distrust.  The idea is that
-         * if a single jolt happens among otherwise good data, we should keep trusting the good
-         * data.  On the other hand, if a series of many bad readings comes in (as if the phone is
-         * being rapidly shaken), we should wait until things "settle down", i.e. we get a string
-         * of good readings.
-         *
-         * @param deviation absolute difference between the current magnitude and gravity
-         */
-        private void handleAccelerationDistrust(float deviation) {
-            if (deviation > MAX_DEVIATION_FROM_GRAVITY) {
-                if (mAccelerationDistrust < 5) {
-                    mAccelerationDistrust++;
-                }
-            } else if (mAccelerationDistrust > 0) {
-                mAccelerationDistrust--;
-            }
-        }
-
-        /**
-         * Check if the phone is tilted towards the sky or ground and handle that appropriately.
-         * When fully tilted, we automatically push the tilt up to a fixed value; otherwise we
-         * decrement it.  The idea is to distrust the first few readings after the phone gets
-         * un-tilted, no matter what, i.e. preventing an accidental transition when the phone is
-         * picked up from a table.
-         *
-         * We also reset the orientation angle to the center of the current screen orientation.
-         * Since there is no real orientation of the phone, we want to ignore the most recent sensor
-         * data and reset it to this value to avoid a premature transition when the phone starts to
-         * get un-tilted.
-         *
-         * @param absoluteTilt the absolute value of the current tilt angle
-         */
-        private void checkFullyTilted(float absoluteTilt) {
-            if (absoluteTilt > MAX_TILT) {
-                if (mRotation == ROTATION_0) {
-                    mOrientationAngle = 0;
-                } else if (mRotation == ROTATION_90) {
-                    mOrientationAngle = 90;
-                } else { // ROTATION_270
-                    mOrientationAngle = 270;
-                }
-
-                if (mTiltDistrust < 3) {
-                    mTiltDistrust = 3;
-                }
-            } else if (mTiltDistrust > 0) {
-                mTiltDistrust--;
-            }
-        }
-
-        /**
-         * Angle between the x-y projection of upVector and the +y-axis, increasing
-         * clockwise.
-         * 0 degrees = speaker end towards the sky
-         * 90 degrees = right edge of device towards the sky
-         */
-        private float computeNewOrientation(float x, float y) {
-            float orientationAngle = (float) -Math.atan2(-x, y) * RADIANS_TO_DEGREES;
-            // atan2 returns [-180, 180]; normalize to [0, 360]
-            if (orientationAngle < 0) {
-                orientationAngle += 360;
-            }
-            return orientationAngle;
-        }
-
-        /**
-         * Compute a new filtered orientation angle.
-         */
-        private void filterOrientation(float absoluteTilt, float orientationAngle) {
-            float alpha = DEFAULT_LOWPASS_ALPHA;
-            if (mAccelerationDistrust > 1) {
-                // when under more than a transient acceleration, distrust heavily
-                alpha = ACCELERATING_LOWPASS_ALPHA;
-            } else if (absoluteTilt > PARTIAL_TILT || mAccelerationDistrust == 1) {
-                // when tilted partway, or under transient acceleration, distrust lightly
-                alpha = TILTED_LOWPASS_ALPHA;
-            }
-
-            // since we're lowpass filtering a value with periodic boundary conditions, we need to
-            // adjust the new value to filter in the right direction...
-            float deltaOrientation = orientationAngle - mOrientationAngle;
-            if (deltaOrientation > 180) {
-                orientationAngle -= 360;
-            } else if (deltaOrientation < -180) {
-                orientationAngle += 360;
-            }
-            mOrientationAngle = lowpassFilter(orientationAngle, mOrientationAngle, alpha);
-            // ...and then adjust back to ensure we're in the range [0, 360]
-            if (mOrientationAngle > 360) {
-                mOrientationAngle -= 360;
-            } else if (mOrientationAngle < 0) {
-                mOrientationAngle += 360;
-            }
-        }
-
-        public void onAccuracyChanged(Sensor sensor, int accuracy) {
-
-        }
-    }
-
-    /*
      * Returns true if sensor is enabled and false otherwise
      */
     public boolean canDetectOrientation() {
@@ -492,5 +142,481 @@
      * @param rotation The new orientation of the device, one of the Surface.ROTATION_* constants.
      * @see Surface
      */
-    abstract public void onOrientationChanged(int rotation);
+    public abstract void onOrientationChanged(int rotation);
+
+    /**
+     * Enables or disables the window orientation listener logging for use with
+     * the orientationplot.py tool.
+     * Logging is usually enabled via Development Settings.  (See class comments.)
+     * @param enable True to enable logging.
+     */
+    public void setLogEnabled(boolean enable) {
+        mLogEnabled = enable;
+    }
+
+    /**
+     * This class filters the raw accelerometer data and tries to detect actual changes in
+     * orientation. This is a very ill-defined problem so there are a lot of tweakable parameters,
+     * but here's the outline:
+     *
+     *  - Low-pass filter the accelerometer vector in cartesian coordinates.  We do it in
+     *    cartesian space because the orientation calculations are sensitive to the
+     *    absolute magnitude of the acceleration.  In particular, there are singularities
+     *    in the calculation as the magnitude approaches 0.  By performing the low-pass
+     *    filtering early, we can eliminate high-frequency impulses systematically.
+     *
+     *  - Convert the accelerometer vector from cartesian to spherical coordinates.
+     *    Since we're dealing with rotation of the device, this is the sensible coordinate
+     *    system to work in.  The zenith direction is the Z-axis, the direction the screen
+     *    is facing.  The radial distance is referred to as the magnitude below.
+     *    The elevation angle is referred to as the "tilt" below.
+     *    The azimuth angle is referred to as the "orientation" below (and the azimuth axis is
+     *    the Y-axis).
+     *    See http://en.wikipedia.org/wiki/Spherical_coordinate_system for reference.
+     *
+     *  - If the tilt angle is too close to horizontal (near 90 or -90 degrees), do nothing.
+     *    The orientation angle is not meaningful when the device is nearly horizontal.
+     *    The tilt angle thresholds are set differently for each orientation and different
+     *    limits are applied when the device is facing down as opposed to when it is facing
+     *    forward or facing up.
+     *
+     *  - When the orientation angle reaches a certain threshold, consider transitioning
+     *    to the corresponding orientation.  These thresholds have some hysteresis built-in
+     *    to avoid oscillations between adjacent orientations.
+     *
+     *  - Use the magnitude to judge the confidence of the orientation.
+     *    Under ideal conditions, the magnitude should equal to that of gravity.  When it
+     *    differs significantly, we know the device is under external acceleration and
+     *    we can't trust the data.
+     *
+     *  - Use the tilt angle to judge the confidence of the orientation.
+     *    When the tilt angle is high in absolute value then the device is nearly flat
+     *    so small physical movements produce large changes in orientation angle.
+     *    This can be the case when the device is being picked up from a table.
+     *
+     *  - Use the orientation angle to judge the confidence of the orientation.
+     *    The closer the orientation angle is to the canonical orientation angle, the better.
+     *
+     *  - Based on the aggregate confidence, we determine how long we want to wait for
+     *    the new orientation to settle.  This is accomplished by integrating the confidence
+     *    for each orientation over time.  When a threshold integration sum is reached
+     *    then we actually change orientations.
+     *
+     * Details are explained inline.
+     */
+    static final class SensorEventListenerImpl implements SensorEventListener {
+        // We work with all angles in degrees in this class.
+        private static final float RADIANS_TO_DEGREES = (float) (180 / Math.PI);
+
+        // Indices into SensorEvent.values for the accelerometer sensor.
+        private static final int ACCELEROMETER_DATA_X = 0;
+        private static final int ACCELEROMETER_DATA_Y = 1;
+        private static final int ACCELEROMETER_DATA_Z = 2;
+
+        // Rotation constants.
+        // These are the same as Surface rotation constants with the addition of a 5th
+        // unknown state when we are not confident about the proposed orientation.
+        // One important property of these constants is that they are equal to the
+        // orientation angle itself divided by 90.  We use this fact to map
+        // back and forth between orientation angles and rotation values.
+        private static final int ROTATION_UNKNOWN = -1;
+        //private static final int ROTATION_0 = Surface.ROTATION_0; // 0
+        //private static final int ROTATION_90 = Surface.ROTATION_90; // 1
+        //private static final int ROTATION_180 = Surface.ROTATION_180; // 2
+        //private static final int ROTATION_270 = Surface.ROTATION_270; // 3
+
+        private final WindowOrientationListener mOrientationListener;
+
+        private int mRotation = ROTATION_UNKNOWN;
+
+        /* State for first order low-pass filtering of accelerometer data.
+         * See http://en.wikipedia.org/wiki/Low-pass_filter#Discrete-time_realization for
+         * signal processing background.
+         */
+
+        private long mLastTimestamp = Long.MAX_VALUE; // in nanoseconds
+        private float mLastFilteredX, mLastFilteredY, mLastFilteredZ;
+
+        // The maximum sample inter-arrival time in milliseconds.
+        // If the acceleration samples are further apart than this amount in time, we reset the
+        // state of the low-pass filter and orientation properties.  This helps to handle
+        // boundary conditions when the device is turned on, wakes from suspend or there is
+        // a significant gap in samples.
+        private static final float MAX_FILTER_DELTA_TIME_MS = 1000;
+
+        // The acceleration filter cutoff frequency.
+        // This is the frequency at which signals are attenuated by 3dB (half the passband power).
+        // Each successive octave beyond this frequency is attenuated by an additional 6dB.
+        //
+        // We choose the cutoff frequency such that impulses and vibrational noise
+        // (think car dock) is suppressed.  However, this filtering does not eliminate
+        // all possible sources of orientation ambiguity so we also rely on a dynamic
+        // settle time for establishing a new orientation.  Filtering adds latency
+        // inversely proportional to the cutoff frequency so we don't want to make
+        // it too small or we can lose hundreds of milliseconds of responsiveness.
+        private static final float FILTER_CUTOFF_FREQUENCY_HZ = 1f;
+        private static final float FILTER_TIME_CONSTANT_MS = (float)(500.0f
+                / (Math.PI * FILTER_CUTOFF_FREQUENCY_HZ)); // t = 1 / (2pi * Fc) * 1000ms
+
+        // The filter gain.
+        // We choose a value slightly less than unity to avoid numerical instabilities due
+        // to floating-point error accumulation.
+        private static final float FILTER_GAIN = 0.999f;
+
+        /* State for orientation detection. */
+
+        // Thresholds for minimum and maximum allowable deviation from gravity.
+        //
+        // If the device is undergoing external acceleration (being bumped, in a car
+        // that is turning around a corner or a plane taking off) then the magnitude
+        // may be substantially more or less than gravity.  This can skew our orientation
+        // detection by making us think that up is pointed in a different direction.
+        //
+        // Conversely, if the device is in freefall, then there will be no gravity to
+        // measure at all.  This is problematic because we cannot detect the orientation
+        // without gravity to tell us which way is up.  A magnitude near 0 produces
+        // singularities in the tilt and orientation calculations.
+        //
+        // In both cases, we postpone choosing an orientation.
+        private static final float MIN_ACCELERATION_MAGNITUDE =
+                SensorManager.STANDARD_GRAVITY * 0.5f;
+        private static final float MAX_ACCELERATION_MAGNITUDE =
+            SensorManager.STANDARD_GRAVITY * 1.5f;
+
+        // Maximum absolute tilt angle at which to consider orientation data.  Beyond this (i.e.
+        // when screen is facing the sky or ground), we completely ignore orientation data.
+        private static final int MAX_TILT = 75;
+
+        // The tilt angle range in degrees for each orientation.
+        // Beyond these tilt angles, we don't even consider transitioning into the
+        // specified orientation.  We place more stringent requirements on unnatural
+        // orientations than natural ones to make it less likely to accidentally transition
+        // into those states.
+        // The first value of each pair is negative so it applies a limit when the device is
+        // facing down (overhead reading in bed).
+        // The second value of each pair is positive so it applies a limit when the device is
+        // facing up (resting on a table).
+        // The ideal tilt angle is 0 (when the device is vertical) so the limits establish
+        // how close to vertical the device must be in order to change orientation.
+        private static final int[][] TILT_TOLERANCE = new int[][] {
+            /* ROTATION_0   */ { -20, 75 },
+            /* ROTATION_90  */ { -20, 70 },
+            /* ROTATION_180 */ { -20, 65 },
+            /* ROTATION_270 */ { -20, 70 }
+        };
+
+        // The gap angle in degrees between adjacent orientation angles for hysteresis.
+        // This creates a "dead zone" between the current orientation and a proposed
+        // adjacent orientation.  No orientation proposal is made when the orientation
+        // angle is within the gap between the current orientation and the adjacent
+        // orientation.
+        private static final int ADJACENT_ORIENTATION_ANGLE_GAP = 30;
+
+        // The confidence scale factors for angle, tilt and magnitude.
+        // When the distance between the actual value and the ideal value is the
+        // specified delta, orientation transitions will take twice as long as they would
+        // in the ideal case.  Increasing or decreasing the delta has an exponential effect
+        // on each factor's influence over the transition time.
+
+        // Transition takes 2x longer when angle is 30 degrees from ideal orientation angle.
+        private static final float ORIENTATION_ANGLE_CONFIDENCE_SCALE =
+                confidenceScaleFromDelta(30);
+
+        // Transition takes 2x longer when tilt is 45 degrees from vertical.
+        private static final float TILT_ANGLE_CONFIDENCE_SCALE = confidenceScaleFromDelta(45);
+
+        // Transition takes 2x longer when acceleration is 0.25 Gs.
+        private static final float MAGNITUDE_CONFIDENCE_SCALE = confidenceScaleFromDelta(
+                SensorManager.STANDARD_GRAVITY * 0.25f);
+
+        // The number of milliseconds for which a new orientation must be stable before
+        // we perform an orientation change under ideal conditions.  It will take
+        // proportionally longer than this to effect an orientation change when
+        // the proposed orientation confidence is low.
+        private static final float ORIENTATION_SETTLE_TIME_MS = 250;
+
+        // The confidence that we have about effecting each orientation change.
+        // When one of these values exceeds 1.0, we have determined our new orientation!
+        private float mConfidence[] = new float[4];
+
+        public SensorEventListenerImpl(WindowOrientationListener orientationListener) {
+            mOrientationListener = orientationListener; // listener to notify when the detected rotation changes
+        }
+
+        public int getCurrentRotation(int lastRotation) {
+            return mRotation != ROTATION_UNKNOWN ? mRotation : lastRotation; // fall back to the caller's rotation until one has been detected
+        }
+
+        @Override
+        public void onAccuracyChanged(Sensor sensor, int accuracy) { // intentional no-op: accuracy is not used by the orientation detector
+        }
+
+        @Override
+        public void onSensorChanged(SensorEvent event) {
+            final boolean log = mOrientationListener.mLogEnabled;
+
+            // The vector given in the SensorEvent points straight up (towards the sky) under ideal
+            // conditions (the phone is not accelerating).  We refer to this as the "up" vector below.
+            float x = event.values[ACCELEROMETER_DATA_X];
+            float y = event.values[ACCELEROMETER_DATA_Y];
+            float z = event.values[ACCELEROMETER_DATA_Z];
+
+            if (log) {
+                Slog.v(TAG, "Raw acceleration vector: " +
+                        "x=" + x + ", y=" + y + ", z=" + z);
+            }
+
+            // Apply a low-pass filter to the acceleration up vector in cartesian space.
+            // Reset the orientation listener state if the samples are too far apart in time
+            // or when we see values of (0, 0, 0) which indicates that we polled the
+            // accelerometer too soon after turning it on and we don't have any data yet.
+            final float timeDeltaMS = (event.timestamp - mLastTimestamp) * 0.000001f; // nanoseconds -> milliseconds
+            boolean skipSample;
+            if (timeDeltaMS <= 0 || timeDeltaMS > MAX_FILTER_DELTA_TIME_MS
+                    || (x == 0 && y == 0 && z == 0)) {
+                if (log) {
+                    Slog.v(TAG, "Resetting orientation listener.");
+                }
+                for (int i = 0; i < 4; i++) {
+                    mConfidence[i] = 0;
+                }
+                skipSample = true;
+            } else {
+                final float alpha = timeDeltaMS
+                        / (FILTER_TIME_CONSTANT_MS + timeDeltaMS) * FILTER_GAIN;
+                x = alpha * (x - mLastFilteredX) + mLastFilteredX;
+                y = alpha * (y - mLastFilteredY) + mLastFilteredY;
+                z = alpha * (z - mLastFilteredZ) + mLastFilteredZ;
+                if (log) {
+                    Slog.v(TAG, "Filtered acceleration vector: " +
+                            "x=" + x + ", y=" + y + ", z=" + z);
+                }
+                skipSample = false;
+            }
+            mLastTimestamp = event.timestamp;
+            mLastFilteredX = x;
+            mLastFilteredY = y;
+            mLastFilteredZ = z;
+
+            boolean orientationChanged = false;
+            if (!skipSample) {
+                // Determine a proposed orientation based on the currently available data.
+                int proposedOrientation = ROTATION_UNKNOWN;
+                float combinedConfidence = 1.0f;
+
+                // Calculate the magnitude of the acceleration vector.
+                final float magnitude = (float) Math.sqrt(x * x + y * y + z * z);
+                if (magnitude < MIN_ACCELERATION_MAGNITUDE
+                        || magnitude > MAX_ACCELERATION_MAGNITUDE) {
+                    if (log) {
+                        Slog.v(TAG, "Ignoring sensor data, magnitude out of range: "
+                                + "magnitude=" + magnitude);
+                    }
+                } else {
+                    // Calculate the tilt angle.
+                    // This is the angle between the up vector and the x-y plane (the plane of
+                    // the screen) in a range of [-90, 90] degrees.
+                    //   -90 degrees: screen horizontal and facing the ground (overhead)
+                    //     0 degrees: screen vertical
+                    //    90 degrees: screen horizontal and facing the sky (on table)
+                   final int tiltAngle = (int) Math.round(
+                           Math.asin(z / magnitude) * RADIANS_TO_DEGREES);
+
+                   // If the tilt angle is too close to horizontal then we cannot determine
+                   // the orientation angle of the screen.
+                   if (Math.abs(tiltAngle) > MAX_TILT) {
+                       if (log) {
+                           Slog.v(TAG, "Ignoring sensor data, tilt angle too high: "
+                                   + "magnitude=" + magnitude + ", tiltAngle=" + tiltAngle);
+                       }
+                   } else {
+                       // Calculate the orientation angle.
+                       // This is the angle between the x-y projection of the up vector onto
+                       // the +y-axis, increasing clockwise in a range of [0, 360] degrees.
+                       int orientationAngle = (int) Math.round(
+                               -Math.atan2(-x, y) * RADIANS_TO_DEGREES);
+                       if (orientationAngle < 0) {
+                           // atan2 returns [-180, 180]; normalize to [0, 360]
+                           orientationAngle += 360;
+                       }
+
+                       // Find the nearest orientation.
+                       // An orientation of 0 can have a nearest angle of 0 or 360 depending
+                       // on which is closer to the measured orientation angle.  We leave the
+                       // nearest angle at 360 in that case since it makes the delta calculation
+                       // for orientation angle confidence easier below.
+                       int nearestOrientation = (orientationAngle + 45) / 90;
+                       int nearestOrientationAngle = nearestOrientation * 90;
+                       if (nearestOrientation == 4) {
+                           nearestOrientation = 0;
+                       }
+
+                       // Determine the proposed orientation.
+                       // The confidence of the proposal is 1.0 when it is ideal and it
+                       // decays exponentially as the proposal moves further from the ideal
+                       // angle, tilt and magnitude of the proposed orientation.
+                       if (isTiltAngleAcceptable(nearestOrientation, tiltAngle)
+                               && isOrientationAngleAcceptable(nearestOrientation,
+                                       orientationAngle)) {
+                           proposedOrientation = nearestOrientation;
+
+                           final float idealOrientationAngle = nearestOrientationAngle;
+                           final float orientationConfidence = confidence(orientationAngle,
+                                   idealOrientationAngle, ORIENTATION_ANGLE_CONFIDENCE_SCALE);
+
+                           final float idealTiltAngle = 0;
+                           final float tiltConfidence = confidence(tiltAngle,
+                                   idealTiltAngle, TILT_ANGLE_CONFIDENCE_SCALE);
+
+                           final float idealMagnitude = SensorManager.STANDARD_GRAVITY;
+                           final float magnitudeConfidence = confidence(magnitude,
+                                   idealMagnitude, MAGNITUDE_CONFIDENCE_SCALE);
+
+                           combinedConfidence = orientationConfidence
+                                   * tiltConfidence * magnitudeConfidence;
+
+                           if (log) {
+                               Slog.v(TAG, "Proposal: "
+                                       + "magnitude=" + magnitude
+                                       + ", tiltAngle=" + tiltAngle
+                                       + ", orientationAngle=" + orientationAngle
+                                       + ", proposedOrientation=" + proposedOrientation
+                                       + ", combinedConfidence=" + combinedConfidence
+                                       + ", orientationConfidence=" + orientationConfidence
+                                       + ", tiltConfidence=" + tiltConfidence
+                                       + ", magnitudeConfidence=" + magnitudeConfidence);
+                           }
+                       } else {
+                           if (log) {
+                               Slog.v(TAG, "Ignoring sensor data, no proposal: "
+                                       + "magnitude=" + magnitude + ", tiltAngle=" + tiltAngle
+                                       + ", orientationAngle=" + orientationAngle);
+                           }
+                       }
+                   }
+                }
+
+                // Sum up the orientation confidence weights.
+                // Detect an orientation change when the sum reaches 1.0.
+                final float confidenceAmount = combinedConfidence * timeDeltaMS
+                        / ORIENTATION_SETTLE_TIME_MS;
+                for (int i = 0; i < 4; i++) {
+                    if (i == proposedOrientation) {
+                        mConfidence[i] += confidenceAmount;
+                        if (mConfidence[i] >= 1.0f) {
+                            mConfidence[i] = 1.0f;
+
+                            if (i != mRotation) {
+                                if (log) {
+                                    Slog.v(TAG, "Orientation changed!  rotation=" + i);
+                                }
+                                mRotation = i;
+                                orientationChanged = true;
+                            }
+                        }
+                    } else {
+                        mConfidence[i] -= confidenceAmount;
+                        if (mConfidence[i] < 0.0f) {
+                            mConfidence[i] = 0.0f;
+                        }
+                    }
+                }
+            }
+
+            // Write final statistics about where we are in the orientation detection process.
+            if (log) {
+                Slog.v(TAG, "Result: rotation=" + mRotation
+                        + ", confidence=["
+                        + mConfidence[0] + ", "
+                        + mConfidence[1] + ", "
+                        + mConfidence[2] + ", "
+                        + mConfidence[3] + "], timeDeltaMS=" + timeDeltaMS);
+            }
+
+            // Tell the listener.
+            if (orientationChanged) {
+                mOrientationListener.onOrientationChanged(mRotation);
+            }
+        }
+
+        /**
+         * Returns true if the tilt angle is within the per-orientation tolerance
+         * range (TILT_TOLERANCE) for a proposed orientation transition.
+         */
+        private boolean isTiltAngleAcceptable(int proposedOrientation,
+                int tiltAngle) {
+            return tiltAngle >= TILT_TOLERANCE[proposedOrientation][0]
+                    && tiltAngle <= TILT_TOLERANCE[proposedOrientation][1];
+        }
+
+        /**
+         * Returns true if the orientation angle is acceptable for a proposed
+         * orientation transition.
+         * This function takes into account the hysteresis gap between adjacent
+         * orientations (ADJACENT_ORIENTATION_ANGLE_GAP).
+         */
+        private boolean isOrientationAngleAcceptable(int proposedOrientation,
+                int orientationAngle) {
+            final int currentOrientation = mRotation;
+
+            // If there is no current rotation, then there is no gap.
+            if (currentOrientation != ROTATION_UNKNOWN) {
+                // If the proposed orientation is the same or is counter-clockwise adjacent,
+                // then we set a lower bound on the orientation angle.
+                // For example, if currentOrientation is ROTATION_0 and proposed is ROTATION_90,
+                // then we want to check orientationAngle > 45 + GAP / 2.
+                if (proposedOrientation == currentOrientation
+                        || proposedOrientation == (currentOrientation + 1) % 4) {
+                    int lowerBound = proposedOrientation * 90 - 45
+                            + ADJACENT_ORIENTATION_ANGLE_GAP / 2;
+                    if (proposedOrientation == 0) {
+                        if (orientationAngle >= 315 && orientationAngle < lowerBound + 360) {
+                            return false;
+                        }
+                    } else {
+                        if (orientationAngle < lowerBound) {
+                            return false;
+                        }
+                    }
+                }
+
+                // If the proposed orientation is the same or is clockwise adjacent,
+                // then we set an upper bound on the orientation angle.
+                // For example, if currentOrientation is ROTATION_0 and proposed is ROTATION_270,
+                // then we want to check orientationAngle < 315 - GAP / 2.
+                if (proposedOrientation == currentOrientation
+                        || proposedOrientation == (currentOrientation + 3) % 4) {
+                    int upperBound = proposedOrientation * 90 + 45
+                            - ADJACENT_ORIENTATION_ANGLE_GAP / 2;
+                    if (proposedOrientation == 0) {
+                        if (orientationAngle <= 45 && orientationAngle > upperBound) {
+                            return false;
+                        }
+                    } else {
+                        if (orientationAngle > upperBound) {
+                            return false;
+                        }
+                    }
+                }
+            }
+            return true;
+        }
+
+        /**
+         * Calculate an exponentially weighted confidence value in the range [0.0, 1.0].
+         * The further the value is from the target, the closer the confidence gets to 0.
+         */
+        private static float confidence(float value, float target, float scale) {
+            return (float) Math.exp(-Math.abs(value - target) * scale);
+        }
+
+        /**
+         * Calculate a scale factor for the confidence weight exponent.
+         * The scale value is chosen such that confidence(value, target, scale) == 0.5
+         * whenever abs(value - target) == cutoffDelta.
+         */
+        private static float confidenceScaleFromDelta(float cutoffDelta) {
+            return (float) -Math.log(0.5) / cutoffDelta; // i.e. ln(2) / cutoffDelta
+        }
+    }
 }
diff --git a/policy/src/com/android/internal/policy/impl/PhoneWindowManager.java b/policy/src/com/android/internal/policy/impl/PhoneWindowManager.java
index 67e0e67..9b5c42e 100755
--- a/policy/src/com/android/internal/policy/impl/PhoneWindowManager.java
+++ b/policy/src/com/android/internal/policy/impl/PhoneWindowManager.java
@@ -389,6 +389,8 @@
             resolver.registerContentObserver(Settings.System.getUriFor(
                     Settings.System.SCREEN_OFF_TIMEOUT), false, this);
             resolver.registerContentObserver(Settings.System.getUriFor(
+                    Settings.System.WINDOW_ORIENTATION_LISTENER_LOG), false, this);
+            resolver.registerContentObserver(Settings.System.getUriFor(
                     Settings.System.POINTER_LOCATION), false, this);
             resolver.registerContentObserver(Settings.Secure.getUriFor(
                     Settings.Secure.DEFAULT_INPUT_METHOD), false, this);
@@ -759,6 +761,10 @@
                 updateOrientationListenerLp();
             }
 
+            mOrientationListener.setLogEnabled(
+                    Settings.System.getInt(resolver,
+                            Settings.System.WINDOW_ORIENTATION_LISTENER_LOG, 0) != 0);
+
             if (mSystemReady) {
                 int pointerLocation = Settings.System.getInt(resolver,
                         Settings.System.POINTER_LOCATION, 0);
@@ -2492,18 +2498,11 @@
                     return mSeascapeRotation;
                 case ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE:
                     //return either landscape rotation based on the sensor
-                    mOrientationListener.setAllow180Rotation(
-                            isLandscapeOrSeascape(Surface.ROTATION_180));
                     return getCurrentLandscapeRotation(lastRotation);
                 case ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT:
-                    mOrientationListener.setAllow180Rotation(
-                            !isLandscapeOrSeascape(Surface.ROTATION_180));
                     return getCurrentPortraitRotation(lastRotation);
             }
 
-            mOrientationListener.setAllow180Rotation(mAllowAllRotations ||
-                    orientation == ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
-
             // case for nosensor meaning ignore sensor and consider only lid
             // or orientation sensor disabled
             //or case.unspecified
@@ -2519,7 +2518,15 @@
                 return mUserRotation;
             } else {
                 if (useSensorForOrientationLp(orientation)) {
-                    return mOrientationListener.getCurrentRotation(lastRotation);
+                    // Disable 180 degree rotation unless allowed by default for the device
+                    // or explicitly requested by the application.
+                    int rotation = mOrientationListener.getCurrentRotation(lastRotation);
+                    if (rotation == Surface.ROTATION_180
+                            && !mAllowAllRotations
+                            && orientation != ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR) {
+                        return lastRotation;
+                    }
+                    return rotation;
                 }
                 return Surface.ROTATION_0;
             }
diff --git a/tools/orientationplot/README.txt b/tools/orientationplot/README.txt
new file mode 100644
index 0000000..0143510
--- /dev/null
+++ b/tools/orientationplot/README.txt
@@ -0,0 +1,87 @@
+This directory contains a simple python script for visualizing
+the behavior of the WindowOrientationListener.
+
+
+PREREQUISITES
+-------------
+
+1. Python 2.6
+2. numpy
+3. matplotlib
+
+
+USAGE
+-----
+
+The tool works by scraping the debug log output from WindowOrientationListener
+for interesting data and then plotting it.
+
+1. Enable the Window Orientation Listener debugging data log using the
+   Development Settings in the Dev Tools application (Development.apk).
+
+2. Plug in the device.  Ensure that it is the only device plugged in
+   since this script is of very little brain and will get confused otherwise.
+
+3. Run "orientationplot.py".
+
+4. When finished, remember to disable the debug log output since it is quite verbose!
+
+
+WHAT IT ALL MEANS
+-----------------
+
+The tool displays several time series graphs that plot the output of the
+WindowOrientationListener.  Here you can see the raw accelerometer data,
+filtered accelerometer data, measured tilt and orientation angle, confidence
+intervals for the proposed orientation and accelerometer latency.
+
+Things to look for:
+
+1. Ensure the filtering is not too aggressive.  If the filter cut-off frequency is
+   less than about 1Hz, then the filtered accelerometer data becomes too smooth
+   and the latency for orientation detection goes up.  One way to observe this
+   is by holding the device vertically in one orientation then sharply turning
+   it 90 degrees to a different orientation.  Compare the rapid changes in the
+   raw accelerometer data with the smoothed out filtered data.  If the filtering
+   is too aggressive, the filter response may lag by hundreds of milliseconds.
+
+2. Ensure that there is an appropriate gap between adjacent orientation angles
+   for hysteresis.  Try holding the device in one orientation and slowly turning
+   it 90 degrees.  Note that the confidence intervals will all drop to 0 at some
+   point in between the two orientations; that is the gap.  The gap should be
+   observed between all adjacent pairs of orientations when turning the device
+   in either direction.
+
+   Next try holding the device in one orientation and rapidly turning it end
+   over end to a midpoint about 45 degrees between two opposing orientations.
+   There should be no gap observed initially.  The algorithm should pick one
+   of the orientations and settle into it (since it is obviously quite
+   different from the original orientation of the device).  However, once it
+   settles, the confidence values should start trending to 0 again because
+   the measured orientation angle is now within the gap between the new
+   orientation and the adjacent orientation.
+
+   In other words, the hysteresis gap applies only when the measured orientation
+   angle (say, 45 degrees) is between the current orientation's ideal angle
+   (say, 0 degrees) and an adjacent orientation's ideal angle (say, 90 degrees).
+
+3. Accelerometer jitter.  The accelerometer latency graph displays the interval
+   between sensor events as reported by the SensorEvent.timestamp field.  It
+   should be a fairly constant 60ms.  If the latency jumps around wildly or
+   greatly exceeds 60ms then there is a problem with the accelerometer or the
+   sensor manager.
+
+4. The orientation angle is not measured when the tilt is too close to 90 or -90
+   degrees (refer to MAX_TILT constant).  Consequently, you should expect there
+   to be no data.  Likewise, all dependent calculations are suppressed in this case
+   so there will be no orientation proposal either.
+
+5. Each orientation has its own bound on allowable tilt angles.  It's a good idea to
+   verify that these limits are being enforced by gradually varying the tilt of
+   the device until it is inside/outside the limit for each orientation.
+
+6. Orientation changes should be significantly harder when the device is held
+   overhead.  People reading on tablets in bed often have their head turned
+   a little to the side, or they hold the device loosely so its orientation
+   can be a bit unusual.  The tilt is a good indicator of whether the device is
+   overhead.
diff --git a/tools/orientationplot/orientationplot.py b/tools/orientationplot/orientationplot.py
new file mode 100755
index 0000000..07449d4
--- /dev/null
+++ b/tools/orientationplot/orientationplot.py
@@ -0,0 +1,451 @@
+#!/usr/bin/env python2.6
+#
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Plots debug log output from WindowOrientationListener.
+# See README.txt for details.
+#
+
+import numpy as np
+import matplotlib.pyplot as plot
+import subprocess
+import re
+import fcntl
+import os
+import errno
+import bisect
+from datetime import datetime, timedelta
+
+# Parameters.
+timespan = 15 # seconds total span shown
+scrolljump = 5 # seconds jump when scrolling
+timeticks = 1 # seconds between each time tick
+
+# Non-blocking stream wrapper.  Buffers partial reads and hands back one line at a time.
+class NonBlockingStream:
+  def __init__(self, stream):
+    fcntl.fcntl(stream, fcntl.F_SETFL, os.O_NONBLOCK)  # NOTE(review): overwrites existing flags; F_GETFL | os.O_NONBLOCK would be safer
+    self.stream = stream
+    self.buffer = ''
+    self.pos = 0
+
+  def readline(self):  # returns the next complete line (without '\n'), or None if no full line is available yet
+    while True:
+      index = self.buffer.find('\n', self.pos)
+      if index != -1:
+        result = self.buffer[self.pos:index]
+        self.pos = index + 1
+        return result
+
+      self.buffer = self.buffer[self.pos:]
+      self.pos = 0
+      try:
+        chunk = os.read(self.stream.fileno(), 4096)
+      except OSError, e:
+        if e.errno == errno.EAGAIN:  # no data available right now
+          return None
+        raise e
+      if len(chunk) == 0:  # EOF from the underlying stream
+        if len(self.buffer) == 0:
+          raise(EOFError)
+        else:
+          result = self.buffer
+          self.buffer = ''
+          self.pos = 0
+          return result
+      self.buffer += chunk
+
+# Plotter
+class Plotter:
+  def __init__(self, adbout):
+    self.adbout = adbout  # NonBlockingStream wrapping the adb log output
+
+    self.fig = plot.figure(1)
+    self.fig.suptitle('Window Orientation Listener', fontsize=12)
+    self.fig.set_dpi(96)
+    self.fig.set_size_inches(16, 12, forward=True)
+
+    self.raw_acceleration_x = self._make_timeseries()
+    self.raw_acceleration_y = self._make_timeseries()
+    self.raw_acceleration_z = self._make_timeseries()
+    self.raw_acceleration_axes = self._add_timeseries_axes(
+        1, 'Raw Acceleration', 'm/s^2', [-20, 20],
+        yticks=range(-15, 16, 5))
+    self.raw_acceleration_line_x = self._add_timeseries_line(
+        self.raw_acceleration_axes, 'x', 'red')
+    self.raw_acceleration_line_y = self._add_timeseries_line(
+        self.raw_acceleration_axes, 'y', 'green')
+    self.raw_acceleration_line_z = self._add_timeseries_line(
+        self.raw_acceleration_axes, 'z', 'blue')
+    self._add_timeseries_legend(self.raw_acceleration_axes)
+
+    shared_axis = self.raw_acceleration_axes  # all subplots share the time (x) axis
+
+    self.filtered_acceleration_x = self._make_timeseries()
+    self.filtered_acceleration_y = self._make_timeseries()
+    self.filtered_acceleration_z = self._make_timeseries()
+    self.magnitude = self._make_timeseries()
+    self.filtered_acceleration_axes = self._add_timeseries_axes(
+        2, 'Filtered Acceleration', 'm/s^2', [-20, 20],
+        sharex=shared_axis,
+        yticks=range(-15, 16, 5))
+    self.filtered_acceleration_line_x = self._add_timeseries_line(
+        self.filtered_acceleration_axes, 'x', 'red')
+    self.filtered_acceleration_line_y = self._add_timeseries_line(
+        self.filtered_acceleration_axes, 'y', 'green')
+    self.filtered_acceleration_line_z = self._add_timeseries_line(
+        self.filtered_acceleration_axes, 'z', 'blue')
+    self.magnitude_line = self._add_timeseries_line(
+        self.filtered_acceleration_axes, 'magnitude', 'orange', linewidth=2)
+    self._add_timeseries_legend(self.filtered_acceleration_axes)
+
+    self.tilt_angle = self._make_timeseries()
+    self.tilt_angle_axes = self._add_timeseries_axes(
+        3, 'Tilt Angle', 'degrees', [-105, 105],
+        sharex=shared_axis,
+        yticks=range(-90, 91, 30))
+    self.tilt_angle_line = self._add_timeseries_line(
+        self.tilt_angle_axes, 'tilt', 'black')
+    self._add_timeseries_legend(self.tilt_angle_axes)
+
+    self.orientation_angle = self._make_timeseries()
+    self.orientation_angle_axes = self._add_timeseries_axes(
+        4, 'Orientation Angle', 'degrees', [-25, 375],
+        sharex=shared_axis,
+        yticks=range(0, 361, 45))
+    self.orientation_angle_line = self._add_timeseries_line(
+        self.orientation_angle_axes, 'orientation', 'black')
+    self._add_timeseries_legend(self.orientation_angle_axes)
+
+    self.actual_orientation = self._make_timeseries()
+    self.proposed_orientation = self._make_timeseries()
+    self.orientation_axes = self._add_timeseries_axes(
+        5, 'Actual / Proposed Orientation and Confidence', 'rotation', [-1, 4],
+        sharex=shared_axis,
+        yticks=range(0, 4))
+    self.actual_orientation_line = self._add_timeseries_line(
+        self.orientation_axes, 'actual', 'black', linewidth=2)
+    self.proposed_orientation_line = self._add_timeseries_line(
+        self.orientation_axes, 'proposed', 'purple', linewidth=3)
+    self._add_timeseries_legend(self.orientation_axes)
+
+    self.confidence = [[self._make_timeseries(), self._make_timeseries()] for i in range(0, 4)]
+    self.confidence_polys = []
+
+    self.combined_confidence = self._make_timeseries()
+    self.orientation_confidence = self._make_timeseries()
+    self.tilt_confidence = self._make_timeseries()
+    self.magnitude_confidence = self._make_timeseries()
+    self.confidence_axes = self._add_timeseries_axes(
+        6, 'Proposed Orientation Confidence Factors', 'confidence', [-0.1, 1.1],
+        sharex=shared_axis,
+        yticks=[0.0, 0.2, 0.4, 0.6, 0.8, 1.0])
+    self.combined_confidence_line = self._add_timeseries_line(
+        self.confidence_axes, 'combined', 'purple', linewidth=2)
+    self.orientation_confidence_line = self._add_timeseries_line(
+        self.confidence_axes, 'orientation', 'black')
+    self.tilt_confidence_line = self._add_timeseries_line(
+        self.confidence_axes, 'tilt', 'brown')
+    self.magnitude_confidence_line = self._add_timeseries_line(
+        self.confidence_axes, 'magnitude', 'orange')
+    self._add_timeseries_legend(self.confidence_axes)
+
+    self.sample_latency = self._make_timeseries()
+    self.sample_latency_axes = self._add_timeseries_axes(
+        7, 'Accelerometer Sampling Latency', 'ms', [-10, 500],
+        sharex=shared_axis,
+        yticks=range(0, 500, 100))
+    self.sample_latency_line = self._add_timeseries_line(
+        self.sample_latency_axes, 'latency', 'black')
+    self._add_timeseries_legend(self.sample_latency_axes)
+
+    self.timer = self.fig.canvas.new_timer(interval=100)  # poll adb log output every 100ms
+    self.timer.add_callback(lambda: self.update())
+    self.timer.start()
+
+    self.timebase = None  # timestamp of the first parsed log line
+    self._reset_parse_state()
+
+  # Initialize a time series: a pair of parallel lists [times, values].
+  def _make_timeseries(self):
+    return [[], []]
+
+  # Add a subplot (stacked vertically by 1-based index) to the figure for a time series.
+  def _add_timeseries_axes(self, index, title, ylabel, ylim, yticks, sharex=None):
+    num_graphs = 7  # keep in sync with the number of subplots created in __init__
+    height = 0.9 / num_graphs
+    top = 0.95 - height * index
+    axes = self.fig.add_axes([0.1, top, 0.8, height],
+        xscale='linear',
+        xlim=[0, timespan],
+        ylabel=ylabel,
+        yscale='linear',
+        ylim=ylim,
+        sharex=sharex)
+    axes.text(0.02, 0.02, title, transform=axes.transAxes, fontsize=10, fontweight='bold')
+    axes.set_xlabel('time (s)', fontsize=10, fontweight='bold')
+    axes.set_ylabel(ylabel, fontsize=10, fontweight='bold')
+    axes.set_xticks(range(0, timespan + 1, timeticks))
+    axes.set_yticks(yticks)
+    axes.grid(True)
+
+    for label in axes.get_xticklabels():
+      label.set_fontsize(9)
+    for label in axes.get_yticklabels():
+      label.set_fontsize(9)
+
+    return axes
+
+  # Add a line to the axes for a time series.
+  def _add_timeseries_line(self, axes, label, color, linewidth=1):
+    return axes.plot([], label=label, color=color, linewidth=linewidth)[0]
+
+  # Add a legend to a time series.
+  def _add_timeseries_legend(self, axes):
+    axes.legend(
+        loc='upper left',
+        bbox_to_anchor=(1.01, 1),
+        borderpad=0.1,
+        borderaxespad=0.1,
+        prop={'size': 10})
+
+  # Resets the parse state.
+  def _reset_parse_state(self):
+    self.parse_raw_acceleration_x = None
+    self.parse_raw_acceleration_y = None
+    self.parse_raw_acceleration_z = None
+    self.parse_filtered_acceleration_x = None
+    self.parse_filtered_acceleration_y = None
+    self.parse_filtered_acceleration_z = None
+    self.parse_magnitude = None
+    self.parse_tilt_angle = None
+    self.parse_orientation_angle = None
+    self.parse_proposed_orientation = None
+    self.parse_combined_confidence = None
+    self.parse_orientation_confidence = None
+    self.parse_tilt_confidence = None
+    self.parse_magnitude_confidence = None
+    self.parse_actual_orientation = None
+    self.parse_confidence = None
+    self.parse_sample_latency = None
+
  # Update samples.
  #
  # Runs periodically on the canvas timer (installed in __init__): drains all
  # currently buffered adb logcat lines, parses them into the pending parse_*
  # fields, commits one complete sample each time a 'Result:' record arrives,
  # scrolls the plots when the data runs past the visible timespan, and
  # finally redraws every line.
  def update(self):
    timeindex = 0
    while True:
      try:
        line = self.adbout.readline()
      except EOFError:
        # adb has exited; close the plot window and stop updating.
        plot.close()
        return
      if line is None:
        # No more buffered log data for now; fall through to redraw.
        break
      print line

      try:
        timestamp = self._parse_timestamp(line)
      except ValueError, e:
        # Line without a parseable logcat timestamp; ignore it.
        continue
      if self.timebase is None:
        # The first timestamp seen becomes time zero on the x-axis.
        self.timebase = timestamp
      delta = timestamp - self.timebase
      timeindex = delta.seconds + delta.microseconds * 0.000001

      # Each log record contributes one or more of the fields below; they are
      # accumulated in parse_* until a 'Result:' record completes the sample.
      if line.find('Raw acceleration vector:') != -1:
        self.parse_raw_acceleration_x = self._get_following_number(line, 'x=')
        self.parse_raw_acceleration_y = self._get_following_number(line, 'y=')
        self.parse_raw_acceleration_z = self._get_following_number(line, 'z=')

      if line.find('Filtered acceleration vector:') != -1:
        self.parse_filtered_acceleration_x = self._get_following_number(line, 'x=')
        self.parse_filtered_acceleration_y = self._get_following_number(line, 'y=')
        self.parse_filtered_acceleration_z = self._get_following_number(line, 'z=')

      if line.find('magnitude=') != -1:
        self.parse_magnitude = self._get_following_number(line, 'magnitude=')

      if line.find('tiltAngle=') != -1:
        self.parse_tilt_angle = self._get_following_number(line, 'tiltAngle=')

      if line.find('orientationAngle=') != -1:
        self.parse_orientation_angle = self._get_following_number(line, 'orientationAngle=')

      if line.find('Proposal:') != -1:
        self.parse_proposed_orientation = self._get_following_number(line, 'proposedOrientation=')
        self.parse_combined_confidence = self._get_following_number(line, 'combinedConfidence=')
        self.parse_orientation_confidence = self._get_following_number(line, 'orientationConfidence=')
        self.parse_tilt_confidence = self._get_following_number(line, 'tiltConfidence=')
        self.parse_magnitude_confidence = self._get_following_number(line, 'magnitudeConfidence=')

      if line.find('Result:') != -1:
        # 'Result:' closes out a sample: append everything gathered so far
        # at this timeindex, then reset for the next batch.
        self.parse_actual_orientation = self._get_following_number(line, 'rotation=')
        self.parse_confidence = self._get_following_array_of_numbers(line, 'confidence=')
        self.parse_sample_latency = self._get_following_number(line, 'timeDeltaMS=')

        # Per-rotation confidence bands: for rotation i the band spans from
        # baseline i up to i + confidence[i] (drawn by fill_between below).
        for i in range(0, 4):
          if self.parse_confidence is not None:
            self._append(self.confidence[i][0], timeindex, i)
            self._append(self.confidence[i][1], timeindex, i + self.parse_confidence[i])
          else:
            self._append(self.confidence[i][0], timeindex, None)
            self._append(self.confidence[i][1], timeindex, None)

        # Fields never seen in this batch append None, leaving gaps in the plot.
        self._append(self.raw_acceleration_x, timeindex, self.parse_raw_acceleration_x)
        self._append(self.raw_acceleration_y, timeindex, self.parse_raw_acceleration_y)
        self._append(self.raw_acceleration_z, timeindex, self.parse_raw_acceleration_z)
        self._append(self.filtered_acceleration_x, timeindex, self.parse_filtered_acceleration_x)
        self._append(self.filtered_acceleration_y, timeindex, self.parse_filtered_acceleration_y)
        self._append(self.filtered_acceleration_z, timeindex, self.parse_filtered_acceleration_z)
        self._append(self.magnitude, timeindex, self.parse_magnitude)
        self._append(self.tilt_angle, timeindex, self.parse_tilt_angle)
        self._append(self.orientation_angle, timeindex, self.parse_orientation_angle)
        self._append(self.actual_orientation, timeindex, self.parse_actual_orientation)
        self._append(self.proposed_orientation, timeindex, self.parse_proposed_orientation)
        self._append(self.combined_confidence, timeindex, self.parse_combined_confidence)
        self._append(self.orientation_confidence, timeindex, self.parse_orientation_confidence)
        self._append(self.tilt_confidence, timeindex, self.parse_tilt_confidence)
        self._append(self.magnitude_confidence, timeindex, self.parse_magnitude_confidence)
        self._append(self.sample_latency, timeindex, self.parse_sample_latency)
        self._reset_parse_state()

    # Scroll the plots.
    # NOTE(review): 'timespan' and 'scrolljump' are module-level constants
    # defined earlier in this script (outside this excerpt).
    if timeindex > timespan:
      bottom = int(timeindex) - timespan + scrolljump
      # Advance the timebase so future timeindex values stay on-screen.
      self.timebase += timedelta(seconds=bottom)
      self._scroll(self.raw_acceleration_x, bottom)
      self._scroll(self.raw_acceleration_y, bottom)
      self._scroll(self.raw_acceleration_z, bottom)
      self._scroll(self.filtered_acceleration_x, bottom)
      self._scroll(self.filtered_acceleration_y, bottom)
      self._scroll(self.filtered_acceleration_z, bottom)
      self._scroll(self.magnitude, bottom)
      self._scroll(self.tilt_angle, bottom)
      self._scroll(self.orientation_angle, bottom)
      self._scroll(self.actual_orientation, bottom)
      self._scroll(self.proposed_orientation, bottom)
      self._scroll(self.combined_confidence, bottom)
      self._scroll(self.orientation_confidence, bottom)
      self._scroll(self.tilt_confidence, bottom)
      self._scroll(self.magnitude_confidence, bottom)
      self._scroll(self.sample_latency, bottom)
      for i in range(0, 4):
        self._scroll(self.confidence[i][0], bottom)
        self._scroll(self.confidence[i][1], bottom)

    # Redraw the plots.
    self.raw_acceleration_line_x.set_data(self.raw_acceleration_x)
    self.raw_acceleration_line_y.set_data(self.raw_acceleration_y)
    self.raw_acceleration_line_z.set_data(self.raw_acceleration_z)
    self.filtered_acceleration_line_x.set_data(self.filtered_acceleration_x)
    self.filtered_acceleration_line_y.set_data(self.filtered_acceleration_y)
    self.filtered_acceleration_line_z.set_data(self.filtered_acceleration_z)
    self.magnitude_line.set_data(self.magnitude)
    self.tilt_angle_line.set_data(self.tilt_angle)
    self.orientation_angle_line.set_data(self.orientation_angle)
    self.actual_orientation_line.set_data(self.actual_orientation)
    self.proposed_orientation_line.set_data(self.proposed_orientation)
    self.combined_confidence_line.set_data(self.combined_confidence)
    self.orientation_confidence_line.set_data(self.orientation_confidence)
    self.tilt_confidence_line.set_data(self.tilt_confidence)
    self.magnitude_confidence_line.set_data(self.magnitude_confidence)
    self.sample_latency_line.set_data(self.sample_latency)

    # fill_between artists cannot be updated in place; remove the old bands
    # and create fresh ones from the current confidence data.
    for poly in self.confidence_polys:
      poly.remove()
    self.confidence_polys = []
    for i in range(0, 4):
      self.confidence_polys.append(self.orientation_axes.fill_between(self.confidence[i][0][0],
        self.confidence[i][0][1], self.confidence[i][1][1],
        facecolor='goldenrod', edgecolor='goldenrod'))

    self.fig.canvas.draw_idle()
+
+  # Scroll a time series.
+  def _scroll(self, timeseries, bottom):
+    bottom_index = bisect.bisect_left(timeseries[0], bottom)
+    del timeseries[0][:bottom_index]
+    del timeseries[1][:bottom_index]
+    for i, timeindex in enumerate(timeseries[0]):
+      timeseries[0][i] = timeindex - bottom
+
+  # Extract a word following the specified prefix.
+  def _get_following_word(self, line, prefix):
+    prefix_index = line.find(prefix)
+    if prefix_index == -1:
+      return None
+    start_index = prefix_index + len(prefix)
+    delim_index = line.find(',', start_index)
+    if delim_index == -1:
+      return line[start_index:]
+    else:
+      return line[start_index:delim_index]
+
+  # Extract a number following the specified prefix.
+  def _get_following_number(self, line, prefix):
+    word = self._get_following_word(line, prefix)
+    if word is None:
+      return None
+    return float(word)
+
+  # Extract an array of numbers following the specified prefix.
+  def _get_following_array_of_numbers(self, line, prefix):
+    prefix_index = line.find(prefix + '[')
+    if prefix_index == -1:
+      return None
+    start_index = prefix_index + len(prefix) + 1
+    delim_index = line.find(']', start_index)
+    if delim_index == -1:
+      return None
+
+    result = []
+    while start_index < delim_index:
+      comma_index = line.find(', ', start_index, delim_index)
+      if comma_index == -1:
+        result.append(float(line[start_index:delim_index]))
+        break;
+      result.append(float(line[start_index:comma_index]))
+      start_index = comma_index + 2
+    return result
+
+  # Add a value to a time series.
+  def _append(self, timeseries, timeindex, number):
+    timeseries[0].append(timeindex)
+    timeseries[1].append(number)
+
+  # Parse the logcat timestamp.
+  # Timestamp has the form '01-21 20:42:42.930'
+  def _parse_timestamp(self, line):
+    return datetime.strptime(line[0:18], '%m-%d %H:%M:%S.%f')
+
# Notice
print "Window Orientation Listener plotting tool"
print "-----------------------------------------\n"
print "Please turn on the Window Orientation Listener logging in Development Settings."

# Start adb.
print "Starting adb logcat.\n"

# '-s' silences all tags except those listed; '-v time' prepends the
# timestamps that Plotter._parse_timestamp relies on.
adb = subprocess.Popen(['adb', 'logcat', '-s', '-v', 'time', 'WindowOrientationListener:V'],
    stdout=subprocess.PIPE)
# NonBlockingStream is defined earlier in this script (outside this excerpt);
# presumably it lets Plotter.update() poll for lines without blocking the
# UI timer — confirm against its definition.
adbout = NonBlockingStream(adb.stdout)

# Prepare plotter.
plotter = Plotter(adbout)
plotter.update()

# Main loop.
# 'plot' is the pyplot-style module imported earlier; show() blocks until the
# window closes while the canvas timer keeps invoking plotter.update().
plot.show()