Merge "Improve screenshot chord debouncing. Bug: 5011907"
diff --git a/api/current.txt b/api/current.txt
index 92969f6..662ebcd 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -24082,6 +24082,8 @@
     method public int getCurrentItemIndex();
     method public int getFromIndex();
     method public int getItemCount();
+    method public int getMaxScrollX();
+    method public int getMaxScrollY();
     method public android.os.Parcelable getParcelableData();
     method public int getRemovedCount();
     method public int getScrollX();
@@ -24108,6 +24110,8 @@
     method public void setFromIndex(int);
     method public void setFullScreen(boolean);
     method public void setItemCount(int);
+    method public void setMaxScrollX(int);
+    method public void setMaxScrollY(int);
     method public void setParcelableData(android.os.Parcelable);
     method public void setPassword(boolean);
     method public void setRemovedCount(int);
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index d65e6df..9bd4a3b 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -1464,6 +1464,8 @@
         private static final String KEY_MAX_NUM_DETECTED_FACES_SW = "max-num-detected-faces-sw";
         private static final String KEY_RECORDING_HINT = "recording-hint";
         private static final String KEY_VIDEO_SNAPSHOT_SUPPORTED = "video-snapshot-supported";
+        private static final String KEY_VIDEO_STABILIZATION = "video-stabilization";
+        private static final String KEY_VIDEO_STABILIZATION_SUPPORTED = "video-stabilization-supported";
 
         // Parameter key suffix for supported values.
         private static final String SUPPORTED_VALUES_SUFFIX = "-values";
@@ -2443,7 +2445,7 @@
          *
          * @param value new white balance.
          * @see #getWhiteBalance()
-         * @see #setAutoWhiteBalanceLock()
+         * @see #setAutoWhiteBalanceLock(boolean)
          */
         public void setWhiteBalance(String value) {
             set(KEY_WHITE_BALANCE, value);
@@ -3208,6 +3210,59 @@
             return TRUE.equals(str);
         }
 
+        /**
+         * <p>Enables and disables video stabilization. Use
+         * {@link #isVideoStabilizationSupported} to determine if calling this
+         * method is valid.</p>
+         *
+         * <p>Video stabilization reduces the shaking due to the motion of the
+         * camera in both the preview stream and in recorded videos, including
+         * data received from the preview callback. It does not reduce motion
+         * blur in images captured with
+         * {@link Camera#takePicture takePicture}.</p>
+         *
+         * <p>Video stabilization can be enabled and disabled while preview or
+         * recording is active, but toggling it may cause a jump in the video
+         * stream that may be undesirable in a recorded video.</p>
+         *
+         * @param toggle Set to true to enable video stabilization, and false to
+         * disable video stabilization.
+         * @see #isVideoStabilizationSupported()
+         * @see #getVideoStabilization()
+         * @hide
+         */
+        public void setVideoStabilization(boolean toggle) {
+            set(KEY_VIDEO_STABILIZATION, toggle ? TRUE : FALSE);
+        }
+
+        /**
+         * Get the current state of video stabilization. See
+         * {@link #setVideoStabilization} for details of video stabilization.
+         *
+         * @return true if video stabilization is enabled
+         * @see #isVideoStabilizationSupported()
+         * @see #setVideoStabilization(boolean)
+         * @hide
+         */
+        public boolean getVideoStabilization() {
+            String str = get(KEY_VIDEO_STABILIZATION);
+            return TRUE.equals(str);
+        }
+
+        /**
+         * Returns true if video stabilization is supported. See
+         * {@link #setVideoStabilization} for details of video stabilization.
+         *
+         * @return true if video stabilization is supported
+         * @see #setVideoStabilization(boolean)
+         * @see #getVideoStabilization()
+         * @hide
+         */
+        public boolean isVideoStabilizationSupported() {
+            String str = get(KEY_VIDEO_STABILIZATION_SUPPORTED);
+            return TRUE.equals(str);
+        }
+
         // Splits a comma delimited string to an ArrayList of String.
         // Return null if the passing string is null or the size is 0.
         private ArrayList<String> split(String str) {
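
For orientation, a minimal usage sketch for the new parameters; it assumes a camera has already been opened and that these accessors (still marked @hide above) are visible to the caller:

// Editor's sketch, not part of the patch.
Camera camera = Camera.open();
Camera.Parameters params = camera.getParameters();
if (params.isVideoStabilizationSupported()) {
    // May be toggled while preview or recording is active, though the
    // stream can show a visible jump when the setting changes.
    params.setVideoStabilization(true);
    camera.setParameters(params);
}
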
diff --git a/core/java/android/os/AsyncTask.java b/core/java/android/os/AsyncTask.java
index 70ec0af..9dea4c4 100644
--- a/core/java/android/os/AsyncTask.java
+++ b/core/java/android/os/AsyncTask.java
@@ -32,13 +32,12 @@
 import java.util.concurrent.atomic.AtomicInteger;
 
 /**
- * <p>AsyncTask enables proper and easy use of the UI thread (also called main thread) or
- * any other looper thread. AsyncTask is most commonly used to interact with the UI thread.
- * This class allows to perform background operations and publish results on a looper
- * thread without having to manipulate threads and/or handlers.</p>
+ * <p>AsyncTask enables proper and easy use of the UI thread. This class allows you
+ * to perform background operations and publish results on the UI thread without
+ * having to manipulate threads and/or handlers.</p>
  *
  * <p>An asynchronous task is defined by a computation that runs on a background thread and
- * whose result is published on a looper thread. An asynchronous task is defined by 3 generic
+ * whose result is published on the UI thread. An asynchronous task is defined by 3 generic
  * types, called <code>Params</code>, <code>Progress</code> and <code>Result</code>,
  * and 4 steps, called <code>onPreExecute</code>, <code>doInBackground</code>,
  * <code>onProgressUpdate</code> and <code>onPostExecute</code>.</p>
@@ -102,7 +101,7 @@
  * <h2>The 4 steps</h2>
  * <p>When an asynchronous task is executed, the task goes through 4 steps:</p>
  * <ol>
- *     <li>{@link #onPreExecute()}, invoked on the looper thread immediately after the task
+ *     <li>{@link #onPreExecute()}, invoked on the UI thread immediately after the task
  *     is executed. This step is normally used to setup the task, for instance by
  *     showing a progress bar in the user interface.</li>
  *     <li>{@link #doInBackground}, invoked on the background thread
@@ -111,14 +110,14 @@
  *     of the asynchronous task are passed to this step. The result of the computation must
  *     be returned by this step and will be passed back to the last step. This step
  *     can also use {@link #publishProgress} to publish one or more units
- *     of progress. These values are published on the looper thread, in the
+ *     of progress. These values are published on the UI thread, in the
  *     {@link #onProgressUpdate} step.</li>
- *     <li>{@link #onProgressUpdate}, invoked on the looper thread after a
+ *     <li>{@link #onProgressUpdate}, invoked on the UI thread after a
  *     call to {@link #publishProgress}. The timing of the execution is
  *     undefined. This method is used to display any form of progress in the user
  *     interface while the background computation is still executing. For instance,
  *     it can be used to animate a progress bar or show logs in a text field.</li>
- *     <li>{@link #onPostExecute}, invoked on the looper thread after the background
+ *     <li>{@link #onPostExecute}, invoked on the UI thread after the background
  *     computation finishes. The result of the background computation is passed to
  *     this step as a parameter.</li>
  * </ol>
@@ -136,8 +135,8 @@
  * <p>There are a few threading rules that must be followed for this class to
  * work properly:</p>
  * <ul>
- *     <li>The task instance must be created on the looper thread.</li>
- *     <li>{@link #execute} must be invoked on the looper thread.</li>
+ *     <li>The task instance must be created on the UI thread.</li>
+ *     <li>{@link #execute} must be invoked on the UI thread.</li>
  *     <li>Do not call {@link #onPreExecute()}, {@link #onPostExecute},
  *     {@link #doInBackground}, {@link #onProgressUpdate} manually.</li>
  *     <li>The task can be executed only once (an exception will be thrown if
@@ -153,9 +152,6 @@
  *     <li>Set member fields in {@link #doInBackground}, and refer to them in
  *     {@link #onProgressUpdate} and {@link #onPostExecute}.
  * </ul>
- * 
- * @see Looper
- * @see Handler
  */
 public abstract class AsyncTask<Params, Progress, Result> {
     private static final String LOG_TAG = "AsyncTask";
@@ -191,13 +187,7 @@
     private static final int MESSAGE_POST_RESULT = 0x1;
     private static final int MESSAGE_POST_PROGRESS = 0x2;
 
-    private static final ThreadLocal<InternalHandler> sHandler =
-            new ThreadLocal<InternalHandler>() {
-                @Override
-                protected InternalHandler initialValue() {
-                    return new InternalHandler();
-                }
-            };
+    private static final InternalHandler sHandler = new InternalHandler();
 
     private static volatile Executor sDefaultExecutor = SERIAL_EXECUTOR;
     private final WorkerRunnable<Params, Result> mWorker;
@@ -206,7 +196,6 @@
     private volatile Status mStatus = Status.PENDING;
     
     private final AtomicBoolean mTaskInvoked = new AtomicBoolean();
-    private final InternalHandler mHandler;
 
     private static class SerialExecutor implements Executor {
         final ArrayDeque<Runnable> mTasks = new ArrayDeque<Runnable>();
@@ -253,8 +242,9 @@
         FINISHED,
     }
 
-    /** @hide */
+    /** @hide Used to force static handler to be created. */
     public static void init() {
+        sHandler.getLooper();
     }
 
     /** @hide */
@@ -263,26 +253,14 @@
     }
 
     /**
-     * Creates a new asynchronous task. This constructor must be invoked on the looper thread.
-     * 
-     * @throws IllegalStateException if this constructor is invoked on a non-looper thread
-     * 
-     * @see Looper
+     * Creates a new asynchronous task. This constructor must be invoked on the UI thread.
      */
     public AsyncTask() {
-        if (Looper.myLooper() == null) {
-            throw new IllegalStateException("AsyncTask can be only instanciated on a " 
-                    + "looper thread. The current thread is " + Thread.currentThread());
-        }
-
-        mHandler = sHandler.get();
-
         mWorker = new WorkerRunnable<Params, Result>() {
             public Result call() throws Exception {
                 mTaskInvoked.set(true);
 
                 Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
-                //noinspection unchecked
                 return postResult(doInBackground(mParams));
             }
         };
@@ -317,8 +295,7 @@
     }
 
     private Result postResult(Result result) {
-        @SuppressWarnings({"unchecked"})
-        Message message = mHandler.obtainMessage(MESSAGE_POST_RESULT,
+        Message message = sHandler.obtainMessage(MESSAGE_POST_RESULT,
                 new AsyncTaskResult<Result>(this, result));
         message.sendToTarget();
         return result;
@@ -339,7 +316,7 @@
      * by the caller of this task.
      *
      * This method can call {@link #publishProgress} to publish updates
-     * on the looper thread.
+     * on the UI thread.
      *
      * @param params The parameters of the task.
      *
@@ -352,7 +329,7 @@
     protected abstract Result doInBackground(Params... params);
 
     /**
-     * Runs on the looper thread before {@link #doInBackground}.
+     * Runs on the UI thread before {@link #doInBackground}.
      *
      * @see #onPostExecute
      * @see #doInBackground
@@ -361,7 +338,7 @@
     }
 
     /**
-     * <p>Runs on the looper thread after {@link #doInBackground}. The
+     * <p>Runs on the UI thread after {@link #doInBackground}. The
      * specified result is the value returned by {@link #doInBackground}.</p>
      * 
      * <p>This method won't be invoked if the task was cancelled.</p>
@@ -377,7 +354,7 @@
     }
 
     /**
-     * Runs on the looper thread after {@link #publishProgress} is invoked.
+     * Runs on the UI thread after {@link #publishProgress} is invoked.
      * The specified values are the values passed to {@link #publishProgress}.
      *
      * @param values The values indicating progress.
@@ -390,7 +367,7 @@
     }
 
     /**
-     * <p>Runs on the looper thread after {@link #cancel(boolean)} is invoked and
+     * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
      * {@link #doInBackground(Object[])} has finished.</p>
      * 
      * <p>The default implementation simply invokes {@link #onCancelled()} and
@@ -413,7 +390,7 @@
      * This method is invoked by the default implementation of
      * {@link #onCancelled(Object)}.</p>
      * 
-     * <p>Runs on the looper thread after {@link #cancel(boolean)} is invoked and
+     * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
      * {@link #doInBackground(Object[])} has finished.</p>
      *
      * @see #onCancelled(Object) 
@@ -448,7 +425,7 @@
      * an attempt to stop the task.</p>
      * 
      * <p>Calling this method will result in {@link #onCancelled(Object)} being
-     * invoked on the looper thread after {@link #doInBackground(Object[])}
+     * invoked on the UI thread after {@link #doInBackground(Object[])}
      * returns. Calling this method guarantees that {@link #onPostExecute(Object)}
      * is never invoked. After invoking this method, you should check the
      * value returned by {@link #isCancelled()} periodically from
@@ -521,15 +498,14 @@
      * with {@link #THREAD_POOL_EXECUTOR}; however, see commentary there for warnings on
      * its use.
      *
-     * <p>This method must be invoked on the looper thread.
+     * <p>This method must be invoked on the UI thread.
      *
      * @param params The parameters of the task.
      *
      * @return This instance of AsyncTask.
      *
      * @throws IllegalStateException If {@link #getStatus()} returns either
-     *         {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED} or
-     *         the current thread is not a looper thread.
+     *         {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}.
      */
     public final AsyncTask<Params, Progress, Result> execute(Params... params) {
         return executeOnExecutor(sDefaultExecutor, params);
@@ -555,7 +531,7 @@
      * executed in serial; to guarantee such work is serialized regardless of
      * platform version you can use this function with {@link #SERIAL_EXECUTOR}.
      *
-     * <p>This method must be invoked on the looper thread.
+     * <p>This method must be invoked on the UI thread.
      *
      * @param exec The executor to use.  {@link #THREAD_POOL_EXECUTOR} is available as a
      *              convenient process-wide thread pool for tasks that are loosely coupled.
@@ -564,16 +540,10 @@
      * @return This instance of AsyncTask.
      *
      * @throws IllegalStateException If {@link #getStatus()} returns either
-     *         {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED} or
-     *         the current thread is not a looper thread.
+     *         {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}.
      */
     public final AsyncTask<Params, Progress, Result> executeOnExecutor(Executor exec,
             Params... params) {
-        if (Looper.myLooper() == null) {
-            throw new IllegalStateException("AsyncTask can be only instanciated on a " 
-                    + "looper thread. The current thread is " + Thread.currentThread());
-        }
-        
         if (mStatus != Status.PENDING) {
             switch (mStatus) {
                 case RUNNING:
@@ -606,9 +576,9 @@
 
     /**
      * This method can be invoked from {@link #doInBackground} to
-     * publish updates on the looper thread while the background computation is
+     * publish updates on the UI thread while the background computation is
      * still running. Each call to this method will trigger the execution of
-     * {@link #onProgressUpdate} on the looper thread.
+     * {@link #onProgressUpdate} on the UI thread.
      *
  *     {@link #onProgressUpdate} will not be called if the task has been
      * canceled.
@@ -620,7 +590,7 @@
      */
     protected final void publishProgress(Progress... values) {
         if (!isCancelled()) {
-            mHandler.obtainMessage(MESSAGE_POST_PROGRESS,
+            sHandler.obtainMessage(MESSAGE_POST_PROGRESS,
                     new AsyncTaskResult<Progress>(this, values)).sendToTarget();
         }
     }
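
The reworded documentation keeps the same contract: everything except doInBackground() runs on the UI thread. A minimal subclass sketch, with illustrative names, looks like this:

// Editor's sketch, not part of the patch.
class DownloadTask extends AsyncTask<String, Integer, Long> {
    @Override
    protected void onPreExecute() {
        // Runs on the UI thread before doInBackground(), e.g. show a progress bar.
    }

    @Override
    protected Long doInBackground(String... urls) {
        long totalBytes = 0;
        for (int i = 0; i < urls.length; i++) {
            // ... do the actual work off the UI thread ...
            publishProgress((int) ((i + 1) * 100f / urls.length));
            if (isCancelled()) break;
        }
        return totalBytes;
    }

    @Override
    protected void onProgressUpdate(Integer... progress) {
        // Runs on the UI thread; update the progress bar here.
    }

    @Override
    protected void onPostExecute(Long result) {
        // Runs on the UI thread with the value returned by doInBackground().
    }
}

// Must be created and executed on the UI thread:
new DownloadTask().execute("url1", "url2");
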
diff --git a/core/java/android/provider/CalendarContract.java b/core/java/android/provider/CalendarContract.java
index 4b4d308..d7060c1 100644
--- a/core/java/android/provider/CalendarContract.java
+++ b/core/java/android/provider/CalendarContract.java
@@ -300,8 +300,25 @@
         public static final String CALENDAR_COLOR = "calendar_color";
 
         /**
+         * An index for looking up a color from the {@link Colors} table. NULL
+         * or an empty string are reserved for indicating that the calendar does
+         * not use an index for looking up the color. The provider will update
+         * {@link #CALENDAR_COLOR} automatically when a valid index is written
+         * to this column. @see Colors
+         * <P>
+         * Type: TEXT
+         * </P>
+         * TODO UNHIDE
+         *
+         * @hide
+         */
+        public static final String CALENDAR_COLOR_INDEX = "calendar_color_index";
+
+        /**
          * The display name of the calendar. Column name.
-         * <P>Type: TEXT</P>
+         * <P>
+         * Type: TEXT
+         * </P>
          */
         public static final String CALENDAR_DISPLAY_NAME = "calendar_displayName";
 
@@ -392,6 +409,34 @@
          * <P>Type: TEXT</P>
          */
         public static final String ALLOWED_REMINDERS = "allowedReminders";
+
+        /**
+         * A comma separated list of availability types supported for this
+         * calendar in the format "#,#,#". Valid types are
+         * {@link Events#AVAILABILITY_BUSY}, {@link Events#AVAILABILITY_FREE},
+         * {@link Events#AVAILABILITY_TENTATIVE}. Setting this field to only
+         * {@link Events#AVAILABILITY_BUSY} should be used to indicate that
+         * changing the availability is not supported.
+         *
+         * TODO UNHIDE, Update Calendars doc
+         *
+         * @hide
+         */
+        public static final String ALLOWED_AVAILABILITY = "allowedAvailability";
+
+        /**
+         * A comma separated list of attendee types supported for this calendar
+         * in the format "#,#,#". Valid types are {@link Attendees#TYPE_NONE},
+         * {@link Attendees#TYPE_OPTIONAL}, {@link Attendees#TYPE_REQUIRED},
+         * {@link Attendees#TYPE_RESOURCE}. Setting this field to only
+         * {@link Attendees#TYPE_NONE} should be used to indicate that changing
+         * the attendee type is not supported.
+         *
+         * TODO UNHIDE, Update Calendars doc
+         *
+         * @hide
+         */
+        public static final String ALLOWED_ATTENDEE_TYPES = "allowedAttendeeTypes";
     }
 
     /**
@@ -688,13 +733,22 @@
 
         /**
          * The type of attendee. Column name.
-         * <P>Type: Integer (one of {@link #TYPE_REQUIRED}, {@link #TYPE_OPTIONAL})</P>
+         * <P>
+         * Type: Integer (one of {@link #TYPE_REQUIRED}, {@link #TYPE_OPTIONAL})
+         * </P>
          */
         public static final String ATTENDEE_TYPE = "attendeeType";
 
         public static final int TYPE_NONE = 0;
         public static final int TYPE_REQUIRED = 1;
         public static final int TYPE_OPTIONAL = 2;
+        /**
+         * This specifies that an attendee is a resource, such as a room, and
+         * not an actual person. TODO UNHIDE and add to ATTENDEE_TYPE comment
+         * 
+         * @hide
+         */
+        public static final int TYPE_RESOURCE = 3;
 
         /**
          * The attendance status of the attendee. Column name.
@@ -787,13 +841,26 @@
         public static final String EVENT_LOCATION = "eventLocation";
 
         /**
-         * A secondary color for the individual event. Reserved for future use.
-         * Column name.
+         * A secondary color for the individual event. This should only be
+         * updated by the sync adapter for a given account.
          * <P>Type: INTEGER</P>
          */
         public static final String EVENT_COLOR = "eventColor";
 
         /**
+         * A secondary color index for the individual event. NULL or an empty
+         * string are reserved for indicating that the event does not use an
+         * index for looking up the color. The provider will update
+         * {@link #EVENT_COLOR} automatically when a valid index is written to
+         * this column. @see Colors
+         * <P>Type: TEXT</P>
+         * TODO UNHIDE
+         *
+         * @hide
+         */
+        public static final String EVENT_COLOR_INDEX = "eventColor_index";
+
+        /**
          * The event status. Column name.
          * <P>Type: INTEGER (one of {@link #STATUS_TENTATIVE}...)</P>
          */
@@ -964,6 +1031,15 @@
          * other events.
          */
         public static final int AVAILABILITY_FREE = 1;
+        /**
+         * Indicates that the owner's availability may change, but should be
+         * considered busy time that will conflict.
+         *
+         * TODO UNHIDE
+         *
+         * @hide
+         */
+        public static final int AVAILABILITY_TENTATIVE = 2;
 
         /**
          * Whether the event has an alarm or not. Column name.
@@ -2224,6 +2300,91 @@
         }
     }
 
+    /**
+     * @hide
+     * TODO UNHIDE
+     */
+    protected interface ColorsColumns extends SyncStateContract.Columns {
+
+        /**
+         * The type of color, which describes how it should be used. Valid types
+         * are {@link #TYPE_CALENDAR} and {@link #TYPE_EVENT}. Column name.
+         * <P>
+         * Type: INTEGER (NOT NULL)
+         * </P>
+         */
+        public static final String COLOR_TYPE = "color_type";
+
+        /**
+         * This indicates a color that can be used for calendars.
+         */
+        public static final int TYPE_CALENDAR = 0;
+        /**
+         * This indicates a color that can be used for events.
+         */
+        public static final int TYPE_EVENT = 1;
+
+        /**
+         * The index used to reference this color. This can be any non-empty
+         * string, but must be unique for a given {@link #ACCOUNT_TYPE} and
+         * {@link #ACCOUNT_NAME}. Column name.
+         * <P>
+         * Type: TEXT
+         * </P>
+         */
+        public static final String COLOR_INDEX = "color_index";
+
+        /**
+         * The version of this color that will work with dark text as an 8-bit
+         * ARGB integer value. Colors should specify alpha as fully opaque (eg
+         * 0xFF993322) as the alpha may be ignored or modified for display.
+         * Column name.
+         * <P>
+         * Type: INTEGER (NOT NULL)
+         * </P>
+         */
+        public static final String COLOR_LIGHT = "color_light";
+
+        /**
+         * The version of this color that will work with light text as an 8-bit
+         * ARGB integer value. Colors should specify alpha as fully opaque (eg
+         * 0xFF993322) as the alpha may be ignored or modified for display.
+         * Column name.
+         * <P>
+         * Type: INTEGER (NOT NULL)
+         * </P>
+         */
+        public static final String COLOR_DARK = "color_dark";
+
+    }
+
+    /**
+     * Fields for accessing colors available for a given account. Colors are
+     * referenced by {@link #COLOR_INDEX} which must be unique for a given
+     * account name/type. These values should only be updated by the sync
+     * adapter.
+     * TODO UNHIDE
+     *
+     * @hide
+     */
+    public static final class Colors implements ColorsColumns {
+        /**
+         * @hide
+         */
+        public static final String TABLE_NAME = "Colors";
+        /**
+         * The Uri for querying color information
+         */
+        @SuppressWarnings("hiding")
+        public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/colors");
+
+        /**
+         * This utility class cannot be instantiated
+         */
+        private Colors() {
+        }
+    }
+
     protected interface ExtendedPropertiesColumns {
         /**
          * The event the extended property belongs to. Column name.
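
A hedged sketch of how a sync adapter might read the new Colors table once it is unhidden; Colors refers to CalendarContract.Colors from the patch above, and the ContentResolver and account values are assumed placeholders:

// Editor's sketch, not part of the patch.
String[] projection = {
        Colors.COLOR_INDEX, Colors.COLOR_TYPE, Colors.COLOR_LIGHT, Colors.COLOR_DARK };
Cursor cursor = resolver.query(Colors.CONTENT_URI, projection,
        Colors.ACCOUNT_NAME + "=? AND " + Colors.ACCOUNT_TYPE + "=?",
        new String[] { "account@example.com", "com.example.account" }, null);
try {
    while (cursor != null && cursor.moveToNext()) {
        String colorIndex = cursor.getString(0);  // unique per account name/type
        int colorType = cursor.getInt(1);         // TYPE_CALENDAR or TYPE_EVENT
        int colorLight = cursor.getInt(2);        // fully opaque ARGB, e.g. 0xFF993322
        int colorDark = cursor.getInt(3);
        // ... map colorIndex to a swatch in the UI ...
    }
} finally {
    if (cursor != null) cursor.close();
}
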
diff --git a/core/java/android/provider/Settings.java b/core/java/android/provider/Settings.java
index bc05078..3d2a3ce 100644
--- a/core/java/android/provider/Settings.java
+++ b/core/java/android/provider/Settings.java
@@ -4087,7 +4087,9 @@
             MOUNT_UMS_AUTOSTART,
             MOUNT_UMS_PROMPT,
             MOUNT_UMS_NOTIFY_ENABLED,
-            UI_NIGHT_MODE
+            UI_NIGHT_MODE,
+            LOCK_SCREEN_OWNER_INFO,
+            LOCK_SCREEN_OWNER_INFO_ENABLED
         };
 
         /**
diff --git a/core/java/android/view/accessibility/AccessibilityRecord.java b/core/java/android/view/accessibility/AccessibilityRecord.java
index fe06d98..a4e0688 100644
--- a/core/java/android/view/accessibility/AccessibilityRecord.java
+++ b/core/java/android/view/accessibility/AccessibilityRecord.java
@@ -391,8 +391,6 @@
      * Gets the max scroll offset of the source left edge in pixels.
      *
      * @return The max scroll.
-     *
-     * @hide
      */
     public int getMaxScrollX() {
         return mMaxScrollX;
@@ -401,8 +399,6 @@
      * Sets the max scroll offset of the source left edge in pixels.
      *
      * @param maxScrollX The max scroll.
-     *
-     * @hide
      */
     public void setMaxScrollX(int maxScrollX) {
         enforceNotSealed();
@@ -413,8 +409,6 @@
      * Gets the max scroll offset of the source top edge in pixels.
      *
      * @return The max scroll.
-     *
-     * @hide
      */
     public int getMaxScrollY() {
         return mMaxScrollY;
@@ -424,8 +418,6 @@
      * Sets the max scroll offset of the source top edge in pixels.
      *
      * @param maxScrollY The max scroll.
-     *
-     * @hide
      */
     public void setMaxScrollY(int maxScrollY) {
         enforceNotSealed();
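
With @hide removed here and the getters listed in api/current.txt above, an accessibility service could read the new values roughly as follows; the surrounding service and event registration are assumed context:

// Editor's sketch, not part of the patch: inside an AccessibilityService.
@Override
public void onAccessibilityEvent(AccessibilityEvent event) {
    if (event.getEventType() == AccessibilityEvent.TYPE_VIEW_SCROLLED) {
        final int maxX = event.getMaxScrollX();  // max offset of the left edge, in pixels
        final int maxY = event.getMaxScrollY();  // max offset of the top edge, in pixels
        if (maxY > 0) {
            int percent = 100 * event.getScrollY() / maxY;
            // e.g. announce "scrolled to <percent> percent of the content"
        }
    }
}
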
diff --git a/core/java/android/webkit/BrowserFrame.java b/core/java/android/webkit/BrowserFrame.java
index c8b67a8..2cc928f 100644
--- a/core/java/android/webkit/BrowserFrame.java
+++ b/core/java/android/webkit/BrowserFrame.java
@@ -1204,22 +1204,20 @@
      * We delegate the request to CallbackProxy, and route its response to
      * {@link #nativeSslClientCert(int, X509Certificate)}.
      */
-    private void requestClientCert(int handle, byte[] host_and_port_bytes) {
-        String host_and_port = new String(host_and_port_bytes, Charsets.UTF_8);
+    private void requestClientCert(int handle, String hostAndPort) {
         SslClientCertLookupTable table = SslClientCertLookupTable.getInstance();
-        if (table.IsAllowed(host_and_port)) {
+        if (table.IsAllowed(hostAndPort)) {
             // previously allowed
             nativeSslClientCert(handle,
-                                table.PrivateKey(host_and_port),
-                                table.CertificateChain(host_and_port));
-        } else if (table.IsDenied(host_and_port)) {
+                                table.PrivateKey(hostAndPort),
+                                table.CertificateChain(hostAndPort));
+        } else if (table.IsDenied(hostAndPort)) {
             // previously denied
             nativeSslClientCert(handle, null, null);
         } else {
             // previously ignored or new
             mCallbackProxy.onReceivedClientCertRequest(
-                    new ClientCertRequestHandler(this, handle, host_and_port, table),
-                    host_and_port);
+                    new ClientCertRequestHandler(this, handle, hostAndPort, table), hostAndPort);
         }
     }
 
diff --git a/core/java/android/widget/AbsSeekBar.java b/core/java/android/widget/AbsSeekBar.java
index df8eb05..475b8ee 100644
--- a/core/java/android/widget/AbsSeekBar.java
+++ b/core/java/android/widget/AbsSeekBar.java
@@ -335,6 +335,7 @@
                     mTouchDownX = event.getX();
                 } else {
                     setPressed(true);
+                    invalidate(mThumb.getBounds()); // This may be within the padding region
                     onStartTrackingTouch();
                     trackTouchEvent(event);
                     attemptClaimDrag();
@@ -348,6 +349,7 @@
                     final float x = event.getX();
                     if (Math.abs(x - mTouchDownX) > mScaledTouchSlop) {
                         setPressed(true);
+                        invalidate(mThumb.getBounds()); // This may be within the padding region
                         onStartTrackingTouch();
                         trackTouchEvent(event);
                         attemptClaimDrag();
diff --git a/core/java/android/widget/FastScroller.java b/core/java/android/widget/FastScroller.java
index 51506e8..083a952 100644
--- a/core/java/android/widget/FastScroller.java
+++ b/core/java/android/widget/FastScroller.java
@@ -29,12 +29,14 @@
 import android.os.SystemClock;
 import android.view.MotionEvent;
 import android.view.View;
+import android.view.ViewConfiguration;
 import android.widget.AbsListView.OnScrollListener;
 
 /**
  * Helper class for AbsListView to draw and control the Fast Scroll thumb
  */
 class FastScroller {
+    private static final String TAG = "FastScroller";
    
     // Minimum number of pages to justify showing a fast scroll thumb
     private static int MIN_PAGES = 4;
@@ -81,15 +83,15 @@
     private Drawable mOverlayDrawableLeft;
     private Drawable mOverlayDrawableRight;
 
-    private int mThumbH;
-    private int mThumbW;
-    private int mThumbY;
+    int mThumbH;
+    int mThumbW;
+    int mThumbY;
 
     private RectF mOverlayPos;
     private int mOverlaySize;
 
-    private AbsListView mList;
-    private boolean mScrollCompleted;
+    AbsListView mList;
+    boolean mScrollCompleted;
     private int mVisibleItem;
     private Paint mPaint;
     private int mListOffset;
@@ -105,7 +107,7 @@
     
     private Handler mHandler = new Handler();
     
-    private BaseAdapter mListAdapter;
+    BaseAdapter mListAdapter;
     private SectionIndexer mSectionIndexer;
 
     private boolean mChangedBounds;
@@ -118,10 +120,36 @@
 
     private boolean mMatchDragPosition;
 
+    float mInitialTouchY;
+    boolean mPendingDrag;
+    private int mScaledTouchSlop;
+
     private static final int FADE_TIMEOUT = 1500;
+    private static final int PENDING_DRAG_DELAY = 180;
 
     private final Rect mTmpRect = new Rect();
 
+    private final Runnable mDeferStartDrag = new Runnable() {
+        public void run() {
+            if (mList.mIsAttached) {
+                beginDrag();
+
+                final int viewHeight = mList.getHeight();
+                // Jitter
+                int newThumbY = (int) mInitialTouchY - mThumbH + 10;
+                if (newThumbY < 0) {
+                    newThumbY = 0;
+                } else if (newThumbY + mThumbH > viewHeight) {
+                    newThumbY = viewHeight - mThumbH;
+                }
+                mThumbY = newThumbY;
+                scrollTo((float) mThumbY / (viewHeight - mThumbH));
+            }
+
+            mPendingDrag = false;
+        }
+    };
+
     public FastScroller(Context context, AbsListView listView) {
         mList = listView;
         init(context);
@@ -264,6 +292,8 @@
 
         ta.recycle();
 
+        mScaledTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
+
         mMatchDragPosition = context.getApplicationInfo().targetSdkVersion >=
                 android.os.Build.VERSION_CODES.HONEYCOMB;
 
@@ -456,7 +486,7 @@
         return mSections;
     }
 
-    private void getSectionsFromIndexer() {
+    void getSectionsFromIndexer() {
         Adapter adapter = mList.getAdapter();
         mSectionIndexer = null;
         if (adapter instanceof HeaderViewListAdapter) {
@@ -489,7 +519,7 @@
         mListAdapter = null;
     }
 
-    private void scrollTo(float position) {
+    void scrollTo(float position) {
         int count = mList.getCount();
         mScrollCompleted = false;
         float fThreshold = (1.0f / count) / 8;
@@ -647,12 +677,45 @@
         cancelFling.recycle();
     }
     
+    void cancelPendingDrag() {
+        mList.removeCallbacks(mDeferStartDrag);
+        mPendingDrag = false;
+    }
+
+    void startPendingDrag() {
+        mPendingDrag = true;
+        mList.postDelayed(mDeferStartDrag, PENDING_DRAG_DELAY);
+    }
+
+    void beginDrag() {
+        setState(STATE_DRAGGING);
+        if (mListAdapter == null && mList != null) {
+            getSectionsFromIndexer();
+        }
+        if (mList != null) {
+            mList.requestDisallowInterceptTouchEvent(true);
+            mList.reportScrollStateChange(OnScrollListener.SCROLL_STATE_TOUCH_SCROLL);
+        }
+
+        cancelFling();
+    }
+
     boolean onInterceptTouchEvent(MotionEvent ev) {
-        if (mState > STATE_NONE && ev.getAction() == MotionEvent.ACTION_DOWN) {
-            if (isPointInside(ev.getX(), ev.getY())) {
-                setState(STATE_DRAGGING);
-                return true;
-            }
+        switch (ev.getActionMasked()) {
+            case MotionEvent.ACTION_DOWN:
+                if (mState > STATE_NONE && isPointInside(ev.getX(), ev.getY())) {
+                    if (!mList.isInScrollingContainer()) {
+                        beginDrag();
+                        return true;
+                    }
+                    mInitialTouchY = ev.getY();
+                    startPendingDrag();
+                }
+                break;
+            case MotionEvent.ACTION_UP:
+            case MotionEvent.ACTION_CANCEL:
+                cancelPendingDrag();
+                break;
         }
         return false;
     }
@@ -666,19 +729,32 @@
 
         if (action == MotionEvent.ACTION_DOWN) {
             if (isPointInside(me.getX(), me.getY())) {
-                setState(STATE_DRAGGING);
-                if (mListAdapter == null && mList != null) {
-                    getSectionsFromIndexer();
+                if (!mList.isInScrollingContainer()) {
+                    beginDrag();
+                    return true;
                 }
-                if (mList != null) {
-                    mList.requestDisallowInterceptTouchEvent(true);
-                    mList.reportScrollStateChange(OnScrollListener.SCROLL_STATE_TOUCH_SCROLL);
-                }
-
-                cancelFling();
-                return true;
+                mInitialTouchY = me.getY();
+                startPendingDrag();
             }
         } else if (action == MotionEvent.ACTION_UP) { // don't add ACTION_CANCEL here
+            if (mPendingDrag) {
+                // Allow a tap to scroll.
+                beginDrag();
+
+                final int viewHeight = mList.getHeight();
+                // Jitter
+                int newThumbY = (int) me.getY() - mThumbH + 10;
+                if (newThumbY < 0) {
+                    newThumbY = 0;
+                } else if (newThumbY + mThumbH > viewHeight) {
+                    newThumbY = viewHeight - mThumbH;
+                }
+                mThumbY = newThumbY;
+                scrollTo((float) mThumbY / (viewHeight - mThumbH));
+
+                cancelPendingDrag();
+                // Will hit the STATE_DRAGGING check below
+            }
             if (mState == STATE_DRAGGING) {
                 if (mList != null) {
                     // ViewGroup does the right thing already, but there might
@@ -698,6 +774,23 @@
                 return true;
             }
         } else if (action == MotionEvent.ACTION_MOVE) {
+            if (mPendingDrag) {
+                final float y = me.getY();
+                if (Math.abs(y - mInitialTouchY) > mScaledTouchSlop) {
+                    setState(STATE_DRAGGING);
+                    if (mListAdapter == null && mList != null) {
+                        getSectionsFromIndexer();
+                    }
+                    if (mList != null) {
+                        mList.requestDisallowInterceptTouchEvent(true);
+                        mList.reportScrollStateChange(OnScrollListener.SCROLL_STATE_TOUCH_SCROLL);
+                    }
+
+                    cancelFling();
+                    cancelPendingDrag();
+                    // Will hit the STATE_DRAGGING check below
+                }
+            }
             if (mState == STATE_DRAGGING) {
                 final int viewHeight = mList.getHeight();
                 // Jitter
@@ -717,6 +810,8 @@
                 }
                 return true;
             }
+        } else if (action == MotionEvent.ACTION_CANCEL) {
+            cancelPendingDrag();
         }
         return false;
     }
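
The deferred-drag logic above follows a general pattern for touch handling inside scrolling containers. A generic sketch of that pattern in a custom view, with illustrative field names and a hypothetical startDrag() helper:

// Editor's sketch, not part of the patch: defer the drag while inside a
// scrolling container, but start it early once the finger passes the touch slop.
private float mInitialY;
private boolean mPending;

private final Runnable mDeferStart = new Runnable() {
    public void run() {
        mPending = false;
        startDrag(mInitialY);   // illustrative helper, analogous to beginDrag()
    }
};

@Override
public boolean onTouchEvent(MotionEvent ev) {
    final int slop = ViewConfiguration.get(getContext()).getScaledTouchSlop();
    switch (ev.getActionMasked()) {
        case MotionEvent.ACTION_DOWN:
            mInitialY = ev.getY();
            mPending = true;
            postDelayed(mDeferStart, 180);          // mirrors PENDING_DRAG_DELAY
            return true;
        case MotionEvent.ACTION_MOVE:
            if (mPending && Math.abs(ev.getY() - mInitialY) > slop) {
                removeCallbacks(mDeferStart);
                mPending = false;
                startDrag(mInitialY);
            }
            return true;
        case MotionEvent.ACTION_UP:
        case MotionEvent.ACTION_CANCEL:
            removeCallbacks(mDeferStart);
            mPending = false;
            return true;
    }
    return super.onTouchEvent(ev);
}
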
diff --git a/core/java/android/widget/SpellChecker.java b/core/java/android/widget/SpellChecker.java
index 510e2d4..1da18aa 100644
--- a/core/java/android/widget/SpellChecker.java
+++ b/core/java/android/widget/SpellChecker.java
@@ -239,6 +239,7 @@
             SuggestionsInfo suggestionsInfo, SpellCheckSpan spellCheckSpan) {
         final int start = editable.getSpanStart(spellCheckSpan);
         final int end = editable.getSpanEnd(spellCheckSpan);
+        if (start < 0 || end < 0) return; // span was removed in the meantime
 
         // Other suggestion spans may exist on that region, with identical suggestions, filter
         // them out to avoid duplicates. First, filter suggestion spans on that exact region.
@@ -249,7 +250,6 @@
             final int spanEnd = editable.getSpanEnd(suggestionSpans[i]);
             if (spanStart != start || spanEnd != end) {
                 suggestionSpans[i] = null;
-                break;
             }
         }
 
diff --git a/core/java/android/widget/TextView.java b/core/java/android/widget/TextView.java
index 41daf70..324198f 100644
--- a/core/java/android/widget/TextView.java
+++ b/core/java/android/widget/TextView.java
@@ -2476,6 +2476,7 @@
 
         if (gravity != mGravity) {
             invalidate();
+            mLayoutAlignment = null;
         }
 
         mGravity = gravity;
@@ -9607,15 +9608,6 @@
             SpannableStringBuilder text = new SpannableStringBuilder();
             TextAppearanceSpan highlightSpan = new TextAppearanceSpan(mContext,
                     android.R.style.TextAppearance_SuggestionHighlight);
-
-            void removeMisspelledFlag() {
-                int suggestionSpanFlags = suggestionSpan.getFlags();
-                if ((suggestionSpanFlags & SuggestionSpan.FLAG_MISSPELLED) > 0) {
-                    suggestionSpanFlags &= ~SuggestionSpan.FLAG_MISSPELLED;
-                    suggestionSpanFlags &= ~SuggestionSpan.FLAG_EASY_CORRECT;
-                    suggestionSpan.setFlags(suggestionSpanFlags);
-                }
-            }
         }
 
         private class SuggestionAdapter extends BaseAdapter {
@@ -9931,6 +9923,14 @@
                     suggestionSpansStarts[i] = editable.getSpanStart(suggestionSpan);
                     suggestionSpansEnds[i] = editable.getSpanEnd(suggestionSpan);
                     suggestionSpansFlags[i] = editable.getSpanFlags(suggestionSpan);
+
+                    // Remove potential misspelled flags
+                    int suggestionSpanFlags = suggestionSpan.getFlags();
+                    if ((suggestionSpanFlags & SuggestionSpan.FLAG_MISSPELLED) > 0) {
+                        suggestionSpanFlags &= ~SuggestionSpan.FLAG_MISSPELLED;
+                        suggestionSpanFlags &= ~SuggestionSpan.FLAG_EASY_CORRECT;
+                        suggestionSpan.setFlags(suggestionSpanFlags);
+                    }
                 }
 
                 final int suggestionStart = suggestionInfo.suggestionStart;
@@ -9939,8 +9939,6 @@
                         suggestionStart, suggestionEnd).toString();
                 editable.replace(spanStart, spanEnd, suggestion);
 
-                suggestionInfo.removeMisspelledFlag();
-
                 // Notify source IME of the suggestion pick. Do this before swapping texts.
                 if (!TextUtils.isEmpty(
                         suggestionInfo.suggestionSpan.getNotificationTargetClassName())) {
diff --git a/core/java/com/android/internal/app/PlatLogoActivity.java b/core/java/com/android/internal/app/PlatLogoActivity.java
index 9fbbb3d..a0e125a 100644
--- a/core/java/com/android/internal/app/PlatLogoActivity.java
+++ b/core/java/com/android/internal/app/PlatLogoActivity.java
@@ -17,32 +17,79 @@
 package com.android.internal.app;
 
 import android.app.Activity;
+import android.content.ActivityNotFoundException;
+import android.content.Intent;
 import android.os.Bundle;
+import android.os.Handler;
+import android.os.Vibrator;
 import android.view.MotionEvent;
+import android.view.View;
+import android.view.ViewConfiguration;
 import android.widget.ImageView;
 import android.widget.Toast;
 
 public class PlatLogoActivity extends Activity {
     Toast mToast;
+    ImageView mContent;
+    Vibrator mZzz = new Vibrator();
+    int mCount;
+    final Handler mHandler = new Handler();
+
+    Runnable mSuperLongPress = new Runnable() {
+        public void run() {
+            mCount++;
+            mZzz.vibrate(50 * mCount);
+            final float scale = 1f + 0.25f * mCount * mCount;
+            mContent.setScaleX(scale);
+            mContent.setScaleY(scale);
+
+            if (mCount <= 3) {
+                mHandler.postDelayed(mSuperLongPress, ViewConfiguration.getLongPressTimeout());
+            } else {
+                try {
+                    startActivity(new Intent(Intent.ACTION_MAIN)
+                        .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
+                            | Intent.FLAG_ACTIVITY_CLEAR_TASK
+                            | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS)
+                        .setClassName("com.android.systemui","com.android.systemui.Nyandroid"));
+                } catch (ActivityNotFoundException ex) {
+                    android.util.Log.e("PlatLogoActivity", "Couldn't find platlogo screensaver.");
+                }
+                finish();
+            }
+        }
+    };
 
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
         
-        mToast = Toast.makeText(this, "REZZZZZZZ...", Toast.LENGTH_SHORT);
+        mToast = Toast.makeText(this, "Android 4.0: Ice Cream Sandwich", Toast.LENGTH_SHORT);
 
-        ImageView content = new ImageView(this);
-        content.setImageResource(com.android.internal.R.drawable.platlogo);
-        content.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
+        mContent = new ImageView(this);
+        mContent.setImageResource(com.android.internal.R.drawable.platlogo);
+        mContent.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
+
+        mContent.setOnTouchListener(new View.OnTouchListener() {
+            @Override
+            public boolean onTouch(View v, MotionEvent event) {
+                final int action = event.getAction();
+                if (action == MotionEvent.ACTION_DOWN) {
+                    mContent.setPressed(true);
+                    mHandler.removeCallbacks(mSuperLongPress);
+                    mCount = 0;
+                    mHandler.postDelayed(mSuperLongPress, 2*ViewConfiguration.getLongPressTimeout());
+                } else if (action == MotionEvent.ACTION_UP) {
+                    if (mContent.isPressed()) {
+                        mContent.setPressed(false);
+                        mHandler.removeCallbacks(mSuperLongPress);
+                        mToast.show();
+                    }
+                }
+                return true;
+            }
+        });
         
-        setContentView(content);
-    }
-
-    @Override
-    public boolean dispatchTouchEvent(MotionEvent ev) {
-        if (ev.getAction() == MotionEvent.ACTION_UP) {
-            mToast.show();
-        }
-        return super.dispatchTouchEvent(ev);
+        setContentView(mContent);
     }
 }
diff --git a/core/java/com/android/internal/view/menu/IconMenuPresenter.java b/core/java/com/android/internal/view/menu/IconMenuPresenter.java
index 3b1decd..2439b5d 100644
--- a/core/java/com/android/internal/view/menu/IconMenuPresenter.java
+++ b/core/java/com/android/internal/view/menu/IconMenuPresenter.java
@@ -22,7 +22,6 @@
 import android.os.Parcelable;
 import android.util.SparseArray;
 import android.view.ContextThemeWrapper;
-import android.view.LayoutInflater;
 import android.view.MenuItem;
 import android.view.View;
 import android.view.ViewGroup;
@@ -44,15 +43,14 @@
     private static final String OPEN_SUBMENU_KEY = "android:menu:icon:submenu";
 
     public IconMenuPresenter(Context context) {
-        super(context, com.android.internal.R.layout.icon_menu_layout,
+        super(new ContextThemeWrapper(context, com.android.internal.R.style.Theme_IconMenu),
+                com.android.internal.R.layout.icon_menu_layout,
                 com.android.internal.R.layout.icon_menu_item_layout);
     }
 
     @Override
     public void initForMenu(Context context, MenuBuilder menu) {
-        mContext = new ContextThemeWrapper(context, com.android.internal.R.style.Theme_IconMenu);
-        mInflater = LayoutInflater.from(mContext);
-        mMenu = menu;
+        super.initForMenu(context, menu);
         mMaxItems = -1;
     }
 
diff --git a/core/res/res/drawable-nodpi/platlogo.png b/core/res/res/drawable-nodpi/platlogo.png
index e619ed5..8aa3b9e 100644
--- a/core/res/res/drawable-nodpi/platlogo.png
+++ b/core/res/res/drawable-nodpi/platlogo.png
Binary files differ
diff --git a/docs/html/guide/appendix/media-formats.jd b/docs/html/guide/appendix/media-formats.jd
index ccc63a2..137f138 100644
--- a/docs/html/guide/appendix/media-formats.jd
+++ b/docs/html/guide/appendix/media-formats.jd
@@ -14,7 +14,7 @@
 
 <h2>See also</h2>
 <ol>
-<li><a href="{@docRoot}guide/topics/media/index.html">Audio and Video</a></li>
+<li><a href="{@docRoot}guide/topics/media/index.html">Multimedia and Camera</a></li>
 </ol>
 
 <h2>Key classes</h2>
diff --git a/docs/html/guide/guide_toc.cs b/docs/html/guide/guide_toc.cs
index 18d9a48..f3540e2 100644
--- a/docs/html/guide/guide_toc.cs
+++ b/docs/html/guide/guide_toc.cs
@@ -233,23 +233,32 @@
     </ul>
 
     <ul>
-      <li class="toggle-list">
+    <li class="toggle-list">
         <div><a href="<?cs var:toroot ?>guide/topics/graphics/index.html">
             <span class="en">Graphics</span>
-          </a></div>
+          </a><span class="new-child">new!</span></div>
         <ul>
           <li><a href="<?cs var:toroot ?>guide/topics/graphics/2d-graphics.html">
-                <span class="en">2D Graphics</span>
-              </a></li>
+              <span class="en">Canvas and Drawables</span></a></li>
+          <li><a href="<?cs var:toroot ?>guide/topics/graphics/hardware-accel.html">
+              <span class="en">Hardware Acceleration</span></a>
+            <span class="new">new!</span></li>
           <li><a href="<?cs var:toroot ?>guide/topics/graphics/opengl.html">
-                <span class="en">3D with OpenGL</span>
-              </a></li>
-          <li><a href="<?cs var:toroot ?>guide/topics/graphics/animation.html">
-                <span class="en">Property Animation</span>
-              </a></li>
+              <span class="en">OpenGL</span>
+            </a><span class="new">updated</span></li>
+        </ul>
+      </li>
+      <li class="toggle-list">
+        <div><a href="<?cs var:toroot ?>guide/topics/graphics/animation.html">
+            <span class="en">Animation</span>
+          </a></div>
+        <ul>
+          <li><a href="<?cs var:toroot ?>guide/topics/graphics/prop-animation.html">
+              <span class="en">Property Animation</span></a></li>
           <li><a href="<?cs var:toroot ?>guide/topics/graphics/view-animation.html">
-                <span class="en">View Animation</span>
-              </a></li>
+              <span class="en">View Animation</span></a></li>
+          <li><a href="<?cs var:toroot ?>guide/topics/graphics/drawable-animation.html">
+              <span class="en">Drawable Animation</span></a></li>
         </ul>
       </li>
       <li class="toggle-list">
@@ -258,7 +267,7 @@
 	          </a></div>
 	        <ul>
 	          <li><a href="<?cs var:toroot ?>guide/topics/renderscript/graphics.html">
-	                <span class="en">3D Graphics</span>
+	                <span class="en">Graphics</span>
 	              </a>
 	          </li>
 	          <li><a href="<?cs var:toroot ?>guide/topics/renderscript/compute.html">
@@ -268,9 +277,26 @@
 	        </ul>
   	  </li>
 
-      <li><a href="<?cs var:toroot ?>guide/topics/media/index.html">
-            <span class="en">Media</span>
-          </a></li>
+      <li class="toggle-list">
+          <div><a href="<?cs var:toroot ?>guide/topics/media/index.html">
+            <span class="en">Multimedia and Camera</span>
+          </a><span class="new">updated</span></div>
+          <ul>
+            <li><a href="<?cs var:toroot ?>guide/topics/media/mediaplayer.html">
+                  <span class="en">Media Playback</span></a>
+                </li>
+            <li><a href="<?cs var:toroot ?>guide/topics/media/jetplayer.html">
+                  <span class="en">JetPlayer</span></a>
+                </li>
+            <li><a href="<?cs var:toroot ?>guide/topics/media/camera.html">
+                  <span class="en">Camera</span></a>
+                  <span class="new">new!</span>
+                </li>
+            <li><a href="<?cs var:toroot ?>guide/topics/media/audio-capture.html">
+                  <span class="en">Audio Capture</span></a>
+                </li>
+          </ul>
+      </li>
       <li>
         <a href="<?cs var:toroot ?>guide/topics/clipboard/copy-paste.html">
             <span class="en">Copy and Paste</span>
diff --git a/docs/html/guide/topics/graphics/2d-graphics.jd b/docs/html/guide/topics/graphics/2d-graphics.jd
index 618cdf8..ac2b47c 100644
--- a/docs/html/guide/topics/graphics/2d-graphics.jd
+++ b/docs/html/guide/topics/graphics/2d-graphics.jd
@@ -1,296 +1,484 @@
-page.title=2D Graphics
+page.title=Canvas and Drawables
 parent.title=Graphics
 parent.link=index.html
 @jd:body
 
-
 <div id="qv-wrapper">
   <div id="qv">
-    <h2>In this document</h2>
+  <h2>In this document</h2>
+  <ol>
+    <li><a href="#draw-with-canvas">Draw with a Canvas</a>
     <ol>
-      <li><a href="#drawables">Drawables</a>
+      <li><a href="#on-view">On a View</a></li>
+      <li><a href="#on-surfaceview">On a SurfaceView</a></li>
+    </ol>
+    </li>
+    <li><a href="#drawables">Drawables</a>
         <ol>
           <li><a href="#drawables-from-images">Creating from resource images</a></li>
           <li><a href="#drawables-from-xml">Creating from resource XML</a></li>
         </ol>
-      </li>
-      <li><a href="#shape-drawable">Shape Drawable</a></li>
-   <!--   <li><a href="#state-list">StateListDrawable</a></li> -->
-      <li><a href="#nine-patch">Nine-patch</a></li>
-    </ol>
+    </li>
+    <li><a href="#shape-drawable">Shape Drawable</a></li>
+    <li><a href="#nine-patch">Nine-patch</a></li>
+  </ol>
+
+  <h2>See also</h2>
+  <ol>
+    <li><a href="{@docRoot}guide/topics/graphics/opengl.html">OpenGL with the Framework
+APIs</a></li>
+    <li><a href="{@docRoot}guide/topics/renderscript/index.html">RenderScript</a></li>
+  </ol>
   </div>
 </div>
 
-<p>Android offers a custom 2D graphics library for drawing and animating shapes and images.
-The {@link android.graphics.drawable} and {@link android.view.animation}
-packages are where you'll find the common classes used for drawing and animating in two-dimensions.
+<p>The Android framework provides a set of 2D drawing APIs that allow you to render your own
+custom graphics onto a canvas or to modify existing Views to customize their look and feel.
+When drawing 2D graphics, you'll typically do so in one of two ways:</p>
+
+<ol type="a">
+  <li>Draw your graphics or animations into a View object from your layout. In this manner,
+  the drawing of your graphics is handled by the system's
+  normal View hierarchy drawing process &mdash; you simply define the graphics to go inside the View.</li>
+  <li>Draw your graphics directly to a Canvas. This way, you personally call the appropriate class's
+  {@link android.view.View#onDraw onDraw()} method (passing it your Canvas), or one of the Canvas
+<code>draw...()</code> methods (like
+  <code>{@link android.graphics.Canvas#drawPicture(Picture,Rect) drawPicture()}</code>). In doing so, you are also in
+  control of any animation.</li>
+</ol>
+
+<p>Option "a," drawing to a View, is your best choice when you want to draw simple graphics that do not
+need to change dynamically and are not part of a performance-intensive game. For example, you should
+draw your graphics into a View when you want to display a static graphic or predefined animation, within
+an otherwise static application. Read <a href="#drawables">Drawables</a> for more information.
 </p>
 
-<p>This document offers an introduction to drawing graphics in your Android application.
-We'll discuss the basics of using Drawable objects to draw
-graphics, how to use a couple subclasses of the Drawable class, and how to
-create animations that either tween (move, stretch, rotate) a single graphic
-or animate a series of graphics (like a roll of film).</p>
+<p>Option "b," drawing to a Canvas, is better when your application needs to regularly re-draw itself.
+Applications such as video games should draw to the Canvas on their own. However, there's more than
+one way to do this:</p>
 
+<ul>
+  <li>In the same thread as your UI Activity, wherein you create a custom View component in
+  your layout, call <code>{@link android.view.View#invalidate()}</code> and then handle the
+  <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code> callback.</li>
+  <li>Or, in a separate thread, wherein you manage a {@link android.view.SurfaceView} and
+  perform draws to the Canvas as fast as your thread is capable
+  (you do not need to request <code>invalidate()</code>).</li>
+</ul>
+
+<h2 id="draw-with-canvas">Draw with a Canvas</h2>
+
+<p>When you're writing an application in which you would like to perform specialized drawing
+and/or control the animation of graphics,
+you should do so by drawing through a {@link android.graphics.Canvas}. A Canvas works for you as
+a pretense, or interface, to the actual surface upon which your graphics will be drawn &mdash; it
+holds all of your "draw" calls. Via the Canvas, your drawing is actually performed upon an
+underlying {@link android.graphics.Bitmap}, which is placed into the window.</p>
+
+<p>In the event that you're drawing within the <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code>
+callback method, the Canvas is provided for you and you need only place your drawing calls upon it.
+You can also acquire a Canvas from <code>{@link android.view.SurfaceHolder#lockCanvas() SurfaceHolder.lockCanvas()}</code>,
+when dealing with a SurfaceView object. (Both of these scenarios are discussed in the following sections.)
+However, if you need to create a new Canvas, then you must define the {@link android.graphics.Bitmap}
+upon which drawing will actually be performed. The Bitmap is always required for a Canvas. You can set up
+a new Canvas like this:</p>
+<pre>
+Bitmap b = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
+Canvas c = new Canvas(b);
+</pre>
+
+<p>Now your Canvas will draw onto the defined Bitmap. After drawing upon it with the Canvas, you can then carry your
+Bitmap to another Canvas with one of the <code>{@link android.graphics.Canvas#drawBitmap(Bitmap,Matrix,Paint)
+Canvas.drawBitmap(Bitmap,...)}</code> methods. It's recommended that you ultimately draw your final
+graphics through a Canvas offered to you
+by <code>{@link android.view.View#onDraw(Canvas) View.onDraw()}</code> or
+<code>{@link android.view.SurfaceHolder#lockCanvas() SurfaceHolder.lockCanvas()}</code> (see the following sections).</p>
+
+<p>The {@link android.graphics.Canvas} class has its own set of drawing methods that you can use,
+like <code>drawBitmap(...)</code>, <code>drawRect(...)</code>, <code>drawText(...)</code>, and many more.
+Other classes that you might use also have <code>draw()</code> methods. For example, you'll probably
+have some {@link android.graphics.drawable.Drawable} objects that you want to put on the Canvas. Drawable
+has its own <code>{@link android.graphics.drawable.Drawable#draw(Canvas) draw()}</code> method
+that takes your Canvas as an argument.</p>
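+
+<p>As a quick, hypothetical example (not from the original document), the snippet below mixes Canvas primitives
+with a Drawable's own <code>draw()</code> method; <code>canvas</code> and <code>myDrawable</code> are assumed to
+already be in scope.</p>
+<pre>
+Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
+paint.setColor(Color.RED);
+
+canvas.drawRect(10, 10, 110, 60, paint);      // a primitive rectangle
+canvas.drawText("Hello", 10, 90, paint);      // some text
+
+myDrawable.setBounds(10, 100, 110, 200);      // position the Drawable...
+myDrawable.draw(canvas);                      // ...and let it draw itself onto the Canvas
+</pre>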
+
+
+<h3 id="on-view">On a View</h3>
+
+<p>If your application does not require a significant amount of processing or
+a high frame rate (perhaps for a chess game, a snake game,
+or another slowly animated application), then you should consider creating a custom View component
+and drawing with a Canvas in <code>{@link android.view.View#onDraw(Canvas) View.onDraw()}</code>.
+The most convenient aspect of doing so is that the Android framework will
+provide you with a pre-defined Canvas on which to place your drawing calls.</p>
+
+<p>To start, extend the {@link android.view.View} class (or descendant thereof) and define
+the <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code> callback method. This method will be called by the Android
+framework to request that your View draw itself. This is where you will perform all your calls
+to draw through the {@link android.graphics.Canvas}, which is passed to you through the <code>onDraw()</code> callback.</p>
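+
+<p>For example, a very small custom View might look like the following sketch (a hypothetical class, not an
+SDK sample), which simply paints a circle with the Canvas it receives:</p>
+<pre>
+public class CircleView extends View {
+    private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+
+    public CircleView(Context context) {
+        super(context);
+        mPaint.setColor(Color.GREEN);
+    }
+
+    protected void onDraw(Canvas canvas) {
+        // The framework supplies the Canvas; just place draw calls upon it.
+        canvas.drawCircle(getWidth() / 2, getHeight() / 2, 50, mPaint);
+    }
+}
+</pre>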
+
+<p>The Android framework will only call <code>onDraw()</code> as necessary. Each time
+your application is prepared to be drawn, you must request that your View be invalidated by calling
+<code>{@link android.view.View#invalidate()}</code>. This indicates that you'd like your View to be drawn, and
+Android will then call your <code>onDraw()</code> method (though the callback is not guaranteed to
+be instantaneous).</p>
+
+<p>Inside your View component's <code>onDraw()</code>, use the Canvas given to you for all your drawing,
+using various <code>Canvas.draw...()</code> methods, or other class <code>draw()</code> methods that
+take your Canvas as an argument. Once your <code>onDraw()</code> is complete, the Android framework will
+use your Canvas to draw a Bitmap handled by the system.</p>
+
+<p class="note"><strong>Note: </strong> In order to request an invalidate from a thread other than your main
+Activity's thread, you must call <code>{@link android.view.View#postInvalidate()}</code>.</p>
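+
+<p>For instance, a worker thread might request a redraw like this (a minimal sketch; <code>myView</code> and
+<code>updateGameState()</code> are placeholders, not SDK APIs):</p>
+<pre>
+new Thread(new Runnable() {
+    public void run() {
+        updateGameState();        // hypothetical work performed off the UI thread
+        myView.postInvalidate();  // safe to call from a non-UI thread
+    }
+}).start();
+</pre>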
+
+<p>Also read <a href="{@docRoot}guide/topics/ui/custom-components.html">Building Custom Components</a>
+for a guide to extending a View class, and <a href="2d-graphics.html">2D Graphics: Drawables</a> for
+information on using Drawable objects like images from your resources and other primitive shapes.</p>
+
+<p>For a sample application, see the Snake game, in the SDK samples folder:
+<code>&lt;your-sdk-directory>/samples/Snake/</code>.</p>
+
+<h3 id="on-surfaceview">On a SurfaceView</h3>
+
+<p>The {@link android.view.SurfaceView} is a special subclass of View that offers a dedicated
+drawing surface within the View hierarchy. The aim is to offer this drawing surface to
+an application's secondary thread, so that the application isn't required
+to wait until the system's View hierarchy is ready to draw. Instead, a secondary thread
+that has a reference to a SurfaceView can draw to its own Canvas at its own pace.</p>
+
+<p>To begin, you need to create a new class that extends {@link android.view.SurfaceView}. The class should also
+implement {@link android.view.SurfaceHolder.Callback}. This interface will notify you
+with information about the underlying {@link android.view.Surface}, such as when it is created, changed, or destroyed.
+These events are important so that you know when you can start drawing, whether you need
+to make adjustments based on new surface properties, and when to stop drawing and potentially
+kill some tasks. Inside your SurfaceView class is also a good place to define your secondary Thread class, which will
+perform all the drawing procedures to your Canvas.</p>
+
+<p>Instead of handling the Surface object directly, you should handle it via
+a {@link android.view.SurfaceHolder}. So, when your SurfaceView is initialized, get the SurfaceHolder by calling
+<code>{@link android.view.SurfaceView#getHolder()}</code>. You should then notify the SurfaceHolder that you'd
+like to receive SurfaceHolder callbacks (from {@link android.view.SurfaceHolder.Callback}) by calling
+{@link android.view.SurfaceHolder#addCallback(SurfaceHolder.Callback) addCallback()}
+(pass it <var>this</var>). Then override each of the
+{@link android.view.SurfaceHolder.Callback} methods inside your SurfaceView class.</p>
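+
+<p>Put together, the skeleton of such a class might look like the following sketch (class name and comments
+are illustrative, not taken from an SDK sample):</p>
+<pre>
+public class DrawingSurface extends SurfaceView implements SurfaceHolder.Callback {
+
+    public DrawingSurface(Context context) {
+        super(context);
+        // Register to receive callbacks about the underlying Surface.
+        getHolder().addCallback(this);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        // The Surface exists now; it is safe to start the drawing thread here.
+    }
+
+    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+        // React to size or format changes if necessary.
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        // Stop the drawing thread before the Surface goes away.
+    }
+}
+</pre>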
+
+<p>In order to draw to the Surface Canvas from within your secondary thread, you must pass the thread your
+SurfaceHolder and retrieve the Canvas with <code>{@link android.view.SurfaceHolder#lockCanvas() lockCanvas()}</code>.
+You can now take the Canvas given to you by the SurfaceHolder and do your necessary drawing upon it.
+Once you're done drawing with the Canvas, call
+<code>{@link android.view.SurfaceHolder#unlockCanvasAndPost(Canvas) unlockCanvasAndPost()}</code>, passing it
+your Canvas object. The Surface will now draw the Canvas as you left it. Perform this sequence of locking and
+unlocking the canvas each time you want to redraw.</p>
+
+<p class="note"><strong>Note:</strong> On each pass you retrieve the Canvas from the SurfaceHolder,
+the previous state of the Canvas will be retained. In order to properly animate your graphics, you must re-paint the
+entire surface. For example, you can clear the previous state of the Canvas by filling in a color
+with <code>{@link android.graphics.Canvas#drawColor(int) drawColor()}</code> or setting a background image
+with <code>{@link android.graphics.Canvas#drawBitmap(Bitmap,Rect,RectF,Paint) drawBitmap()}</code>. Otherwise,
+you will see traces of the drawings you previously performed.</p>
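+
+<p>A typical drawing loop on the secondary thread, then, might look like this rough sketch (the
+<code>mHolder</code> and <code>mRunning</code> fields are placeholders for the SurfaceHolder passed to the
+thread and a flag your SurfaceView toggles):</p>
+<pre>
+while (mRunning) {
+    Canvas canvas = mHolder.lockCanvas();
+    if (canvas != null) {
+        try {
+            canvas.drawColor(Color.BLACK);  // clear the previous frame
+            // ... perform this frame's drawing here ...
+        } finally {
+            mHolder.unlockCanvasAndPost(canvas);
+        }
+    }
+}
+</pre>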
+
+
+<p>For a sample application, see the Lunar Lander game, in the SDK samples folder:
+<code>&lt;your-sdk-directory>/samples/LunarLander/</code>. Or,
+browse the source in the <a href="{@docRoot}guide/samples/index.html">Sample Code</a> section.</p>
 
 <h2 id="drawables">Drawables</h2>
+<p>Android offers a custom 2D graphics library for drawing shapes and images.
+  The {@link android.graphics.drawable} package is where you'll find the common classes used for
+  drawing in two dimensions.</p>
 
-<p>A {@link android.graphics.drawable.Drawable} is a general abstraction for "something that can be drawn."
-You'll discover that the Drawable class extends to define a variety of specific kinds of drawable graphics,
-including {@link android.graphics.drawable.BitmapDrawable}, {@link android.graphics.drawable.ShapeDrawable},
-{@link android.graphics.drawable.PictureDrawable}, {@link android.graphics.drawable.LayerDrawable}, and several more.
-Of course, you can also extend these to define your own custom Drawable objects that behave in unique ways.</p>
+<p>This document discusses the basics of using Drawable objects to draw graphics and how to use a
+couple of subclasses of the Drawable class. For information on using Drawables to do frame-by-frame
+animation, see <a href="{@docRoot}guide/topics/animation/frame-animation.html">Frame-by-Frame
+Animation</a>.</p>
 
-<p>There are three ways to define and instantiate a Drawable: using an image saved in your project resources;
-using an XML file that defines the Drawable properties; or using the normal class constructors. Below, we'll discuss
-each the first two techniques (using constructors is nothing new for an experienced developer).</p>
+<p>A {@link android.graphics.drawable.Drawable} is a general abstraction for "something that can be
+  drawn."  You'll discover that the Drawable class extends to define a variety of specific kinds of
+drawable graphics, including {@link android.graphics.drawable.BitmapDrawable}, {@link
+  android.graphics.drawable.ShapeDrawable}, {@link android.graphics.drawable.PictureDrawable},
+{@link android.graphics.drawable.LayerDrawable}, and several more.  Of course, you can also extend
+these to define your own custom Drawable objects that behave in unique ways.</p>
+
+<p>There are three ways to define and instantiate a Drawable: using an image saved in your project
+  resources; using an XML file that defines the Drawable properties; or using the normal class
+constructors. Below, we'll discuss each of the first two techniques (using constructors is nothing new
+for an experienced developer).</p>
 
 
 <h3 id="drawables-from-images">Creating from resource images</h3>
 
-<p>A simple way to add graphics to your application is by referencing an image file from your project resources. 
-Supported file types are PNG (preferred), JPG (acceptable) and GIF (discouraged). This technique would 
-obviously be preferred for application icons, logos, or other graphics such as those used in a game.</p>
+<p>A simple way to add graphics to your application is by referencing an image file from your
+  project resources. Supported file types are PNG (preferred), JPG (acceptable) and GIF
+(discouraged). This technique would obviously be preferred for application icons, logos, or other
+graphics such as those used in a game.</p>
 
-<p>To use an image resource, just add your file to the <code>res/drawable/</code> directory of your project.
-From there, you can reference it from your code or your XML layout. 
-Either way, it is referred using a resource ID, which is the file name without the file type
-extension (E.g., <code>my_image.png</code> is referenced as <var>my_image</var>).</p>
+<p>To use an image resource, just add your file to the <code>res/drawable/</code> directory of your
+  project. From there, you can reference it from your code or your XML layout.
+  Either way, it is referenced using a resource ID, which is the file name without the file type
+  extension (for example, <code>my_image.png</code> is referenced as <var>my_image</var>).</p>
 
-<p class="note"><strong>Note:</strong> Image resources placed in <code>res/drawable/</code> may be 
-automatically optimized with lossless image compression by the 
-<code>aapt</code> tool during the build process. For example, a true-color PNG that does
-not require more than 256 colors may be converted to an 8-bit PNG with a color palette. This 
-will result in an image of equal quality but which requires less memory. So be aware that the
-image binaries placed in this directory can change during the build. If you plan on reading
-an image as a bit stream in order to convert it to a bitmap, put your images in the <code>res/raw/</code>
-folder instead, where they will not be optimized.</p>
+<p class="note"><strong>Note:</strong> Image resources placed in <code>res/drawable/</code> may be
+  automatically optimized with lossless image compression by the
+  <code>aapt</code> tool during the build process. For example, a true-color PNG that does
+  not require more than 256 colors may be converted to an 8-bit PNG with a color palette. This
+  will result in an image of equal quality but which requires less memory. So be aware that the
+  image binaries placed in this directory can change during the build. If you plan on reading
+  an image as a bit stream in order to convert it to a bitmap, put your images in the
+  <code>res/raw/</code> folder instead, where they will not be optimized.</p>
 
 <h4>Example code</h4>
-<p>The following code snippet demonstrates how to build an {@link android.widget.ImageView} that uses an image
-from drawable resources and add it to the layout.</p>
+<p>The following code snippet demonstrates how to build an {@link android.widget.ImageView} that
+  uses an image from drawable resources and adds it to the layout.</p>
 <pre>
-LinearLayout mLinearLayout;
+LinearLayout mLinearLayout;
 
-protected void onCreate(Bundle savedInstanceState) {
-    super.onCreate(savedInstanceState);
+protected void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
 
-    // Create a LinearLayout in which to add the ImageView
-    mLinearLayout = new LinearLayout(this);
+    // Create a LinearLayout in which to add the ImageView
+    mLinearLayout = new LinearLayout(this);
 
-    // Instantiate an ImageView and define its properties
-    ImageView i = new ImageView(this);
-    i.setImageResource(R.drawable.my_image);
-    i.setAdjustViewBounds(true); // set the ImageView bounds to match the Drawable's dimensions
-    i.setLayoutParams(new Gallery.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
+    // Instantiate an ImageView and define its properties
+    ImageView i = new ImageView(this);
+    i.setImageResource(R.drawable.my_image);
+    i.setAdjustViewBounds(true); // set the ImageView bounds to match the Drawable's dimensions
+    i.setLayoutParams(new Gallery.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
 
-    // Add the ImageView to the layout and set the layout as the content view
-    mLinearLayout.addView(i);
-    setContentView(mLinearLayout);
-}
+    // Add the ImageView to the layout and set the layout as the content view
+    mLinearLayout.addView(i);
+    setContentView(mLinearLayout);
+}
 </pre>
-<p>In other cases, you may want to handle your image resource as a 
-{@link android.graphics.drawable.Drawable} object.
-To do so, create a Drawable from the resource like so:
-<pre>
-Resources res = mContext.getResources();
-Drawable myImage = res.getDrawable(R.drawable.my_image);
-</pre>
+<p>In other cases, you may want to handle your image resource as a
+  {@link android.graphics.drawable.Drawable} object.
+  To do so, create a Drawable from the resource like so:
+<pre>
+Resources res = mContext.getResources();
+Drawable myImage = res.getDrawable(R.drawable.my_image);
+</pre>
 
-<p class="warning"><strong>Note:</strong> Each unique resource in your project can maintain only one
-state, no matter how many different objects you may instantiate for it. For example, if you instantiate two
-Drawable objects from the same image resource, then change a property (such as the alpha) for one of the 
-Drawables, then it will also affect the other. So when dealing with multiple instances of an image resource, 
-instead of directly transforming the Drawable, you should perform a <a href="#tween-animation">tween animation</a>.</p>
+  <p class="warning"><strong>Note:</strong> Each unique resource in your project can maintain only
+one state, no matter how many different objects you may instantiate for it. For example, if you
+    instantiate two Drawable objects from the same image resource, then change a property (such
+as the alpha) for one of the Drawables, then it will also affect the other. So when dealing with
+multiple instances of an image resource, instead of directly transforming the Drawable, you
+should perform a <a href="{@docRoot}guide/topics/graphics/view-animation.html#tween-animation">tween
+animation</a>.</p>
 
 
-<h4>Example XML</h4>
-<p>The XML snippet below shows how to add a resource Drawable to an 
-{@link android.widget.ImageView} in the XML layout (with some red tint just for fun).
-<pre>
-&lt;ImageView   
-  android:layout_width="wrap_content"
-  android:layout_height="wrap_content"
-  android:tint="#55ff0000"
-  android:src="@drawable/my_image"/>
-</pre>
-<p>For more information on using project resources, read about
-  <a href="{@docRoot}guide/topics/resources/index.html">Resources and Assets</a>.</p>
+  <h4>Example XML</h4>
+  <p>The XML snippet below shows how to add a resource Drawable to an
+    {@link android.widget.ImageView} in the XML layout (with some red tint just for fun).
+<pre>
+&lt;ImageView
+  android:layout_width="wrap_content"
+  android:layout_height="wrap_content"
+  android:tint="#55ff0000"
+  android:src="@drawable/my_image"/>
+</pre>
+  <p>For more information on using project resources, read about
+    <a href="{@docRoot}guide/topics/resources/index.html">Resources and Assets</a>.</p>
 
 
-<h3 id="drawables-from-xml">Creating from resource XML</h3>
+  <h3 id="drawables-from-xml">Creating from resource XML</h3>
 
-<p>By now, you should be familiar with Android's principles of developing a
-<a href="{@docRoot}guide/topics/ui/index.html">User Interface</a>. Hence, you understand the power
-and flexibility inherent in defining objects in XML. This philosophy caries over from Views to Drawables.
-If there is a Drawable object that you'd like to create, which is not initially dependent on variables defined by
-your application code or user interaction, then defining the Drawable in XML is a good option.
-Even if you expect your Drawable to change its properties during the user's experience with your application, 
-you should consider defining the object in XML, as you can always modify properties once it is instantiated.</p>
+  <p>By now, you should be familiar with Android's principles of developing a
+    <a href="{@docRoot}guide/topics/ui/index.html">User Interface</a>. Hence, you understand the
+power and flexibility inherent in defining objects in XML. This philosophy carries over from Views
+to Drawables.  If there is a Drawable object that you'd like to create, which is not initially
+dependent on variables defined by your application code or user interaction, then defining the
+Drawable in XML is a good option. Even if you expect your Drawable to change its properties
+during the user's experience with your application, you should consider defining the object in
+XML, as you can always modify properties once it is instantiated.</p>
 
-<p>Once you've defined your Drawable in XML, save the file in the <code>res/drawable/</code> directory of
-your project. Then, retrieve and instantiate the object by calling
-{@link android.content.res.Resources#getDrawable(int) Resources.getDrawable()}, passing it the resource ID 
-of your XML file. (See the <a href="#drawable-xml-example">example below</a>.)</p>
+  <p>Once you've defined your Drawable in XML, save the file in the <code>res/drawable/</code>
+    directory of your project. Then, retrieve and instantiate the object by calling
+    {@link android.content.res.Resources#getDrawable(int) Resources.getDrawable()}, passing it the
+    resource ID of your XML file. (See the <a href="#drawable-xml-example">example
+below</a>.)</p>
 
-<p>Any Drawable subclass that supports the <code>inflate()</code> method can be defined in 
-XML and instantiated by your application. 
-Each Drawable that supports XML inflation utilizes specific XML attributes that help define the object
-properties (see the class reference to see what these are). See the class documentation for each
-Drawable subclass for information on how to define it in XML.
+  <p>Any Drawable subclass that supports the <code>inflate()</code> method can be defined in
+    XML and instantiated by your application. Each Drawable that supports XML inflation utilizes
+    specific XML attributes that help define the object properties. See the class documentation for
+    each Drawable subclass for information on the XML attributes it supports and how to define it in
+    XML.</p>
 
-<h4 id="drawable-xml-example">Example</h4>
-<p>Here's some XML that defines a TransitionDrawable:</p>
-<pre>
-&lt;transition xmlns:android="http://schemas.android.com/apk/res/android">
-  &lt;item android:drawable="&#64;drawable/image_expand">
-  &lt;item android:drawable="&#64;drawable/image_collapse">
-&lt;/transition>
-</pre>
+    <h4 id="drawable-xml-example">Example</h4>
+    <p>Here's some XML that defines a TransitionDrawable:</p>
+    <pre>
+&lt;transition xmlns:android="http://schemas.android.com/apk/res/android">
+  &lt;item android:drawable="&#64;drawable/image_expand">
+  &lt;item android:drawable="&#64;drawable/image_collapse">
+&lt;/transition>
+</pre>
 
-<p>With this XML saved in the file <code>res/drawable/expand_collapse.xml</code>, 
-the following code will instantiate the TransitionDrawable and set it as the content of an ImageView:</p>
-<pre>
-Resources res = mContext.getResources();
-TransitionDrawable transition = (TransitionDrawable) res.getDrawable(R.drawable.expand_collapse);
-ImageView image = (ImageView) findViewById(R.id.toggle_image);
-image.setImageDrawable(transition);
-</pre>
-<p>Then this transition can be run forward (for 1 second) with:</p>
-<pre>transition.startTransition(1000);</pre>
+    <p>With this XML saved in the file <code>res/drawable/expand_collapse.xml</code>,
+      the following code will instantiate the TransitionDrawable and set it as the content of an
+      ImageView:</p>
+    <pre>
+Resources res = mContext.getResources();
+TransitionDrawable transition = (TransitionDrawable) res.getDrawable(R.drawable.expand_collapse);
+ImageView image = (ImageView) findViewById(R.id.toggle_image);
+image.setImageDrawable(transition);
+</pre>
+    <p>Then this transition can be run forward (for 1 second) with:</p>
+    <pre>transition.startTransition(1000);</pre>
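+    <p>If you also need to play the transition back in the other direction, the same object exposes
+      <code>{@link android.graphics.drawable.TransitionDrawable#reverseTransition(int) reverseTransition()}</code>
+      (shown here only as a brief illustration):</p>
+    <pre>transition.reverseTransition(1000);</pre>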
 
-<p>Refer to the Drawable classes listed above for more information on the XML attributes supported by each.</p>
+    <p>Refer to the Drawable classes listed above for more information on the XML attributes
+supported by each.</p>
 
 
 
-<h2 id="shape-drawable">Shape Drawable</h2>
+    <h2 id="shape-drawable">Shape Drawable</h2>
 
-<p>When you want to dynamically draw some two-dimensional graphics, a {@link android.graphics.drawable.ShapeDrawable}
-object will probably suit your needs. With a ShapeDrawable, you can programmatically draw
-primitive shapes and style them in any way imaginable.</p>
+    <p>When you want to dynamically draw some two-dimensional graphics, a {@link
+      android.graphics.drawable.ShapeDrawable}
+      object will probably suit your needs. With a ShapeDrawable, you can programmatically draw
+      primitive shapes and style them in any way imaginable.</p>
 
-<p>A ShapeDrawable is an extension of {@link android.graphics.drawable.Drawable}, so you can use one where ever
-a Drawable is expected &mdash; perhaps for the background of a View, set with 
-{@link android.view.View#setBackgroundDrawable(android.graphics.drawable.Drawable) setBackgroundDrawable()}. 
-Of course, you can also draw your shape as its own custom {@link android.view.View}, 
-to be added to your layout however you please.
-Because the ShapeDrawable has its own <code>draw()</code> method, you can create a subclass of View that 
-draws the ShapeDrawable during the <code>View.onDraw()</code> method.
-Here's a basic extension of the View class that does just this, to draw a ShapeDrawable as a View:</p>
-<pre>
-public class CustomDrawableView extends View {
-    private ShapeDrawable mDrawable;
+    <p>A ShapeDrawable is an extension of {@link android.graphics.drawable.Drawable}, so you can use
+      one wherever a Drawable is expected &mdash; perhaps for the background of a View, set with
+      {@link android.view.View#setBackgroundDrawable(android.graphics.drawable.Drawable)
+      setBackgroundDrawable()}.
+      Of course, you can also draw your shape as its own custom {@link android.view.View},
+      to be added to your layout however you please.
+      Because the ShapeDrawable has its own <code>draw()</code> method, you can create a subclass of
+      View that draws the ShapeDrawable during the <code>View.onDraw()</code> method.
+      Here's a basic extension of the View class that does just this, to draw a ShapeDrawable as a
+      View:</p>
+    <pre>
+public class CustomDrawableView extends View {
+    private ShapeDrawable mDrawable;
 
-    public CustomDrawableView(Context context) {
-        super(context);
+    public CustomDrawableView(Context context) {
+        super(context);
 
-        int x = 10;
-        int y = 10;
-        int width = 300;
-        int height = 50;
+        int x = 10;
+        int y = 10;
+        int width = 300;
+        int height = 50;
 
-        mDrawable = new ShapeDrawable(new OvalShape());
-        mDrawable.getPaint().setColor(0xff74AC23);
-        mDrawable.setBounds(x, y, x + width, y + height);
-    }
+        mDrawable = new ShapeDrawable(new OvalShape());
+        mDrawable.getPaint().setColor(0xff74AC23);
+        mDrawable.setBounds(x, y, x + width, y + height);
+    }
 
-    protected void onDraw(Canvas canvas) {
-        mDrawable.draw(canvas);
-    }
-}
-</pre>
+    protected void onDraw(Canvas canvas) {
+        mDrawable.draw(canvas);
+    }
+}
+</pre>
 
-<p>In the constructor, a ShapeDrawable is defines as an {@link android.graphics.drawable.shapes.OvalShape}.
-It's then given a color and the bounds of the shape are set. If you do not set the bounds, then the
-shape will not be drawn, whereas if you don't set the color, it will default to black.</p>
-<p>With the custom View defined, it can be drawn any way you like. With the sample above, we can
-draw the shape programmatically in an Activity:</p>
-<pre>
-CustomDrawableView mCustomDrawableView;
+    <p>In the constructor, a ShapeDrawable is defined as an {@link
+      android.graphics.drawable.shapes.OvalShape}.
+      It's then given a color and the bounds of the shape are set. If you do not set the bounds,
+      then the shape will not be drawn, whereas if you don't set the color, it will default to black.</p>
+    <p>With the custom View defined, it can be drawn any way you like. With the sample above, we can
+      draw the shape programmatically in an Activity:</p>
+    <pre>
+CustomDrawableView mCustomDrawableView;
 
-protected void onCreate(Bundle savedInstanceState) {
-    super.onCreate(savedInstanceState);
-    mCustomDrawableView = new CustomDrawableView(this);
-    
-    setContentView(mCustomDrawableView);
-}
-</pre>
+protected void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+    mCustomDrawableView = new CustomDrawableView(this);
 
-<p>If you'd like to draw this custom drawable from the XML layout instead of from the Activity, 
-then the CustomDrawable class must override the {@link android.view.View#View(android.content.Context, android.util.AttributeSet) View(Context, AttributeSet)} constructor, which is called when 
-instantiating a View via inflation from XML. Then add a CustomDrawable element to the XML, 
-like so:</p>
-<pre>
-&lt;com.example.shapedrawable.CustomDrawableView
-    android:layout_width="fill_parent" 
-    android:layout_height="wrap_content" 
-    />
-</pre>
+    setContentView(mCustomDrawableView);
+}
+</pre>
 
-<p>The ShapeDrawable class (like many other Drawable types in the {@link android.graphics.drawable} package)
-allows you to define various properties of the drawable with public methods. 
-Some properties you might want to adjust include
-alpha transparency, color filter, dither, opacity and color.</p>
+    <p>If you'd like to draw this custom drawable from the XML layout instead of from the Activity,
+      then the CustomDrawableView class must override the {@link
+      android.view.View#View(android.content.Context, android.util.AttributeSet) View(Context,
+      AttributeSet)} constructor, which is called when
+      instantiating a View via inflation from XML. Then add a CustomDrawableView element to the XML,
+      like so:</p>
+    <pre>
+&lt;com.example.shapedrawable.CustomDrawableView
+    android:layout_width="fill_parent"
+    android:layout_height="wrap_content"
+    />
+</pre>
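+
+  <p>As a rough illustration (not shown in the original sample), the extra constructor would look something
+    like this, with the same ShapeDrawable setup as the one-argument constructor:</p>
+  <pre>
+public CustomDrawableView(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    // ... create and configure mDrawable exactly as in CustomDrawableView(Context) ...
+}
+</pre>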
 
-<p>You can also define primitive drawable shapes using XML. For more information, see the
-section about Shape Drawables in the <a
+  <p>The ShapeDrawable class (like many other Drawable types in the {@link
+    android.graphics.drawable} package)
+    allows you to define various properties of the drawable with public methods.
+    Some properties you might want to adjust include
+    alpha transparency, color filter, dither, opacity and color.</p>
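+
+  <p>For example, you might adjust a few of these properties on the ShapeDrawable from the sample above
+    (a hypothetical snippet, not part of the original example):</p>
+  <pre>
+mDrawable.setAlpha(128);                      // make the shape 50% transparent
+mDrawable.setDither(true);
+mDrawable.getPaint().setColor(0xff0000ff);    // change the fill color
+mDrawable.setColorFilter(new LightingColorFilter(0xffffffff, 0x00222222));
+</pre>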
+
+  <p>You can also define primitive drawable shapes using XML. For more information, see the
+    section about Shape Drawables in the <a
 href="{@docRoot}guide/topics/resources/drawable-resource.html#Shape">Drawable Resources</a>
-document.</p>
+    document.</p>
 
-<!-- TODO
-<h2 id="state-list">StateListDrawable</h2>
+  <!-- TODO
+       <h2 id="state-list">StateListDrawable</h2>
 
-<p>A StateListDrawable is an extension of the DrawableContainer class, making it  little different. 
-The primary distinction is that the 
-StateListDrawable manages a collection of images for the Drawable, instead of just one. 
-This means that it can switch the image when you want, without switching objects. However, the 
-intention of the StateListDrawable is to automatically change the image used based on the state
-of the object it's attached to.
--->
+       <p>A StateListDrawable is an extension of the DrawableContainer class, making it a little
+         different. The primary distinction is that the
+         StateListDrawable manages a collection of images for the Drawable, instead of just one.
+         This means that it can switch the image when you want, without switching objects. However,
+         the intention of the StateListDrawable is to automatically change the image used based on
+         the state of the object it's attached to.
+         -->
 
-<h2 id="nine-patch">Nine-patch</h2>
+         <h2 id="nine-patch">Nine-patch</h2>
 
-<p>A {@link android.graphics.drawable.NinePatchDrawable} graphic is a stretchable bitmap image, which Android
-will automatically resize to accommodate the contents of the View in which you have placed it as the background. 
-An example use of a NinePatch is the backgrounds used by standard Android buttons &mdash;
-buttons must stretch to accommodate strings of various lengths. A NinePatch drawable is a standard PNG 
-image that includes an extra 1-pixel-wide border. It must be saved with the extension <code>.9.png</code>,
-and saved into the <code>res/drawable/</code> directory of your project.
-</p>
-<p>
-    The border is used to define the stretchable and static areas of 
-    the image. You indicate a stretchable section by drawing one (or more) 1-pixel-wide 
-    black line(s) in the left and top part of the border (the other border pixels should
-    be fully transparent or white). You can have as many stretchable sections as you want:
-    their relative size stays the same, so the largest sections always remain the largest.
-</p>
-<p>
-    You can also define an optional drawable section of the image (effectively, 
-    the padding lines) by drawing a line on the right and bottom lines. 
-    If a View object sets the NinePatch as its background and then specifies the 
-    View's text, it will stretch itself so that all the text fits inside only
-    the area designated by the right and bottom lines (if included). If the 
-    padding lines are not included, Android uses the left and top lines to 
-    define this drawable area.
-</p>
-<p>To clarify the difference between the different lines, the left and top lines define 
-which pixels of the image are allowed to be replicated in order to stretch the image.
-The bottom and right lines define the relative area within the image that the contents
-of the View are allowed to lie within.</p>
-<p>
-    Here is a sample NinePatch file used to define a button:
-</p>
-    <img src="{@docRoot}images/ninepatch_raw.png" alt="" />
+         <p>A {@link android.graphics.drawable.NinePatchDrawable} graphic is a stretchable bitmap
+           image, which Android will automatically resize to accommodate the contents of the View
+           in which you have placed it as the background.
+           An example use of a NinePatch is the backgrounds used by standard Android buttons &mdash;
+           buttons must stretch to accommodate strings of various lengths. A NinePatch drawable is a
+           standard PNG image that includes an extra 1-pixel-wide border. It must be saved with the
+           extension <code>.9.png</code>,
+           and saved into the <code>res/drawable/</code> directory of your project.
+         </p>
+         <p>
+           The border is used to define the stretchable and static areas of
+           the image. You indicate a stretchable section by drawing one (or more) 1-pixel-wide
+           black line(s) in the left and top part of the border (the other border pixels should
+           be fully transparent or white). You can have as many stretchable sections as you want:
+           their relative size stays the same, so the largest sections always remain the largest.
+         </p>
+         <p>
+           You can also define an optional drawable section of the image (effectively,
+           the padding lines) by drawing a line on the right and bottom lines.
+           If a View object sets the NinePatch as its background and then specifies the
+           View's text, it will stretch itself so that all the text fits inside only
+           the area designated by the right and bottom lines (if included). If the
+           padding lines are not included, Android uses the left and top lines to
+           define this drawable area.
+         </p>
+         <p>To clarify the difference between the different lines, the left and top lines define
+           which pixels of the image are allowed to be replicated in order to stretch the image.
+           The bottom and right lines define the relative area within the image that the contents
+           of the View are allowed to lie within.</p>
+         <p>
+           Here is a sample NinePatch file used to define a button:
+         </p>
+         <img src="{@docRoot}images/ninepatch_raw.png" alt="" />
 
-<p>This NinePatch defines one stretchable area with the left and top lines
-and the drawable area with the bottom and right lines. In the top image, the dotted grey
-lines identify the regions of the image that will be replicated in order to stretch the image. The pink
-rectangle in the bottom image identifies the region in which the contents of the View are allowed.
-If the contents don't fit in this region, then the image will be stretched so that they do.
+         <p>This NinePatch defines one stretchable area with the left and top lines
+           and the drawable area with the bottom and right lines. In the top image, the dotted grey
+           lines identify the regions of the image that will be replicated in order to stretch the
+           image. The pink rectangle in the bottom image identifies the region in which the contents
+           of the View are allowed.
+           If the contents don't fit in this region, then the image will be stretched so that they do.
 </p>
 
-<p>The <a href="{@docRoot}guide/developing/tools/draw9patch.html">Draw 9-patch</a> tool offers 
-   an extremely handy way to create your NinePatch images, using a WYSIWYG graphics editor. It 
+<p>The <a href="{@docRoot}guide/developing/tools/draw9patch.html">Draw 9-patch</a> tool offers
+   an extremely handy way to create your NinePatch images, using a WYSIWYG graphics editor. It
 even raises warnings if the region you've defined for the stretchable area is at risk of
 producing drawing artifacts as a result of the pixel replication.
 </p>
@@ -298,7 +486,8 @@
 <h3>Example XML</h3>
 
 <p>Here's some sample layout XML that demonstrates how to add a NinePatch image to a
-couple of buttons. (The NinePatch image is saved as <code>res/drawable/my_button_background.9.png</code>
+couple of buttons. (The NinePatch image is saved as
+<code>res/drawable/my_button_background.9.png</code>.)</p>
 <pre>
 &lt;Button id="@+id/tiny"
         android:layout_width="wrap_content"
@@ -318,11 +507,12 @@
         android:textSize="30sp"
         android:background="@drawable/my_button_background"/&gt;
 </pre>
-<p>Note that the width and height are set to "wrap_content" to make the button fit neatly around the text.
+<p>Note that the width and height are set to "wrap_content" to make the button fit neatly around the
+text.
 </p>
 
-<p>Below are the two buttons rendered from the XML and NinePatch image shown above. 
-Notice how the width and height of the button varies with the text, and the background image 
+<p>Below are the two buttons rendered from the XML and NinePatch image shown above.
+Notice how the width and height of the button varies with the text, and the background image
 stretches to accommodate it.
 </p>
 
diff --git a/docs/html/guide/topics/graphics/animation.jd b/docs/html/guide/topics/graphics/animation.jd
index e7a07e0..e8996f6 100644
--- a/docs/html/guide/topics/graphics/animation.jd
+++ b/docs/html/guide/topics/graphics/animation.jd
@@ -1,949 +1,63 @@
-page.title=Property Animation
-parent.title=Graphics
-parent.link=index.html
+page.title=Animation
 @jd:body
 
   <div id="qv-wrapper">
     <div id="qv">
-      <h2>In this document</h2>
 
+      <h2>See also</h2>
       <ol>
-        <li><a href="#what">What is Property Animation?</a>
-          <ol>
-            <li><a href="#how">How property animation works</a></li>
-          </ol>
-        </li>
-
-        <li><a href="#value-animator">Animating with ValueAnimator</a></li>
-
-        <li><a href="#object-animator">Animating with ObjectAnimator</a></li>
-
-        <li><a href="#choreography">Choreographing Multiple Animations with
-        AnimatorSet</a></li>
-
-        <li><a href="#listeners">Animation Listeners</a></li>
-
-        <li><a href="#type-evaluator">Using a TypeEvaluator</a></li>
-
-        <li><a href="#interpolators">Using Interpolators</a></li>
-
-        <li><a href="#keyframes">Specifying Keyframes</a></li>
-
-        <li><a href="#layout">Animating Layout Changes to ViewGroups</a></li>
-
-        <li><a href="#views">Animating Views</a>
-          <ol>
-            <li><a href="#view-prop-animator">ViewPropertyAnimator</a></li>
-          </ol>
-        </li>
-
-        <li><a href="#declaring-xml">Declaring Animations in XML</a></li>
-      </ol>
-
-      <h2>Key classes</h2>
-
+        <li><a href="{@docRoot}guide/topics/graphics/property-animation.html">Property Animation</a></li>
+        <li><a href="{@docRoot}guide/topics/graphics/view-animation.html">View Animation</a></li>
+        <li><a href="{@docRoot}guide/topics/graphics/drawable-animation.html">Drawable Animation</a></li>
       <ol>
-        <li><code><a href=
-        "/reference/android/animation/ValueAnimator.html">ValueAnimator</a></code></li>
-
-        <li><code><a href=
-        "/reference/android/animation/ObjectAnimator.html">ObjectAnimator</a></code></li>
-
-        <li><code><a href=
-        "/reference/android/animation/TypeEvaluator.html">TypeEvaluator</a></code></li>
-      </ol>
-
-      <h2>Related samples</h2>
-
-      <ol>
-        <li><a href=
-        "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/index.html">API
-        Demos</a></li>
-      </ol>
     </div>
   </div>
 
-  <p>Introduced in Android 3.0, the property animation system is a robust framework that allows you
-  to animate almost anything. Property animation is not confined to objects drawn on the screen.
-  You can define an animation to change any object property over time, regardless of whether it
-  draws to the screen or not.The property animation system also has a few advantages over the view
-  animation system, which makes it more flexible to use.</p>
+  <p>The Android framework provides two animation systems: property animation
+  (introduced in Android 3.0) and view animation. Both animation systems are viable options,
+  but the property animation system, in general, is the preferred method to use, because it
+  is more flexible and offers more features. In addition to these two systems, you can utilize Drawable
+animation, which allows you to load drawable resources and display them one frame after
+another.</p>
 
-  <p>The view animation system provides the capability to only animate View objects, so if
-  you wanted to animate non-View objects, you had to implement your own code to do so. The view
-  animation system also was constrained in the fact that it only exposed a few aspects of a View
-  object to animate, such as the scaling and rotation of a View but not the background color for
-  instance.</p>
+  <p>The view animation system provides the capability to only animate {@link android.view.View}
+objects, so if you want to animate non-{@link android.view.View} objects, you have to implement
+your own code to do so. The view animation system is also constrained in the fact that it only
+exposes a few aspects of a {@link android.view.View} object to animate, such as the scaling and
+rotation of a View but not the background color, for instance.</p>
 
   <p>Another disadvantage of the view animation system is that it only modified where the
   View was drawn, and not the actual View itself. For instance, if you animated a button to move
   across the screen, the button draws correctly, but the actual location where you can click the
-  button does not change, so you have to implement your own logic to handle this. With the property
-  animation system, these constraints are completely removed, and you can animate any property of
-  any object, including View objects, and the object itself is actually modified.</p>
+  button does not change, so you have to implement your own logic to handle this.</p>
+
+  <p>With the property animation system, these constraints are completely removed, and you can animate
+  any property of any object (Views and non-Views), and the object itself is actually modified.
+  The property animation system is also more robust in the way it carries out animation. At
+  a high level, you assign animators to the properties that you want to animate, such as color,
+  position, or size, and can define aspects of the animation such as interpolation and
+  synchronization of multiple animators.</p>
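+
+  <p>As a brief, hypothetical example of the property animation APIs (API level 11 and higher), the
+  snippet below fades in a View named <code>myView</code> (a placeholder reference):</p>
+<pre>
+ObjectAnimator fadeIn = ObjectAnimator.ofFloat(myView, "alpha", 0f, 1f);
+fadeIn.setDuration(500);
+fadeIn.setInterpolator(new AccelerateDecelerateInterpolator());
+fadeIn.start();
+</pre>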
 
   <p>The view animation system, however, takes less time to setup and requires less code to write.
   If view animation accomplishes everything that you need to do, or if your existing code already
-  works the way you want, there is no need to use the property animation system.</p>
+  works the way you want, there is no need to use the property animation system. It can also
+  make sense to use both animation systems for different situations within the same application.</p>
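+
+  <p>For comparison, the same fade-in written with the view animation system might look like this short,
+  illustrative sketch:</p>
+<pre>
+AlphaAnimation fadeIn = new AlphaAnimation(0f, 1f);
+fadeIn.setDuration(500);
+myView.startAnimation(fadeIn);
+</pre>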
 
-    <p class="note"><strong>Tip:</strong> To see how the ADT layout editor allows you to develop and
-preview animations in your layout, watch the <a
-href="http://www.youtube.com/watch?v=Oq05KqjXTvs&feature=player_detailpage#t=1709s">Android
-Developer Tools session</a> from Google I/O '11</p>
+<dl>
+<dt><strong><a href="{@docRoot}guide/topics/graphics/prop-animation.html">Property
+Animation</a></strong></dt>
+<dd>Introduced in Android 3.0 (API level 11), the property animation system lets you
+animate properties of any object, including ones that are not rendered to the screen. The system is
+extensible and lets you animate properties of custom types as well.</dd>
 
+<dt><strong><a href="{@docRoot}guide/topics/graphics/view-animation.html">View
+Animation</a></strong></dt>
+<dd>View Animation is the older system and can only be used for Views. It is relatively easy to
+set up and offers enough capabilities to meet many applications' needs.</dd>
+</dl>
 
-  <h2 id="what">What is Property Animation?</h2>
-  A property animation changes a property's (a field in
-  an object) value over a specified length of time. To animate something, you specify the
-  object property that you want to animate, such as an object's position on the screen, how long
-  you want to animate it for, and what values you want to animate between. </p>
-
-  <p>The property animation system lets you define the following characteristics of an
-  animation:</p>
-
-  <ul>
-    <li>Duration: You can specify the duration of an animation. The default length is 300 ms.</li>
-
-    <li>Time interpolation: You can specify how the values for the property are calculated as a
-    function of the animation's current elapsed time.</li>
-
-    <li>Repeat count and behavior: You can specify whether or not to have an animation repeat when
-    it reaches the end of a duration and how many times to repeat the animation. You can also
-    specify whether you want the animation to play back in reverse. Setting it to reverse plays
-    the animation forwards then backwards repeatedly, until the number of repeats is reached.</li>
-
-    <li>Animator sets: You can group animations into logical sets that play together or
-    sequentially or after specified delays.</li>
-
-    <li>Frame refresh delay: You can specify how often to refresh frames of your animation. The
-    default is set to  refresh every 10 ms, but the speed in which your application can refresh frames is
-    ultimately dependent on how busy the system is overall and how fast the system can service the underlying timer.</li>
-  </ul>
-
-
-  <h3 id="how">How the property animation system works</h3>
-
-  <p>First, let's go over how an animation works with a simple example. Figure 1 depicts a
-  hypothetical object that is animated with its <code>x</code> property, which represents its
-  horizontal location on a screen. The duration of the animation is set to 40 ms and the distance
-  to travel is 40 pixels. Every 10 ms, which is the default frame refresh rate, the object moves
-  horizontally by 10 pixels. At the end of 40ms, the animation stops, and the object ends at
-  horizontal position 40. This is an example of an animation with linear interpolation, meaning the
-  object moves at a constant speed.</p><img src="{@docRoot}images/animation/animation-linear.png">
-
-  <p class="img-caption"><strong>Figure 1.</strong> Example of a linear animation</p>
-
-  <p>You can also specify animations to have a non-linear interpolation. Figure 2 illustrates a
-  hypothetical object that accelerates at the beginning of the animation, and decelerates at the
-  end of the animation. The object still moves 40 pixels in 40 ms, but non-linearly. In the
-  beginning, this animation accelerates up to the halfway point then decelerates from the
-  halfway point until the end of the animation. As Figure 2 shows, the distance traveled
-  at the beginning and end of the animation is less than in the middle.</p><img src=
-  "{@docRoot}images/animation/animation-nonlinear.png">
-
-  <p class="img-caption"><strong>Figure 2.</strong> Example of a non-linear animation</p>
-
-  <p>Let's take a detailed look at how the important components of the property animation system
-  would calculate animations like the ones illustrated above. Figure 3 depicts how the main classes
-  work with one another.</p><img src="{@docRoot}images/animation/valueanimator.png">
-
-  <p class="img-caption"><strong>Figure 3.</strong> How animations are calculated</p>
-
-  <p>The {@link android.animation.ValueAnimator} object keeps track of your animation's timing,
-  such as how long the animation has been running, and the current value of the property that it is
-  animating.</p>
-
-  <p>The {@link android.animation.ValueAnimator} encapsulates a {@link
-  android.animation.TimeInterpolator}, which defines animation interpolation, and a {@link
-  android.animation.TypeEvaluator}, which defines how to calculate values for the property being
-  animated. For example, in Figure 2, the {@link android.animation.TimeInterpolator} used would be
-  {@link android.view.animation.AccelerateDecelerateInterpolator} and the {@link
-  android.animation.TypeEvaluator} would be {@link android.animation.IntEvaluator}.</p>
-
-  <p>To start an animation, create a {@link android.animation.ValueAnimator} and give it the
-  starting and ending values for the property that you want to animate, along with the duration of
-  the animation. When you call {@link android.animation.ValueAnimator#start start()} the animation
-  begins. During the whole animation, the {@link android.animation.ValueAnimator} calculates an <em>elapsed fraction</em>
-  between 0 and 1, based on the duration of the animation and how much time has elapsed. The
-  elapsed fraction represents the percentage of time that the animation has completed, 0 meaning 0%
-  and 1 meaning 100%. For example, in Figure 1, the elapsed fraction at t = 10 ms would be .25
-  because the total duration is t = 40 ms.</p>
-
-  <p>When the {@link android.animation.ValueAnimator} is done calculating an elapsed fraction, it
-  calls the {@link android.animation.TimeInterpolator} that is currently set, to calculate an
-  <em>interpolated fraction</em>. An interpolated fraction maps the elapsed fraction to a new
-  fraction that takes into account the time interpolation that is set. For example, in Figure 2,
-  because the animation slowly accelerates, the interpolated fraction, about .15, is less than the
-  elapsed fraction, .25, at t = 10 ms. In Figure 1, the interpolated fraction is always the same as
-  the elapsed fraction.</p>
-
-  <p>When the interpolated fraction is calculated, {@link android.animation.ValueAnimator} calls
-  the appropriate {@link android.animation.TypeEvaluator}, to calculate the value of the
-  property that you are animating, based on the interpolated fraction, the starting value, and the
-  ending value of the animation. For example, in Figure 2, the interpolated fraction was .15 at t =
-  10 ms, so the value for the property at that time would be .15 X (40 - 0), or 6.</p>
-
- <!-- <p>When the final value is calculated, the {@link android.animation.ValueAnimator} calls the
-  {@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate
-  onAnimationUpdate()} method. Implement this callback to obtain the property value by
-  calling {@link android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()} and set the
-  value for the property in the object that you are animating. Setting the property doesn't redraw
-  the object on the screen, so you need to call {@link
-  android.view.View#invalidate invalidate()} to refresh the View that the object
-  resides in. If the object is actually a View object, then the system calls {@link
-  android.view.View#invalidate invalidate()} when the property is changed.
-  The system redraws the window and the {@link android.animation.ValueAnimator}
-  repeats the process.</p>-->
-
-  <p>The <code>com.example.android.apis.animation</code> package in the <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/index.html">API
-  Demos</a> sample project provides many examples on how to use the property
-  animation system.</p>
-
-  <h2>API Overview</h2>
-
-  <p>You can find most of the property animation system's APIs in {@link android.animation
-  android.animation}. Because the view animation system already
-  defines many interpolators in {@link android.view.animation android.view.animation}, you can use
-  those interpolators in the property animation system as well. The following tables describe the main
-  components of the property animation system.</p>
-
-  <p>The {@link android.animation.Animator} class provides the basic structure for creating
-  animations. You normally do not use this class directly as it only provides minimal
-  functionality that must be extended to fully support animating values. The following
-  subclasses extend {@link android.animation.Animator}:
-  </p>
-  <p class="table-caption"><strong>Table 1.</strong> Animators</p>
-      <table>
-        <tr>
-          <th>Class</th>
-
-          <th>Description</th>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.ValueAnimator}</td>
-
-          <td>The main timing engine for property animation that also computes the values for the
-          property to be animated. It has all of the core functionality that calculates animation
-          values and contains the timing details of each animation, information about whether an
-          animation repeats, listeners that receive update events, and the ability to set custom
-          types to evaluate. There are two pieces to animating properties: calculating the animated
-          values and setting those values on the object and property that is being animated. {@link
-          android.animation.ValueAnimator} does not carry out the second piece, so you must listen
-          for updates to values calculated by the {@link android.animation.ValueAnimator} and
-          modify the objects that you want to animate with your own logic. See the section about
-          <a href="#value-animator">Animating with ValueAnimator</a> for more information.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.ObjectAnimator}</td>
-
-          <td>A subclass of {@link android.animation.ValueAnimator} that allows you to set a target
-          object and object property to animate. This class updates the property accordingly when
-          it computes a new value for the animation. You want to use
-          {@link android.animation.ObjectAnimator} most of the time,
-          because it makes the process of animating values on target objects much easier. However,
-          you sometimes want to use {@link android.animation.ValueAnimator} directly because {@link
-          android.animation.ObjectAnimator} has a few more restrictions, such as requiring specific
-          acessor methods to be present on the target object.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.AnimatorSet}</td>
-
-          <td>Provides a mechanism to group animations together so that they run in
-          relation to one another. You can set animations to play together, sequentially, or after
-          a specified delay. See the section about <a href="#choreography">Choreographing multiple
-          animations with Animator Sets</a> for more information.</td>
-        </tr>
-      </table>
-
-
-      <p>Evaluators tell the property animation system how to calculate values for a given
-      property. They take the timing data that is provided by an {@link android.animation.Animator}
-      class, the animation's start and end value, and calculate the animated values of the property
-      based on this data. The property animation system provides the following evaluators:</p>
-      <p class="table-caption"><strong>Table 2.</strong> Evaluators</p>
-      <table>
-        <tr>
-          <th>Class/Interface</th>
-
-          <th>Description</th>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.IntEvaluator}</td>
-
-          <td>The default evaluator to calculate values for <code>int</code> properties.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.FloatEvaluator}</td>
-
-          <td>The default evaluator to calculate values for <code>float</code> properties.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.ArgbEvaluator}</td>
-
-          <td>The default evaluator to calculate values for color properties that are represented
-          as hexidecimal values.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.TypeEvaluator}</td>
-
-          <td>An interface that allows you to create your own evaluator. If you are animating an
-          object property that is <em>not</em> an <code>int</code>, <code>float</code>, or color,
-          you must implement the {@link android.animation.TypeEvaluator} interface to specify how
-          to compute the object property's animated values. You can also specify a custom {@link
-          android.animation.TypeEvaluator} for <code>int</code>, <code>float</code>, and color
-          values as well, if you want to process those types differently than the default behavior.
-          See the section about <a href="#type-evaluator">Using a TypeEvaluator</a> for more
-          information on how to write a custom evaluator.</td>
-        </tr>
-      </table>
-
-
-
-
-      <p>A time interpolator defines how specific values in an animation are calculated as a
-      function of time. For example, you can specify animations to happen linearly across the whole
-      animation, meaning the animation moves evenly the entire time, or you can specify animations
-      to use non-linear time, for example, accelerating at the beginning and decelerating at the
-      end of the animation. Table 3 describes the interpolators that are contained in {@link
-      android.view.animation android.view.animation}. If none of the provided interpolators suits
-      your needs, implement the {@link android.animation.TimeInterpolator} interface and create your own. See <a href=
-  "#interpolators">Using interpolators</a> for more information on how to write a custom
-  interpolator.</p>
-      <p class="table-caption"><strong>Table 3.</strong> Interpolators</p>
-      <table>
-        <tr>
-          <th>Class/Interface</th>
-
-          <th>Description</th>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.AccelerateDecelerateInterpolator}</td>
-
-          <td>An interpolator whose rate of change starts and ends slowly but accelerates
-          through the middle.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.AccelerateInterpolator}</td>
-
-          <td>An interpolator whose rate of change starts out slowly and then
-          accelerates.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.AnticipateInterpolator}</td>
-
-          <td>An interpolator whose change starts backward then flings forward.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.AnticipateOvershootInterpolator}</td>
-
-          <td>An interpolator whose change starts backward, flings forward and overshoots
-          the target value, then finally goes back to the final value.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.BounceInterpolator}</td>
-
-          <td>An interpolator whose change bounces at the end.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.CycleInterpolator}</td>
-
-          <td>An interpolator whose animation repeats for a specified number of cycles.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.DecelerateInterpolator}</td>
-
-          <td>An interpolator whose rate of change starts out quickly and then
-          decelerates.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.LinearInterpolator}</td>
-
-          <td>An interpolator whose rate of change is constant.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.view.animation.OvershootInterpolator}</td>
-
-          <td>An interpolator whose change flings forward and overshoots the last value then
-          comes back.</td>
-        </tr>
-
-        <tr>
-          <td>{@link android.animation.TimeInterpolator}</td>
-
-          <td>An interface that allows you to implement your own interpolator.</td>
-        </tr>
-      </table>
-
-  <h2 id="value-animator">Animating with ValueAnimator</h2>
-
-  <p>The {@link android.animation.ValueAnimator} class lets you animate values of some type for the
-  duration of an animation by specifying a set of <code>int</code>, <code>float</code>, or color
-  values to animate through. You obtain a {@link android.animation.ValueAnimator} by calling one of
-  its factory methods: {@link android.animation.ValueAnimator#ofInt ofInt()}, {@link
-  android.animation.ValueAnimator#ofFloat ofFloat()}, or {@link
-  android.animation.ValueAnimator#ofObject ofObject()}. For example:</p>
-  <pre>
-ValueAnimator animation = ValueAnimator.ofFloat(0f, 1f);
-animation.setDuration(1000);
-animation.start();
-</pre>
-
-  <p>In this code, the {@link android.animation.ValueAnimator} starts calculating the values of the
-  animation, between 0 and 1, for a duration of 1000 ms, when the <code>start()</code> method
-  runs.</p>
-
-  <p>You can also specify a custom type to animate by doing the following:</p>
-  <pre>
-ValueAnimator animation = ValueAnimator.ofObject(new MyTypeEvaluator(), startPropertyValue, endPropertyValue);
-animation.setDuration(1000);
-animation.start();
-</pre>
-
-  <p>In this code, the {@link android.animation.ValueAnimator} starts calculating the values of the
-  animation, between <code>startPropertyValue</code> and <code>endPropertyValue</code> using the
-  logic supplied by <code>MyTypeEvaluator</code> for a duration of 1000 ms, when the {@link
-  android.animation.ValueAnimator#start start()} method runs.</p>
-
-  <p>The previous code snippets, however, have no real effect on an object, because the {@link
-  android.animation.ValueAnimator} does not operate on objects or properties directly. What you most
-  likely want to do is modify the objects that you want to animate with these calculated values. You do
-  this by defining listeners in the {@link android.animation.ValueAnimator} to appropriately handle important events
-  during the animation's lifespan, such as frame updates. When implementing the listeners, you can
-  obtain the calculated value for that specific frame refresh by calling {@link
-  android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()}. For more information on listeners,
-  see the section about <a href="#listeners">Animation Listeners</a>.</p>
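-
-  <p>For example, the following is a minimal sketch (it assumes a view field named
-  <code>myView</code>) that uses an update listener to apply each calculated value to the view's
-  alpha:</p>
-  <pre>
-ValueAnimator animation = ValueAnimator.ofFloat(0f, 1f);
-animation.setDuration(1000);
-animation.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
-    public void onAnimationUpdate(ValueAnimator animator) {
-        // Read the value computed for this frame and apply it to the assumed myView field.
-        float value = (Float) animator.getAnimatedValue();
-        myView.setAlpha(value);
-    }
-});
-animation.start();
-</pre>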
-
-  <h2 id="object-animator">Animating with ObjectAnimator</h2>
-
-  <p>The {@link android.animation.ObjectAnimator} is a subclass of the {@link
-  android.animation.ValueAnimator} (discussed in the previous section) and combines the timing
-  engine and value computation of {@link android.animation.ValueAnimator} with the ability to
-  animate a named property of a target object. This makes animating any object much easier, as you
-  no longer need to implement the {@link android.animation.ValueAnimator.AnimatorUpdateListener},
-  because the animated property updates automatically.</p>
-
-  <p>Instantiating an {@link android.animation.ObjectAnimator} is similar to a {@link
-  android.animation.ValueAnimator}, but you also specify the object and the name of that object's property (as
-  a String) along with the values to animate between:</p>
-  <pre>
-ObjectAnimator anim = ObjectAnimator.ofFloat(foo, "alpha", 0f, 1f);
-anim.setDuration(1000);
-anim.start();
-</pre>
-
-  <p>To have the {@link android.animation.ObjectAnimator} update properties correctly, you must do
-  the following:</p>
-
-  <ul>
-    <li>The object property that you are animating must have a setter function (in camel case) in the form of
-    <code>set&lt;propertyName&gt;()</code>. Because the {@link android.animation.ObjectAnimator}
-    automatically updates the property during animation, it must be able to access the property
-    with this setter method. For example, if the property name is <code>foo</code>, you need to
-    have a <code>setFoo()</code> method. If this setter method does not exist, you have three
-    options:
-
-      <ul>
-        <li>Add the setter method to the class if you have the rights to do so.</li>
-
-        <li>Use a wrapper class that you have rights to change and have that wrapper receive the
-        value with a valid setter method and forward it to the original object.</li>
-
-        <li>Use {@link android.animation.ValueAnimator} instead.</li>
-      </ul>
-    </li>
-
-    <li>If you specify only one value for the <code>values...</code> parameter in one of the {@link
-    android.animation.ObjectAnimator} factory methods, it is assumed to be the ending value of the
-    animation. Therefore, the object property that you are animating must have a getter function
-    that is used to obtain the starting value of the animation. The getter function must be in the
-    form of <code>get&lt;propertyName&gt;()</code>. For example, if the property name is
-    <code>foo</code>, you need to have a <code>getFoo()</code> method.</li>
-
-    <li>The getter (if needed) and setter methods of the property that you are animating must
-    operate on the same type as the starting and ending values that you specify to {@link
-    android.animation.ObjectAnimator}. For example, you must have
-    a <code>targetObject.setPropName(float)</code> method and a <code>targetObject.getPropName()</code> method that returns a <code>float</code>
-    if you construct the following {@link android.animation.ObjectAnimator}:
-      <pre>
-ObjectAnimator.ofFloat(targetObject, "propName", 1f)
-</pre>
-    </li>
-
-    <li>Depending on what property or object you are animating, you might need to call the {@link
-    android.view.View#invalidate invalidate()} method on a View to force the screen to redraw itself with the
-    updated animated values. You do this in the
-    {@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate onAnimationUpdate()}
-    callback, as shown in the sketch after this list. For example, animating the color property of a Drawable
-    object only causes updates to the screen when that object redraws itself. All of the property setters on View, such as
-    {@link android.view.View#setAlpha setAlpha()} and {@link android.view.View#setTranslationX setTranslationX()},
-    invalidate the View properly, so you do not need to invalidate the View when calling these
-    methods with new values. For more information on listeners, see the section about <a href="#listeners">Animation Listeners</a>.
-    </li>
-  </ul>
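-
-  <p>As a minimal sketch of the last point (the <code>myDrawable</code> object with its
-  <code>setColor()</code>/<code>getColor()</code> methods and the <code>myView</code> field are
-  assumptions used only for illustration), the following animates a custom Drawable's color and
-  invalidates the host View on every frame:</p>
-  <pre>
-// myDrawable and myView are assumed fields; myDrawable exposes setColor(int)/getColor().
-ObjectAnimator colorAnim = ObjectAnimator.ofInt(myDrawable, "color", 0xFFFF0000, 0xFF0000FF);
-colorAnim.setEvaluator(new ArgbEvaluator());
-colorAnim.setDuration(1000);
-colorAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
-    public void onAnimationUpdate(ValueAnimator animator) {
-        // The Drawable does not redraw the screen on its own, so invalidate the
-        // View that displays it on every animation frame.
-        myView.invalidate();
-    }
-});
-colorAnim.start();
-</pre>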
-
-  <h2 id="choreography">Choreographing Multiple Animations with AnimatorSet</h2>
-
-  <p>In many cases, you want to play an animation that depends on when another animation starts or
-  finishes. The Android system lets you bundle animations together into an {@link
-  android.animation.AnimatorSet}, so that you can specify whether to start animations
-  simultaneously, sequentially, or after a specified delay. You can also nest {@link
-  android.animation.AnimatorSet} objects within each other.</p>
-
-  <p>The following sample code taken from the <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
-  Balls</a> sample (modified for simplicity) plays the following {@link android.animation.Animator}
-  objects in the following manner:</p>
-
-  <ol>
-    <li>Plays <code>bounceAnim</code>.</li>
-
-    <li>Plays <code>squashAnim1</code>, <code>squashAnim2</code>, <code>stretchAnim1</code>, and
-    <code>stretchAnim2</code> at the same time.</li>
-
-    <li>Plays <code>bounceBackAnim</code>.</li>
-
-    <li>Plays <code>fadeAnim</code>.</li>
-  </ol>
-  <pre>
-AnimatorSet bouncer = new AnimatorSet();
-bouncer.play(bounceAnim).before(squashAnim1);
-bouncer.play(squashAnim1).with(squashAnim2);
-bouncer.play(squashAnim1).with(stretchAnim1);
-bouncer.play(squashAnim1).with(stretchAnim2);
-bouncer.play(bounceBackAnim).after(stretchAnim2);
-ValueAnimator fadeAnim = ObjectAnimator.ofFloat(newBall, "alpha", 1f, 0f);
-fadeAnim.setDuration(250);
-AnimatorSet animatorSet = new AnimatorSet();
-animatorSet.play(bouncer).before(fadeAnim);
-animatorSet.start();
-</pre>
-
-  <p>For a more complete example on how to use animator sets, see the <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
-  Balls</a> sample in APIDemos.</p>
-
-<h2 id="listeners">Animation Listeners</h2>
-<p>
-You can listen for important events during an animation's duration with the listeners described below.
-</p>
-
-  <ul>
-    <li>{@link android.animation.Animator.AnimatorListener}
-
-      <ul>
-        <li>{@link android.animation.Animator.AnimatorListener#onAnimationStart onAnimationStart()}
-        - Called when the animation starts.</li>
-
-        <li>{@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()} -
-        Called when the animation ends.</li>
-
-        <li>{@link android.animation.Animator.AnimatorListener#onAnimationRepeat
-        onAnimationRepeat()} - Called when the animation repeats itself.</li>
-
-        <li>{@link android.animation.Animator.AnimatorListener#onAnimationCancel
-        onAnimationCancel()} - Called when the animation is canceled. A canceled animation
-        also calls {@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()},
-        regardless of how it ended.</li>
-      </ul>
-    </li>
-
-    <li>{@link android.animation.ValueAnimator.AnimatorUpdateListener}
-
-      <ul>
-        <li>
-          <p>{@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate
-          onAnimationUpdate()} - Called on every frame of the animation. Listen to this event to
-          use the calculated values generated by {@link android.animation.ValueAnimator} during an
-          animation. To use the value, query the {@link android.animation.ValueAnimator} object
-          passed into the event to get the current animated value with the {@link
-          android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()} method. Implementing this
-          listener is required if you use {@link android.animation.ValueAnimator}. </p>
-
-          <p>
-          Depending on what property or object you are animating, you might need to call
-          {@link android.view.View#invalidate invalidate()} on a View to force that area of the
-          screen to redraw itself with the new animated values. For example, animating the
-          color property of a Drawable object only causes updates to the screen when that object
-          redraws itself. All of the property setters on View,
-          such as {@link android.view.View#setAlpha setAlpha()} and
-          {@link android.view.View#setTranslationX setTranslationX()} invalidate the View
-          properly, so you do not need to invalidate the View when calling these methods with new values.
-          </p>
-
-        </li>
-      </ul>
-    </li>
-  </ul>
-
-<p>You can extend the {@link android.animation.AnimatorListenerAdapter} class instead of
-implementing the {@link android.animation.Animator.AnimatorListener} interface, if you do not
-want to implement all of the methods of the {@link android.animation.Animator.AnimatorListener}
-interface. The {@link android.animation.AnimatorListenerAdapter} class provides empty
-implementations of the methods that you can choose to override.</p>
-  <p>For example, the <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
-  Balls</a> sample in the API demos creates an {@link android.animation.AnimatorListenerAdapter}
-  for just the {@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()}
-  callback:</p>
-  <pre>
-ValueAnimator fadeAnim = ObjectAnimator.ofFloat(newBall, "alpha", 1f, 0f);
-fadeAnim.setDuration(250);
-fadeAnim.addListener(new AnimatorListenerAdapter() {
-    public void onAnimationEnd(Animator animation) {
-        // Remove the animated ball from the list once its fade-out completes.
-        balls.remove(((ObjectAnimator)animation).getTarget());
-    }
-});
-</pre>
-
-
-  <h2 id="layout">Animating Layout Changes to ViewGroups</h2>
-
-  <p>The property animation system provides the capability to animate changes to ViewGroup objects
-  as well as provide an easy way to animate View objects themselves.</p>
-
-  <p>You can animate layout changes within a ViewGroup with the {@link
-  android.animation.LayoutTransition} class. Views inside a ViewGroup can go through an appearing
-  and disappearing animation when you add them to or remove them from a ViewGroup or when you call
-  a View's {@link android.view.View#setVisibility setVisibility()} method with {@link
-  android.view.View#VISIBLE}, {@link android.view.View#INVISIBLE}, or {@link android.view.View#GONE}. The remaining Views in the
-  ViewGroup can also animate into their new positions when you add or remove Views. You can define
-  the following animations in a {@link android.animation.LayoutTransition} object by calling {@link
-  android.animation.LayoutTransition#setAnimator setAnimator()} and passing in an {@link
-  android.animation.Animator} object with one of the following {@link
-  android.animation.LayoutTransition} constants:</p>
-
-  <ul>
-    <li><code>APPEARING</code> - A flag indicating the animation that runs on items that are
-    appearing in the container.</li>
-
-    <li><code>CHANGE_APPEARING</code> - A flag indicating the animation that runs on items that are
-    changing due to a new item appearing in the container.</li>
-
-    <li><code>DISAPPEARING</code> - A flag indicating the animation that runs on items that are
-    disappearing from the container.</li>
-
-    <li><code>CHANGE_DISAPPEARING</code> - A flag indicating the animation that runs on items that
-    are changing due to an item disappearing from the container.</li>
-  </ul>
-
-  <p>You can define your own custom animations for these four types of events to customize the look
-  of your layout transitions or just tell the animation system to use the default animations.</p>
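-
-  <p>For example, the following sketch (the <code>container</code> ViewGroup is an assumption for
-  illustration) installs a custom animation for appearing items and keeps the defaults for the
-  other three events:</p>
-  <pre>
-LayoutTransition transition = new LayoutTransition();
-// A null target is fine here; LayoutTransition clones the animator and assigns each
-// appearing view as the target. The container field is assumed to be a ViewGroup.
-ObjectAnimator appearAnim = ObjectAnimator.ofFloat(null, "rotationY", 90f, 0f);
-transition.setAnimator(LayoutTransition.APPEARING, appearAnim);
-container.setLayoutTransition(transition);
-</pre>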
-
-  <p>The <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/LayoutAnimations.html">
-  LayoutAnimations</a> sample in API Demos shows you how to define animations for layout
-  transitions and then set the animations on the View objects that you want to animate.</p>
-
-  <p>The <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/LayoutAnimationsByDefault.html">
-  LayoutAnimationsByDefault</a> and its corresponding <a href=
-  "{@docRoot}resources/samples/ApiDemos/res/layout/layout_animations_by_default.html">layout_animations_by_default.xml</a>
-  layout resource file show you how to enable the default layout transitions for ViewGroups in XML.
-  The only thing that you need to do is to set the <code>android:animateLayoutChanges</code>
-  attribute to <code>true</code> for the ViewGroup. For example:</p>
-  <pre>
-&lt;LinearLayout
-    android:orientation="vertical"
-    android:layout_width="wrap_content"
-    android:layout_height="match_parent"
-    android:id="@+id/verticalContainer"
-    android:animateLayoutChanges="true" /&gt;
-</pre>
-
-  <p>Setting this attribute to true automatically animates Views that are added or removed from the
-  ViewGroup as well as the remaining Views in the ViewGroup.</p>
-
-  <h2 id="type-evaluator">Using a TypeEvaluator</h2>
-
-  <p>If you want to animate a type that is unknown to the Android system, you can create your own
-  evaluator by implementing the {@link android.animation.TypeEvaluator} interface. The types that
-  are known by the Android system are <code>int</code>, <code>float</code>, or a color, which are
-  supported by the {@link android.animation.IntEvaluator}, {@link
-  android.animation.FloatEvaluator}, and {@link android.animation.ArgbEvaluator} type
-  evaluators.</p>
-
-  <p>There is only one method to implement in the {@link android.animation.TypeEvaluator}
-  interface, the {@link android.animation.TypeEvaluator#evaluate evaluate()} method. This allows
-  the animator that you are using to return an appropriate value for your animated property at the
-  current point of the animation. The {@link android.animation.FloatEvaluator} class demonstrates
-  how to do this:</p>
-  <pre>
-public class FloatEvaluator implements TypeEvaluator {
-
-    public Object evaluate(float fraction, Object startValue, Object endValue) {
-        float startFloat = ((Number) startValue).floatValue();
-        return startFloat + fraction * (((Number) endValue).floatValue() - startFloat);
-    }
-}
-</pre>
-
-  <p class="note"><strong>Note:</strong> When {@link android.animation.ValueAnimator} (or {@link
-  android.animation.ObjectAnimator}) runs, it calculates a current elapsed fraction of the
-  animation (a value between 0 and 1) and then calculates an interpolated version of that depending
-  on what interpolator that you are using. The interpolated fraction is what your {@link
-  android.animation.TypeEvaluator} receives through the <code>fraction</code> parameter, so you do
-  not have to take into account the interpolator when calculating animated values.</p>
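-
-  <p>As a further illustration, a hypothetical evaluator (not part of the platform) for {@link
-  android.graphics.PointF} could interpolate the x and y components independently:</p>
-  <pre>
-// A hypothetical evaluator for PointF values, shown only as a sketch.
-public class PointFEvaluator implements TypeEvaluator&lt;PointF&gt; {
-
-    public PointF evaluate(float fraction, PointF startValue, PointF endValue) {
-        // Interpolate each component between its start and end value.
-        float x = startValue.x + fraction * (endValue.x - startValue.x);
-        float y = startValue.y + fraction * (endValue.y - startValue.y);
-        return new PointF(x, y);
-    }
-}
-</pre>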
-
-  <h2 id="interpolators">Using Interpolators</h2>
-
-  <p>An interpolator defines how specific values in an animation are calculated as a function of
-  time. For example, you can specify animations to happen linearly across the whole animation,
-  meaning the animation moves evenly the entire time, or you can specify animations to use
-  non-linear time, for example, using acceleration or deceleration at the beginning or end of the
-  animation.</p>
-
-  <p>Interpolators in the animation system receive a fraction from Animators that represents the
-  elapsed time of the animation. Interpolators modify this fraction to coincide with the type of
-  animation they aim to provide. The Android system provides a set of common interpolators in
-  the {@link android.view.animation android.view.animation package}. If none of these suit your
-  needs, you can implement the {@link android.animation.TimeInterpolator} interface and create your
-  own.</p>
-
-  <p>As an example, the following compares how the default interpolator, {@link
-  android.view.animation.AccelerateDecelerateInterpolator}, and the {@link
-  android.view.animation.LinearInterpolator} calculate interpolated fractions.
-  The {@link android.view.animation.LinearInterpolator} has no effect on the elapsed fraction. The {@link
-  android.view.animation.AccelerateDecelerateInterpolator} accelerates into the animation and
-  decelerates out of it. The following methods define the logic for these interpolators:</p>
-
-  <p><strong>AccelerateDecelerateInterpolator</strong></p>
-  <pre>
-public float getInterpolation(float input) {
-    return (float)(Math.cos((input + 1) * Math.PI) / 2.0f) + 0.5f;
-}
-</pre>
-
-  <p><strong>LinearInterpolator</strong></p>
-  <pre>
-public float getInterpolation(float input) {
-    return input;
-}
-</pre>
-
-  <p>The following table represents the approximate values that are calculated by these
-  interpolators for an animation that lasts 1000ms:</p>
-
-  <table>
-    <tr>
-      <th>ms elapsed</th>
-
-      <th>Elapsed fraction/Interpolated fraction (Linear)</th>
-
-      <th>Interpolated fraction (Accelerate/Decelerate)</th>
-    </tr>
-
-    <tr>
-      <td>0</td>
-
-      <td>0</td>
-
-      <td>0</td>
-    </tr>
-
-    <tr>
-      <td>200</td>
-
-      <td>.2</td>
-
-      <td>.1</td>
-    </tr>
-
-    <tr>
-      <td>400</td>
-
-      <td>.4</td>
-
-      <td>.345</td>
-    </tr>
-
-    <tr>
-      <td>600</td>
-
-      <td>.6</td>
-
-      <td>.655</td>
-    </tr>
-
-    <tr>
-      <td>800</td>
-
-      <td>.8</td>
-
-      <td>.9</td>
-    </tr>
-
-    <tr>
-      <td>1000</td>
-
-      <td>1</td>
-
-      <td>1</td>
-    </tr>
-  </table>
-
-  <p>As the table shows, the {@link android.view.animation.LinearInterpolator} changes the values
-  at the same speed, .2 for every 200ms that passes. The {@link
-  android.view.animation.AccelerateDecelerateInterpolator} changes the values faster than {@link
-  android.view.animation.LinearInterpolator} between 200ms and 600ms and slower between 600ms and
-  1000ms.</p>
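-
-  <p>To create your own interpolator, you only need to map the elapsed fraction to an interpolated
-  fraction in {@link android.animation.TimeInterpolator#getInterpolation getInterpolation()}. For
-  example, a quadratic ease-in might look like the following (a hypothetical class, shown only as a
-  sketch):</p>
-  <pre>
-// A hypothetical interpolator that starts slowly and accelerates.
-public class QuadraticEaseInInterpolator implements TimeInterpolator {
-
-    public float getInterpolation(float input) {
-        // Squaring the elapsed fraction makes the animation start slowly and speed up.
-        return input * input;
-    }
-}
-</pre>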
-
-  <h2 id="keyframes">Specifying Keyframes</h2>
-
-  <p>A {@link android.animation.Keyframe} object consists of a time/value pair that lets you define
-  a specific state at a specific time of an animation. Each keyframe can also have its own
-  interpolator to control the behavior of the animation in the interval between the previous
-  keyframe's time and the time of this keyframe.</p>
-
-  <p>To instantiate a {@link android.animation.Keyframe} object, you must use one of the factory
-  methods, {@link android.animation.Keyframe#ofInt ofInt()}, {@link
-  android.animation.Keyframe#ofFloat ofFloat()}, or {@link android.animation.Keyframe#ofObject
-  ofObject()} to obtain the appropriate type of {@link android.animation.Keyframe}. You then call
-  the {@link android.animation.PropertyValuesHolder#ofKeyframe ofKeyframe()} factory method to
-  obtain a {@link android.animation.PropertyValuesHolder} object. Once you have the object, you can
-  obtain an animator by passing in the {@link android.animation.PropertyValuesHolder} object and
-  the object to animate. The following code snippet demonstrates how to do this:</p>
-  <pre>
-Keyframe kf0 = Keyframe.ofFloat(0f, 0f);
-Keyframe kf1 = Keyframe.ofFloat(.5f, 360f);
-Keyframe kf2 = Keyframe.ofFloat(1f, 0f);
-PropertyValuesHolder pvhRotation = PropertyValuesHolder.ofKeyframe("rotation", kf0, kf1, kf2);
-ObjectAnimator rotationAnim = ObjectAnimator.ofPropertyValuesHolder(target, pvhRotation);
-rotationAnim.setDuration(5000);
-</pre>
-
-  <p>For a more complete example on how to use keyframes, see the <a href=
-  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/MultiPropertyAnimation.html">
-  MultiPropertyAnimation</a> sample in APIDemos.</p>
-
-  <h2 id="views">Animating Views</h2>
-
-  <p>The property animation system allows streamlined animation of View objects and offers
-  a few advantages over the view animation system. The view
-  animation system transformed View objects by changing the way that they were drawn. This was
-  handled in the container of each View, because the View itself had no properties to manipulate.
-  This resulted in the View being animated, but caused no change in the View object itself. This
-  led to behavior such as an object still existing in its original location, even though it was
-  drawn on a different location on the screen. In Android 3.0, new properties and the corresponding
-  getter and setter methods were added to eliminate this drawback.</p>
-  <p>The property animation system
-  can animate Views on the screen by changing the actual properties in the View objects. In
-  addition, Views also automatically call the {@link android.view.View#invalidate invalidate()}
-  method to refresh the screen whenever their properties are changed. The new properties in the {@link
-  android.view.View} class that facilitate property animations are:</p>
-
-  <ul>
-    <li><code>translationX</code> and <code>translationY</code>: These properties control where the
-    View is located as a delta from its left and top coordinates which are set by its layout
-    container.</li>
-
-    <li><code>rotation</code>, <code>rotationX</code>, and <code>rotationY</code>: These properties
-    control the rotation in 2D (<code>rotation</code> property) and 3D around the pivot point.</li>
-
-    <li><code>scaleX</code> and <code>scaleY</code>: These properties control the 2D scaling of a
-    View around its pivot point.</li>
-
-    <li><code>pivotX</code> and <code>pivotY</code>: These properties control the location of the
-    pivot point, around which the rotation and scaling transforms occur. By default, the pivot
-    point is located at the center of the object.</li>
-
-    <li><code>x</code> and <code>y</code>: These are simple utility properties to describe the
-    final location of the View in its container, as a sum of the left and top values and
-    translationX and translationY values.</li>
-
-    <li><code>alpha</code>: Represents the alpha transparency on the View. This value is 1 (opaque)
-    by default, with a value of 0 representing full transparency (not visible).</li>
-  </ul>
-
-  <p>To animate a property of a View object, such as its color or rotation value, all you need to
-  do is create a property animator and specify the View property that you want to
-  animate. For example:</p>
-  <pre>
-ObjectAnimator.ofFloat(myView, "rotation", 0f, 360f);
-</pre>
-
-<p>For more information on creating animators, see the sections on animating with
-<a href="#value-animator">ValueAnimator</a> and <a href="#object-animator">ObjectAnimator</a>.
-</p>
-
-<h3 id="view-prop-animator">Animating with ViewPropertyAnimator</h3>
-<p>The {@link android.view.ViewPropertyAnimator} provides a simple way to animate several
-properties of a {@link android.view.View} in parallel, using a single underlying {@link
-android.animation.Animator}
-object. It behaves much like an {@link android.animation.ObjectAnimator}, because it modifies the
-actual values of the view's properties, but is more efficient when animating many properties at
-once. In addition, the code for using the {@link android.view.ViewPropertyAnimator} is much
-more concise and easier to read. The following code snippets show the differences in using multiple
-{@link android.animation.ObjectAnimator} objects, a single
-{@link android.animation.ObjectAnimator}, and the {@link android.view.ViewPropertyAnimator} when
-simultaneously animating the <code>x</code> and <code>y</code> properties of a view.</p>
-
-<p><strong>Multiple ObjectAnimator objects</strong></p>
-<pre>
-ObjectAnimator animX = ObjectAnimator.ofFloat(myView, "x", 50f);
-ObjectAnimator animY = ObjectAnimator.ofFloat(myView, "y", 100f);
-AnimatorSet animSetXY = new AnimatorSet();
-animSetXY.playTogether(animX, animY);
-animSetXY.start();
-</pre>
-
-<p><strong>One ObjectAnimator</strong></p>
-<pre>
-PropertyValuesHolder pvhX = PropertyValuesHolder.ofFloat("x", 50f);
-PropertyValuesHolder pvhY = PropertyValuesHolder.ofFloat("y", 100f);
-ObjectAnimator.ofPropertyValuesHolder(myView, pvhX, pvhY).start();
-</pre>
-
-<p><strong>ViewPropertyAnimator</strong></p>
-<pre>
-myView.animate().x(50f).y(100f);
-</pre>
-
-<p>
-For more detailed information about {@link
-android.view.ViewPropertyAnimator}, see the corresponding Android Developers
-<a href="http://android-developers.blogspot.com/2011/05/introducing-viewpropertyanimator.html">blog
-post</a>.</p>
-
-<h2 id="declaring-xml">Declaring Animations in XML</h2>
-
-  <p>The property animation system lets you declare property animations with XML instead of doing
-  it programmatically. By defining your animations in XML, you can easily reuse your animations
-in multiple activities and more easily edit the animation sequence.</p>
-
-<p>To distinguish animation files that use the new property animation APIs from those that use the
-legacy <a href="{@docRoot}guide/topics/graphics/view-animation.html">view animation</a> framework,
-starting with Android 3.1, you should save the XML files for property animations in the {@code
-res/animator/} directory (instead of {@code res/anim/}). Using the {@code animator} directory name
-is optional, but necessary if you want to use the layout editor tools in the Eclipse ADT plugin (ADT
-11.0.0+), because ADT only searches the {@code res/animator/} directory for property animation
-resources.</p>
-
-<p>The following property animation classes have XML declaration support, using these
-  XML tags:</p>
-
-  <ul>
-    <li>{@link android.animation.ValueAnimator} - <code>&lt;animator&gt;</code></li>
-
-    <li>{@link android.animation.ObjectAnimator} - <code>&lt;objectAnimator&gt;</code></li>
-
-    <li>{@link android.animation.AnimatorSet} - <code>&lt;set&gt;</code></li>
-  </ul>
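-
-  <p>For example, the following is a minimal sketch of a sequential animator set declared in
-  <code>res/animator/</code> (the file name <code>property_animator.xml</code> and the
-  <code>myContext</code> and <code>myView</code> variables are assumptions for illustration), along
-  with the code that loads and starts it:</p>
-  <pre>
-&lt;set xmlns:android="http://schemas.android.com/apk/res/android"
-    android:ordering="sequentially"&gt;
-    &lt;objectAnimator
-        android:propertyName="x"
-        android:duration="500"
-        android:valueTo="400"
-        android:valueType="floatType"/&gt;
-    &lt;objectAnimator
-        android:propertyName="y"
-        android:duration="500"
-        android:valueTo="300"
-        android:valueType="floatType"/&gt;
-&lt;/set&gt;
-</pre>
-  <pre>
-// The resource name, context, and target view below are assumed for illustration.
-AnimatorSet set = (AnimatorSet) AnimatorInflater.loadAnimator(myContext, R.animator.property_animator);
-set.setTarget(myView);
-set.start();
-</pre>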
-
-<p>See <a href="{@docRoot}guide/topics/resources/animation-resource.html#Property">Animation Resources</a>
-
+<dt><strong><a href="{@docRoot}guide/topics/graphics/drawable-animation.html">Drawable
+Animation</a></strong></dt>
+<dd>Drawable animation involves displaying {@link android.graphics.drawable.Drawable} resources one
+after another, like a roll of film. This method of animation is useful if you want to animate
+things that are easier to represent with Drawable resources, such as a progression of bitmaps.</dd>
diff --git a/docs/html/guide/topics/graphics/drawable-animation.jd b/docs/html/guide/topics/graphics/drawable-animation.jd
new file mode 100644
index 0000000..65bf02f
--- /dev/null
+++ b/docs/html/guide/topics/graphics/drawable-animation.jd
@@ -0,0 +1,66 @@
+page.title=Drawable Animation
+parent.title=Animation
+parent.link=animation.html
+@jd:body
+
+  <p>Drawable animation lets you load a series of Drawable resources one after
+  another to create an animation. This is a traditional animation in the sense that it is created with a sequence of different
+  images, played in order, like a roll of film. The {@link
+  android.graphics.drawable.AnimationDrawable} class is the basis for Drawable animations.</p>
+
+  <p>While you can define the frames of an animation in your code, using the {@link
+  android.graphics.drawable.AnimationDrawable} class API, it's more simply accomplished with a
+  single XML file that lists the frames that compose the animation. The XML file for this kind
+  of animation belongs in the <code>res/drawable/</code> directory of
+  your Android project. In this case, the instructions are the order and duration for each frame of
+  the animation.</p>
+
+  <p>The XML file consists of an <code>&lt;animation-list&gt;</code> element as the root node and a
+  series of child <code>&lt;item&gt;</code> nodes that each define a frame: a drawable resource for
+  the frame and the frame duration. Here's an example XML file for a Drawable animation:</p>
+  <pre>
+&lt;animation-list xmlns:android="http://schemas.android.com/apk/res/android"
+    android:oneshot="true"&gt;
+    &lt;item android:drawable="@drawable/rocket_thrust1" android:duration="200" /&gt;
+    &lt;item android:drawable="@drawable/rocket_thrust2" android:duration="200" /&gt;
+    &lt;item android:drawable="@drawable/rocket_thrust3" android:duration="200" /&gt;
+&lt;/animation-list&gt;
+</pre>
+
+  <p>This animation runs for just three frames. Because the <code>android:oneshot</code>
+  attribute of the list is set to <var>true</var>, the animation cycles once and then stops,
+  holding on the last frame. If it is set to <var>false</var>, the animation loops. With this XML saved as
+  <code>rocket_thrust.xml</code> in the <code>res/drawable/</code> directory of the project, it can
+  be added as the background image to a View and then called to play. Here's an example Activity,
+  in which the animation is added to an {@link android.widget.ImageView} and then animated when the
+  screen is touched:</p>
+  <pre>
+AnimationDrawable rocketAnimation;
+
+public void onCreate(Bundle savedInstanceState) {
+  super.onCreate(savedInstanceState);
+  setContentView(R.layout.main);
+
+  ImageView rocketImage = (ImageView) findViewById(R.id.rocket_image);
+  rocketImage.setBackgroundResource(R.drawable.rocket_thrust);
+  rocketAnimation = (AnimationDrawable) rocketImage.getBackground();
+}
+
+public boolean onTouchEvent(MotionEvent event) {
+  if (event.getAction() == MotionEvent.ACTION_DOWN) {
+    rocketAnimation.start();
+    return true;
+  }
+  return super.onTouchEvent(event);
+}
+</pre>
+
+  <p>It's important to note that the <code>start()</code> method of the AnimationDrawable
+  cannot be called during the <code>onCreate()</code> method of your Activity, because the
+  AnimationDrawable is not yet fully attached to the window. If you want to play the animation
+  immediately, without requiring interaction, then you might want to call it from the <code>{@link
+  android.app.Activity#onWindowFocusChanged(boolean) onWindowFocusChanged()}</code> method in your
+  Activity, which will get called when Android brings your window into focus.</p>
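+
+  <p>For example, here is a minimal sketch that reuses the <code>rocketAnimation</code> field from
+  the Activity above and starts the animation as soon as the window gains focus:</p>
+  <pre>
+&#064;Override
+public void onWindowFocusChanged(boolean hasFocus) {
+  super.onWindowFocusChanged(hasFocus);
+  if (hasFocus) {
+    // The window is now attached and visible, so the animation can start safely.
+    rocketAnimation.start();
+  }
+}
+</pre>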
+
+  <p>For more information on the XML syntax, available tags and attributes, see <a href=
+  "{@docRoot}guide/topics/resources/animation-resource.html">Animation Resources</a>.</p>
diff --git a/docs/html/guide/topics/graphics/hardware-accel.jd b/docs/html/guide/topics/graphics/hardware-accel.jd
new file mode 100644
index 0000000..c8703a5
--- /dev/null
+++ b/docs/html/guide/topics/graphics/hardware-accel.jd
@@ -0,0 +1,522 @@
+page.title=Hardware Acceleration
+parent.title=Graphics
+parent.link=index.html
+@jd:body
+
+
+  <div id="qv-wrapper">
+    <div id="qv">
+      <h2>In this document</h2>
+
+      <ol>
+        <li><a href="#controlling">Controlling Hardware Acceleration</a></li>
+        <li><a href="#determining">Determining if a View is Hardware Accelerated</a></li>
+        <li><a href="#model">Android Drawing Models</a>
+
+          <ol>
+            <li><a href="#software-model">Software-based drawing model</a></li>
+            <li><a href="#hardware-model">Hardware accelerated drawing model</a></li>
+          </ol>
+        </li>
+
+        <li>
+          <a href="#unsupported">Unsupported Drawing Operations</a>
+        </li>
+
+
+
+        <li>
+          <a href="#layers">View Layers</a>
+
+          <ol>
+            <li><a href="#layers-anims">View Layers and Animations</a></li>
+          </ol>
+        </li>
+
+        <li><a href="#tips">Tips and Tricks</a></li>
+      </ol>
+
+      <h2>See also</h2>
+
+      <ol>
+        <li><a href="{@docRoot}guide/topics/graphics/opengl.html">OpenGL with the Framework
+        APIs</a></li>
+
+        <li><a href="{@docRoot}guide/topics/renderscript/index.html">RenderScript</a></li>
+      </ol>
+    </div>
+  </div>
+
+  <p>Beginning in Android 3.0 (API level 11), the Android 2D rendering pipeline is designed to
+  better support hardware acceleration. Hardware acceleration carries out all drawing operations
+  that are performed on a {@link android.view.View}'s canvas using the GPU.</p>
+
+  <p>The easiest way to enable hardware acceleration is to turn it on
+  globally for your entire application. If your application uses only standard views and {@link
+  android.graphics.drawable.Drawable}s, turning it on globally should not cause any adverse
+  effects. However, because hardware acceleration is not supported for all of the 2D drawing
+  operations, turning it on might affect parts of your application that use custom views or drawing
+  calls. Problems usually manifest themselves as invisible elements, exceptions, or wrongly
+  rendered pixels. To remedy this, Android gives you the option to enable or disable hardware
+  acceleration at the following levels:</p>
+
+  <ul>
+    <li>Application</li>
+
+    <li>Activity</li>
+
+    <li>Window</li>
+
+    <li>View</li>
+  </ul>
+
+  <p>If your application performs custom drawing, test your application on actual hardware
+devices with hardware acceleration turned on to find any problems. The <a
+href="#unsupported">Unsupported drawing operations</a> section describes known issues with
+drawing operations that cannot be hardware accelerated and how to work around them.</p>
+
+
+ <h2 id="controlling">Controlling Hardware Acceleration</h2>
+  <p>You can control hardware acceleration at the following levels:</p>
+  <ul>
+    <li>Application</li>
+
+    <li>Activity</li>
+
+    <li>Window</li>
+
+    <li>View</li>
+  </ul>
+
+  <h4>Application level</h4>
+  <p>In your Android manifest file, add the following attribute to the
+  <a href="{@docRoot}guide/topics/manifest/application-element.html">
+    <code>&lt;application&gt;</code></a> tag to enable hardware acceleration for your entire
+  application:</p>
+
+<pre>
+&lt;application android:hardwareAccelerated="true" ...&gt;
+</pre>
+
+  <h4>Activity level</h4>
+  <p>If your application does not behave properly with hardware acceleration turned on globally,
+  you can control it for individual activities as well. To enable or disable hardware acceleration
+  at the  activity level, you can use the <code>android:hardwareAccelerated</code>
+  attribute for the <a href="{@docRoot}guide/topics/manifest/activity-element.html">
+    <code>&lt;activity&gt;</code></a> element. The following example enables hardware acceleration
+for the  entire application but disables it for one activity:</p>
+
+<pre>
+&lt;application android:hardwareAccelerated="true"&gt;
+    &lt;activity ... /&gt;
+    &lt;activity android:hardwareAccelerated="false" /&gt;
+&lt;/application&gt;
+</pre>
+
+  <h4>Window level</h4>
+  <p>If you need even more fine-grained control, you can enable hardware acceleration for a given
+  window with the following code:</p>
+
+<pre>
+getWindow().setFlags(
+    WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
+    WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
+
+</pre>
+
+<p class="note"><strong>Note</strong>:  You currently cannot disable hardware acceleration at
+the window level.</p>
+
+  <h4>View level</h4>
+
+  <p>You can disable hardware acceleration for an individual view at runtime with the
+following code:</p>
+
+<pre>
+myView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
+</pre>
+
+<p class="note"><strong>Note</strong>: You currently cannot enable hardware acceleration at
+the view level. View layers have other functions besides disabling hardware acceleration. See <a
+href="#layers">View layers</a> for more information about their uses.</p>
+
+  <h2 id="determining">Determining if a View is Hardware Accelerated</h2>
+
+  <p>It is sometimes useful for an application to know whether it is currently hardware
+  accelerated, especially for things such as custom views. This is particularly useful if your
+  application does a lot of custom drawing and not all operations are properly supported by the new
+  rendering pipeline.</p>
+
+  <p>There are two different ways to check whether the application is hardware accelerated:</p>
+
+  <ul>
+    <li>{@link android.view.View#isHardwareAccelerated View.isHardwareAccelerated()} returns
+    <code>true</code> if the {@link android.view.View} is attached to a hardware accelerated
+    window.</li>
+
+    <li>{@link android.graphics.Canvas#isHardwareAccelerated Canvas.isHardwareAccelerated()}
+    returns <code>true</code> if the {@link android.graphics.Canvas} is hardware accelerated</li>
+  </ul>
+
+  <p>If you must do this check in your drawing code, use {@link
+  android.graphics.Canvas#isHardwareAccelerated Canvas.isHardwareAccelerated()} instead of {@link
+  android.view.View#isHardwareAccelerated View.isHardwareAccelerated()} when possible. When a view
+  is attached to a hardware accelerated window, it can still be drawn using a non-hardware
+  accelerated Canvas. This happens, for instance, when drawing a view into a bitmap for caching
+  purposes.</p>
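+
+  <p>For example, a custom view's <code>onDraw()</code> method might branch on the canvas type. The
+  following is only a sketch; the two helper methods are hypothetical placeholders for your own
+  drawing code:</p>
+<pre>
+&#064;Override
+protected void onDraw(Canvas canvas) {
+    if (canvas.isHardwareAccelerated()) {
+        // The hardware pipeline is in use; rely only on supported operations.
+        drawWithSupportedOperations(canvas);   // hypothetical helper
+    } else {
+        // Drawing into a software Canvas, for example into a cached bitmap.
+        drawWithSoftwareFallback(canvas);      // hypothetical helper
+    }
+}
+</pre>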
+
+
+  <h2 id="model">Android Drawing Models</h2>
+
+  <p>When hardware acceleration is enabled, the Android framework uses a new drawing model that
+  relies on <em>display lists</em> to render your application to the screen. To fully understand
+  display lists and how they might affect your application, it is useful to understand how Android
+  draws views without hardware acceleration as well. The following sections describe the
+  software-based  and hardware-accelerated drawing models.</p>
+
+<h3>Software-based drawing model</h3>
+<p>In the software drawing model, views are drawn with the following two steps:</p>
+  <ol>
+    <li>Invalidate the hierarchy</li>
+
+    <li>Draw the hierarchy</li>
+  </ol>
+
+  <p>Whenever an application needs to update a part of its UI, it invokes {@link
+  android.view.View#invalidate invalidate()} (or one of its variants) on any view that has changed
+  content. The invalidation messages are propagated all the way up the view hierarchy to compute
+  the regions of the screen that need to be redrawn (the dirty region). The Android system then
+  draws any view in the hierarchy that intersects with the dirty region. Unfortunately, there are
+  two drawbacks to this drawing model:</p>
+  <ul>
+    <li>First, this model requires execution of a lot of code on every draw pass. For example, if
+your application calls {@link android.view.View#invalidate invalidate()} on a button and that
+button sits on top of another view, the Android system redraws the view even though it hasn't
+changed.</li>
+    <li>The second issue is that the drawing model can hide bugs in your application. Since the
+  Android system redraws views when they intersect the dirty region, a view whose content you
+  changed might be redrawn even though {@link android.view.View#invalidate invalidate()} was not
+  called on it. When this happens, you are relying on another view being invalidated to obtain the
+  proper behavior. This behavior can change every time you modify your application. Because of
+  this, you should always call {@link android.view.View#invalidate invalidate()} on your custom
+  views whenever you modify data or state that affects the view’s drawing code.</li>
+</ul>
+
+  <p class="note"><strong>Note</strong>: Android views automatically call {@link
+  android.view.View#invalidate invalidate()} when their properties change, such as the background
+  color or the text in a {@link android.widget.TextView}.</p>
+
+  <h3>Hardware accelerated drawing model</h3>
+  <p>The Android system still uses {@link android.view.View#invalidate invalidate()} and {@link
+  android.view.View#draw draw()} to request screen updates and to render views, but handles the
+  actual drawing differently. Instead of executing the drawing commands immediately, the Android
+  system records them inside display lists, which contain the output of the view hierarchy’s
+  drawing code. Another optimization is that the Android system only needs to record and update
+  display lists for views marked dirty by an {@link android.view.View#invalidate invalidate()}
+  call. Views that have not been invalidated can be redrawn simply by re-issuing the previously
+  recorded display list. The new drawing model contains three stages:</p>
+
+  <ol>
+    <li>Invalidate the hierarchy</li>
+
+    <li>Record and update display lists</li>
+
+    <li>Draw the display lists</li>
+  </ol>
+
+  <p>With this model, you cannot rely on a view intersecting the dirty region to have its {@link
+  android.view.View#draw draw()} method executed. To ensure that the Android system records a
+  view’s display list, you must call {@link android.view.View#invalidate invalidate()}. Forgetting
+  to do so causes a view to look the same even after changing it, which is an easier bug to find if
+  it happens.</p>
+
+  <p>Using display lists also benefits animation performance because setting specific properties,
+  such as alpha or rotation, does not require invalidating the targeted view (it is done
+  automatically). This optimization also applies to views with display lists (any view when your
+  application is hardware accelerated). For example, assume there is a {@link
+  android.widget.LinearLayout} that contains a {@link android.widget.ListView} above a {@link
+  android.widget.Button}. The display list for the {@link android.widget.LinearLayout} looks like
+  this:</p>
+
+  <ul>
+    <li>DrawDisplayList(ListView)</li>
+
+    <li>DrawDisplayList(Button)</li>
+  </ul>
+
+  <p>Assume now that you want to change the {@link android.widget.ListView}'s opacity. After
+  invoking <code>setAlpha(0.5f)</code> on the {@link android.widget.ListView}, the display list now
+  contains this:</p>
+
+  <ul>
+    <li>SaveLayerAlpha(0.5)</li>
+
+    <li>DrawDisplayList(ListView)</li>
+
+    <li>Restore</li>
+
+    <li>DrawDisplayList(Button)</li>
+  </ul>
+
+  <p>The complex drawing code of {@link android.widget.ListView} was not executed. Instead, the
+  system only updated the display list of the much simpler {@link android.widget.LinearLayout}. In
+  an application without hardware acceleration enabled, the drawing code of both the list and its
+  parent is executed again.</p>
+
+  <h2 id="unsupported">Unsupported Drawing Operations</h2>
+
+  <p>When hardware accelerated, the 2D rendering pipeline supports the most commonly used {@link
+  android.graphics.Canvas} drawing operations as well as many less-used operations. All of the
+  drawing operations that are used to render applications that ship with Android, default widgets
+  and layouts, and common advanced visual effects such as reflections and tiled textures are
+  supported. The following list describes known operations that are <strong>not supported</strong>
+  with hardware acceleration:</p>
+
+  <ul>
+    <li>
+      <strong>Canvas</strong>
+
+      <ul>
+        <li>{@link android.graphics.Canvas#clipPath clipPath()}</li>
+
+        <li>{@link android.graphics.Canvas#clipRegion clipRegion()}</li>
+
+        <li>{@link android.graphics.Canvas#drawPicture drawPicture()}</li>
+
+        <li>{@link android.graphics.Canvas#drawPosText drawPosText()}</li>
+
+        <li>{@link android.graphics.Canvas#drawTextOnPath drawTextOnPath()}</li>
+
+        <li>{@link android.graphics.Canvas#drawVertices drawVertices()}</li>
+      </ul>
+    </li>
+
+    <li>
+      <strong>Paint</strong>
+
+      <ul>
+        <li>{@link android.graphics.Paint#setLinearText setLinearText()}</li>
+
+        <li>{@link android.graphics.Paint#setMaskFilter setMaskFilter()}</li>
+
+        <li>{@link android.graphics.Paint#setRasterizer setRasterizer()}</li>
+      </ul>
+    </li>
+  </ul>
+
+  <p>In addition, some operations behave differently with hardware acceleration enabled:</p>
+
+  <ul>
+    <li>
+      <strong>Canvas</strong>
+
+      <ul>
+        <li>{@link android.graphics.Canvas#clipRect clipRect()}: <code>XOR</code>,
+        <code>Difference</code> and <code>ReverseDifference</code> clip modes are ignored. 3D
+        transforms do not apply to the clip rectangle</li>
+
+        <li>{@link android.graphics.Canvas#drawBitmapMesh drawBitmapMesh()}: colors array is
+        ignored</li>
+
+        <li>{@link android.graphics.Canvas#drawLines drawLines()}: anti-aliasing is not
+        supported</li>
+
+        <li>{@link android.graphics.Canvas#setDrawFilter setDrawFilter()}: can be set, but is
+        ignored</li>
+      </ul>
+    </li>
+
+    <li>
+      <strong>Paint</strong>
+
+      <ul>
+        <li>{@link android.graphics.Paint#setDither setDither()}: ignored</li>
+
+        <li>{@link android.graphics.Paint#setFilterBitmap setFilterBitmap()}: filtering is always
+        on</li>
+
+        <li>{@link android.graphics.Paint#setShadowLayer setShadowLayer()}: works with text
+        only</li>
+      </ul>
+    </li>
+
+    <li>
+      <strong>ComposeShader</strong>
+
+      <ul>
+        <li>{@link android.graphics.ComposeShader} can only contain shaders of different types (a
+        {@link android.graphics.BitmapShader} and a {@link android.graphics.LinearGradient} for
+        instance, but not two instances of {@link android.graphics.BitmapShader} )</li>
+
+        <li>{@link android.graphics.ComposeShader} cannot contain a {@link
+        android.graphics.ComposeShader}</li>
+      </ul>
+    </li>
+  </ul>
+
+  <p>If your application is affected by any of these missing features or limitations, you can turn
+  off hardware acceleration for just the affected portion of your application by calling
+  {@link android.view.View#setLayerType setLayerType(View.LAYER_TYPE_SOFTWARE, null)}. This way,
+you can still take advantage of hardware acceleration everywhere else. See <a
+href="#controlling">Controlling Hardware Acceleration</a> for more information on how to enable and
+disable hardware acceleration at different levels in your application.</p>
+
+
+
+  <h2 id="layers">View Layers</h2>
+
+  <p>In all versions of Android, views have had the ability to render into off-screen buffers,
+either by using a view's drawing cache, or by using {@link android.graphics.Canvas#saveLayer
+  Canvas.saveLayer()}. Off-screen buffers, or layers, have several uses. You can use them to get
+  better performance when animating complex views or to apply composition effects. For instance,
+  you can implement fade effects using <code>Canvas.saveLayer()</code> to temporarily render a view
+  into a layer and then composite it back on screen with an opacity factor.</p>
+
+  <p>Beginning in Android 3.0 (API level 11), you have more control over how and when to use layers
+  with the {@link android.view.View#setLayerType View.setLayerType()} method. This API takes two
+  parameters: the type of layer you want to use and an optional {@link android.graphics.Paint}
+  object that describes how the layer should be composited. You can use the {@link
+  android.graphics.Paint} parameter to apply color filters, special blending modes, or opacity to a
+  layer. A view can use one of three layer types:</p>
+
+  <ul>
+    <li>{@link android.view.View#LAYER_TYPE_NONE}: The view is rendered normally and is not backed
+    by an off-screen buffer. This is the default behavior.</li>
+
+    <li>{@link android.view.View#LAYER_TYPE_HARDWARE}: The view is rendered in hardware into a
+    hardware texture if the application is hardware accelerated. If the application is not hardware
+    accelerated, this layer type behaves the same as {@link
+    android.view.View#LAYER_TYPE_SOFTWARE}.</li>
+
+    <li>{@link android.view.View#LAYER_TYPE_SOFTWARE}: The view is rendered in software into a
+    bitmap.</li>
+  </ul>
+
+  <p>The type of layer you use depends on your goal:</p>
+
+  <ul>
+    <li><strong>Performance</strong>: Use a hardware layer type to render a view into a hardware
+    texture. Once a view is rendered into a layer, its drawing code does not have to be executed
+    until the view calls {@link android.view.View#invalidate invalidate()}. Some animations, such as
+    alpha animations, can then be applied directly onto the layer, which is very efficient
+    for the GPU to do.</li>
+
+    <li><strong>Visual effects</strong>: Use a hardware or software layer type and a {@link
+    android.graphics.Paint} to apply special visual treatments to a view. For instance, you can
+    draw a view in black and white using a {@link
+    android.graphics.ColorMatrixColorFilter}, as shown in the sketch after this list.</li>
+
+    <li><strong>Compatibility</strong>: Use a software layer type to force a view to be rendered in
+    software. If a view that is hardware accelerated (for instance, because your whole
+    application is hardware accelerated) has rendering problems, this is an easy way to work
+    around limitations of the hardware rendering pipeline.</li>
+  </ul>
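+
+  <p>As a sketch of the visual effects case above (the <code>myView</code> field is an assumption
+  for illustration), the following composites a view's layer through a saturation-free color filter
+  so the view is drawn in grayscale:</p>
+  <pre>
+ColorMatrix matrix = new ColorMatrix();
+matrix.setSaturation(0f);
+Paint grayscalePaint = new Paint();
+grayscalePaint.setColorFilter(new ColorMatrixColorFilter(matrix));
+// The Paint describes how the layer is composited back on screen; myView is an assumed field.
+myView.setLayerType(View.LAYER_TYPE_HARDWARE, grayscalePaint);
+</pre>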
+
+  <h3 id="layers-anims">View layers and animations</h3>
+
+  <p>Hardware layers can deliver faster and smoother animations when your application
+is hardware accelerated. Running an animation at 60 frames per second is not always possible when
+animating complex views that issue a lot of drawing operations. This can be alleviated by
+using hardware layers to render the view to a hardware texture. The hardware texture can
+then be used to animate the view, eliminating the need for the view to constantly redraw itself
+when it is being animated. The view is not redrawn unless you change the view's
+properties, which calls {@link android.view.View#invalidate invalidate()}, or if you call {@link
+android.view.View#invalidate invalidate()} manually. If you are running an animation in
+your application and do not obtain the smooth results you want, consider enabling hardware layers on
+your animated views.</p>
+
+  <p>When a view is backed by a hardware layer, some of its properties are handled by the way the
+  layer is composited on screen. Setting these properties will be efficient because they do not
+  require the view to be invalidated and redrawn. The following properties affect the way
+  the layer is composited. Calling the setter for any of these properties results in optimal
+  invalidation and no redrawing of the targeted view:</p>
+
+  <ul>
+    <li><code>alpha</code>: Changes the layer's opacity</li>
+
+    <li><code>x</code>, <code>y</code>, <code>translationX</code>, <code>translationY</code>:
+Changes the layer's position</li>
+
+    <li><code>scaleX</code>, <code>scaleY</code>: Changes the layer's size</li>
+
+    <li><code>rotation</code>, <code>rotationX</code>, <code>rotationY</code>: Changes the
+    layer's orientation in 3D space</li>
+
+    <li><code>pivotX</code>, <code>pivotY</code>: Changes the layer's transformation origin</li>
+  </ul>
+
+  <p>These properties are the names used when animating a view with an {@link
+  android.animation.ObjectAnimator}. If you want to access these properties, call the appropriate
+  setter or getter. For instance, to modify the alpha property, call {@link
+  android.view.View#setAlpha setAlpha()}. The following code snippet shows the most efficient way
+  to rotate a view in 3D around the Y-axis:</p>
+  <pre>
+view.setLayerType(View.LAYER_TYPE_HARDWARE, null);
+ObjectAnimator.ofFloat(view, "rotationY", 180).start();
+</pre>
+
+  <p>Because hardware layers consume video memory, it is highly recommended that you enable them
+only for the duration of the animation and then disable them after the animation is done. You
+can accomplish this using animation listeners:</p>
+  <pre>
+view.setLayerType(View.LAYER_TYPE_HARDWARE, null);
+ObjectAnimator animator = ObjectAnimator.ofFloat(view, "rotationY", 180);
+animator.addListener(new AnimatorListenerAdapter() {
+    &#064;Override
+    public void onAnimationEnd(Animator animation) {
+        view.setLayerType(View.LAYER_TYPE_NONE, null);
+    }
+});
+animator.start();
+</pre>
+
+  <p>For more information on property animation, see <a href=
+  "{@docRoot}guide/topics/graphics/prop-animation.html">Property Animation</a>.</p>
+
+ <h2 id="tips">Tips and Tricks</h2>
+
+  <p>Switching to hardware accelerated 2D graphics can instantly increase performance, but you
+  should still design your application to use the GPU effectively by following these
+  recommendations:</p>
+
+  <dl>
+    <dt><strong>Reduce the number of views in your application</strong></dt>
+
+    <dd>The more views the system has to draw, the slower it will be. This applies to the software
+    rendering pipeline as well. Reducing views is one of the easiest ways to optimize your UI.</dd>
+
+    <dt><strong>Avoid overdraw</strong></dt>
+
+    <dd>Do not draw too many layers on top of each other. Remove any views that are completely
+    obscured by other opaque views on top of them. If you need to draw several layers blended on top
+    of each other, consider merging them into a single layer. A good rule of thumb with current
+    hardware is to not draw more than 2.5 times the number of pixels on screen per frame
+    (transparent pixels in a bitmap count!).</dd>
+
+    <dt><strong>Don't create render objects in draw methods</strong></dt>
+
+    <dd>A common mistake is to create a new {@link android.graphics.Paint} or a new {@link
+android.graphics.Path} every time a rendering method is invoked. This forces the garbage
+collector to run more often and also bypasses caches and optimizations in the hardware
+pipeline. Instead, allocate these objects once and reuse them, as shown in the sketch after
+this list.</dd>
+
+    <dt><strong>Don't modify shapes too often</strong></dt>
+
+    <dd>Complex shapes such as paths and circles are rendered using texture masks. Every
+    time you create or modify a path, the hardware pipeline creates a new mask, which can be
+    expensive.</dd>
+
+    <dt><strong>Don't modify bitmaps too often</strong></dt>
+
+    <dd>Every time you change the content of a bitmap, it is uploaded again as a GPU texture the
+    next time you draw it.</dd>
+
+    <dt><strong>Use alpha with care</strong></dt>
+
+    <dd>When you make a view translucent using {@link android.view.View#setAlpha setAlpha()},
+    {@link android.view.animation.AlphaAnimation}, or {@link android.animation.ObjectAnimator}, it
+    is rendered in an off-screen buffer which doubles the required fill-rate. When applying alpha
+    on very large views, consider setting the view's layer type to
+    <code>LAYER_TYPE_HARDWARE</code>.</dd>
+  </dl>
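+
+  <p>As an illustration of the render-object tip above, the following sketch (using a
+  hypothetical <code>ChartView</code>) allocates its {@link android.graphics.Paint} once as a
+  field instead of inside {@link android.view.View#onDraw onDraw()}:</p>
+  <pre>
+public class ChartView extends View {
+    // Allocated once and reused; this avoids extra garbage collection and lets the
+    // hardware pipeline cache state associated with the Paint.
+    private final Paint mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
+
+    public ChartView(Context context) {
+        super(context);
+        mLinePaint.setColor(Color.RED);
+        mLinePaint.setStrokeWidth(4f);
+    }
+
+    &#064;Override
+    protected void onDraw(Canvas canvas) {
+        // No allocations here; only drawing calls
+        canvas.drawLine(0, 0, getWidth(), getHeight(), mLinePaint);
+    }
+}
+</pre>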
diff --git a/docs/html/guide/topics/graphics/index.jd b/docs/html/guide/topics/graphics/index.jd
index f0a923a..ffa9a39 100644
--- a/docs/html/guide/topics/graphics/index.jd
+++ b/docs/html/guide/topics/graphics/index.jd
@@ -3,208 +3,49 @@
 
 <div id="qv-wrapper">
   <div id="qv">
-  <h2>In this document</h2>
+  <h2>Topics</h2>
   <ol>
-    <li><a href="#options">Consider your Options</a></li>
-    <li><a href="#draw-to-view">Simple Graphics Inside a View</a></li>
-    <li><a href="#draw-with-canvas">Draw with a Canvas</a>
-    <ol>
-      <li><a href="#on-view">On a View</a></li>
-      <li><a href="#on-surfaceview">On a SurfaceView</a></li>
-    </ol>
-    </li>
-  </ol>
-  <h2>See also</h2>
-  <ol>
-    <li><a href="{@docRoot}guide/topics/graphics/opengl.html">3D with OpenGL</a></li>
-    <li><a href="{@docRoot}guide/topics/renderscript/index.html">RenderScript</a></li>
+    <li><a href="{@docRoot}guide/topics/graphics/canvas.html">Canvas and Drawables</a></li>
+    <li><a href="{@docRoot}guide/topics/graphics/hardware-accel.html">Hardware Acceleration</a></li>
+    <li><a href="{@docRoot}guide/topics/graphics/opengl.html">OpenGL</a></li>
   </ol>
   </div>
 </div>
 
-<p>Android graphics are powered by a custom 2D graphics library, and the framework provides
-support for high performance 3D graphics in the form of OpenGL ES and RenderScript. The most
-common 2D graphics APIs can be found in the {@link android.graphics.drawable drawable package}.
-OpenGL APIs are available from the Khronos {@link javax.microedition.khronos.opengles OpenGL ES} and
-the {@link android.opengl} packages. The RenderScript APIs are available in the 
-{@link android.renderscript} package.</p>
-
-<p>When starting a project, it's important to consider exactly what your graphical demands will be. 
+<p>When writing an application, it's important to consider exactly what your graphical demands will be.
 Varying graphical tasks are best accomplished with varying techniques. For example, graphics and animations
 for a rather static application should be implemented much differently than graphics and animations
-for an interactive game or 3D rendering.</p>
-
-<p>Here, we'll discuss a few of the options you have for drawing graphics on Android, 
-and which tasks they're best suited for.</p>
-
-<p>If you're specifically looking for information on drawing 3D graphics, this page won't
-help a lot. However, the information below about how to <a href="#draw-with-canvas">Draw with a
-Canvas</a> (and the section on SurfaceView), will give you a quick idea of how you should draw to
-the View hierarchy. For more information on Android's 3D graphics APIs, see
-the <a href="opengl.html">3D with OpenGL</a> and  
-<a href="{@docRoot}guide/topics/renderscript/index.html">RenderScript</a> documents.</p>
-
-
-<h2 id="options">Consider your Options</h2>
-
-<p>When drawing 2D graphics, you'll typically do so in one of two ways:</p>
-<ol type="a">
-  <li>Draw your graphics or animations into a View object from your layout. In this manner, 
-  the drawing (and any animation) of your graphics is handled by the system's 
-  normal View hierarchy drawing process &mdash; you simply define the graphics to go inside the View.</li>
-  <li>Draw your graphics directly to a Canvas. This way, you personally call the appropriate class's 
-  <code>draw()</code> method (passing it your Canvas), or one of the Canvas <code>draw...()</code> methods (like 
-  <code>{@link android.graphics.Canvas#drawPicture(Picture,Rect) drawPicture()}</code>). In doing so, you are also in
-  control of any animation.</li>
-</ol>
-
-<p>Option "a," drawing to a View, is your best choice when you want to draw simple graphics that do not
-need to change dynamically and are not part of a performance-intensive game. For example, you should
-draw your graphics into a View when you want to display a static graphic or predefined animation, within 
-an otherwise static application. Read <a href="#draw-to-view">Simple Graphics Inside a View</a>.</li>
-
-<p>Option "b," drawing to a Canvas, is better when your application needs to regularly re-draw itself.
-Basically, any video game should be drawing to the Canvas on its own. However, there's more than 
-one way to do this: </p>
-<ul>
-  <li>In the same thread as your UI Activity, wherein you create a custom View component in
-  your layout, call <code>{@link android.view.View#invalidate()}</code> and then handle the 
-  <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code> callback..</li>
-  <li>Or, in a separate thread, wherein you manage a {@link android.view.SurfaceView} and 
-  perform draws to the Canvas as fast as your thread is capable 
-  (you do not need to request <code>invalidate()</code>).</li>
-</ul>
-<p>...Begin by reading <a href="#draw-with-canvas">Draw with a Canvas</a>.</p>
-
-<h2 id="draw-to-view">Simple Graphics Inside a View</h2>
-
-<p>If you'll be drawing some simple graphics (images, shapes, colors, pre-defined animations, etc.),
-then you should probably just draw to the background of a View or
-to the content of an {@link android.widget.ImageView} in your layout.
-In this case, you can skip the rest of this document and learn how to
-draw graphics and animations in the <a href="2d-graphics.html">2D Graphics</a> document.
+for an interactive game. Here, we'll discuss a few of the options you have for drawing graphics
+on Android and which tasks they're best suited for.
 </p>
 
+<dl>
+<dt><strong><a href="{@docRoot}guide/topics/graphics/2d-graphics.html">Canvas and
+Drawables</a></strong></dt>
+<dd>Android provides a set of {@link android.view.View} widgets that offer general functionality
+for a wide array of user interfaces. You can also extend these widgets to modify the way they
+look or behave. In addition, you can do your own custom 2D rendering using the various drawing
+methods contained in the {@link android.graphics.Canvas} class or create {@link
+android.graphics.drawable.Drawable} objects for things such as textured buttons or frame-by-frame
+animations.</dd>
 
-<h2 id="draw-with-canvas">Draw with a Canvas</h2>
+<dt><strong><a href="{@docRoot}guide/topics/graphics/hardware-accel.html">Hardware
+Acceleration</a></strong></dt>
+<dd>Beginning in Android 3.0, you can hardware accelerate the majority of
+the drawing done by the Canvas APIs to further increase their performance.</dd>
 
-<p>When you're writing an application in which you would like to perform specialized drawing
-and/or control the animation of graphics,
-you should do so by drawing through a {@link android.graphics.Canvas}. A Canvas works for you as
-a pretense, or interface, to the actual surface upon which your graphics will be drawn &mdash; it
-holds all of your "draw" calls. Via the Canvas, your drawing is actually performed upon an 
-underlying {@link android.graphics.Bitmap}, which is placed into the window.</p>
-
-<p>In the event that you're drawing within the <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code>
-callback method, the Canvas is provided for you and you need only place your drawing calls upon it.
-You can also acquire a Canvas from <code>{@link android.view.SurfaceHolder#lockCanvas() SurfaceHolder.lockCanvas()}</code>,
-when dealing with a SurfaceView object. (Both of these scenarios are discussed in the following sections.)
-However, if you need to create a new Canvas, then you must define the {@link android.graphics.Bitmap} 
-upon which drawing will actually be performed. The Bitmap is always required for a Canvas. You can set up
-a new Canvas like this:</p>
-<pre>
-Bitmap b = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
-Canvas c = new Canvas(b);
-</pre>
-
-<p>Now your Canvas will draw onto the defined Bitmap. After drawing upon it with the Canvas, you can then carry your 
-Bitmap to another Canvas with one of the <code>{@link android.graphics.Canvas#drawBitmap(Bitmap,Matrix,Paint)
-Canvas.drawBitmap(Bitmap,...)}</code> methods. It's recommended that you ultimately draw your final
-graphics through a Canvas offered to you
-by <code>{@link android.view.View#onDraw(Canvas) View.onDraw()}</code> or 
-<code>{@link android.view.SurfaceHolder#lockCanvas() SurfaceHolder.lockCanvas()}</code> (see the following sections).</p>
-
-<p>The {@link android.graphics.Canvas} class has its own set of drawing methods that you can use, 
-like <code>drawBitmap(...)</code>, <code>drawRect(...)</code>, <code>drawText(...)</code>, and many more.
-Other classes that you might use also have <code>draw()</code> methods. For example, you'll probably
-have some {@link android.graphics.drawable.Drawable} objects that you want to put on the Canvas. Drawable
-has its own <code>{@link android.graphics.drawable.Drawable#draw(Canvas) draw()}</code> method 
-that takes your Canvas as an argument.</p>
-
-
-<h3 id="on-view">On a View</h3>
-
-<p>If your application does not require a significant amount of processing or
-frame-rate speed (perhaps for a chess game, a snake game, 
-or another slowly-animated application), then you should consider creating a custom View component
-and drawing with a Canvas in <code>{@link android.view.View#onDraw(Canvas) View.onDraw()}</code>. 
-The most convenient aspect of doing so is that the Android framework will
-provide you with a pre-defined Canvas to which you will place your drawing calls.</p>
-
-<p>To start, extend the {@link android.view.View} class (or descendant thereof) and define
-the <code>{@link android.view.View#onDraw(Canvas) onDraw()}</code> callback method. This method will be called by the Android 
-framework to request that your View draw itself. This is where you will perform all your calls
-to draw through the {@link android.graphics.Canvas}, which is passed to you through the <code>onDraw()</code> callback.</p>
-
-<p>The Android framework will only call <code>onDraw()</code> as necessary. Each time that 
-your application is prepared to be drawn, you must request your View be invalidated by calling
-<code>{@link android.view.View#invalidate()}</code>. This indicates that you'd like your View to be drawn and
-Android will then call your <code>onDraw()</code> method (though is not guaranteed that the callback will
-be instantaneous). </p>
-
-<p>Inside your View component's <code>onDraw()</code>, use the Canvas given to you for all your drawing,
-using various <code>Canvas.draw...()</code> methods, or other class <code>draw()</code> methods that
-take your Canvas as an argument. Once your <code>onDraw()</code> is complete, the Android framework will 
-use your Canvas to draw a Bitmap handled by the system.</p>
-
-<p class="note"><strong>Note: </strong> In order to request an invalidate from a thread other than your main
-Activity's thread, you must call <code>{@link android.view.View#postInvalidate()}</code>.</p>
-
-<p>Also read <a href="{@docRoot}guide/topics/ui/custom-components.html">Custom Components</a>
-for a guide to extending a View class, and <a href="2d-graphics.html">2D Graphics: Drawables</a> for
-information on using Drawable objects like images from your resources and other primitive shapes.</p>
-
-<p>For a sample application, see the Snake game, in the SDK samples folder:
-<code>&lt;your-sdk-directory>/samples/Snake/</code>.</p>
-
-<h3 id="on-surfaceview">On a SurfaceView</h3>
-
-<p>The {@link android.view.SurfaceView} is a special subclass of View that offers a dedicated
-drawing surface within the View hierarchy. The aim is to offer this drawing surface to
-an application's secondary thread, so that the application isn't required
-to wait until the system's View hierarchy is ready to draw. Instead, a secondary thread
-that has reference to a SurfaceView can draw to its own Canvas at its own pace.</p>
-
-<p>To begin, you need to create a new class that extends {@link android.view.SurfaceView}. The class should also 
-implement {@link android.view.SurfaceHolder.Callback}. This subclass is an interface that will notify you
-with information about the underlying {@link android.view.Surface}, such as when it is created, changed, or destroyed. 
-These events  are important so that you know when you can start drawing, whether you need 
-to make adjustments based on new surface properties, and when to stop drawing and potentially 
-kill some tasks. Inside your SurfaceView class is also a good place to define your secondary Thread class, which will
-perform all the drawing procedures to your Canvas.</p>
-
-<p>Instead of handling the Surface object directly, you should handle it via
-a {@link android.view.SurfaceHolder}. So, when your SurfaceView is initialized, get the SurfaceHolder by calling 
-<code>{@link android.view.SurfaceView#getHolder()}</code>. You should then notify the SurfaceHolder that you'd
-like to receive SurfaceHolder callbacks (from {@link android.view.SurfaceHolder.Callback}) by calling 
-{@link android.view.SurfaceHolder#addCallback(SurfaceHolder.Callback) addCallback()} 
-(pass it <var>this</var>). Then override each of the 
-{@link android.view.SurfaceHolder.Callback} methods inside your SurfaceView class.</p>
-
-<p>In order to draw to the Surface Canvas from within your second thread, you must pass the thread your SurfaceHandler
-and retrieve the Canvas with <code>{@link android.view.SurfaceHolder#lockCanvas() lockCanvas()}</code>. 
-You can now take the Canvas given to you by the SurfaceHolder and do your necessary drawing upon it. 
-Once you're done drawing with the Canvas, call 
-<code>{@link android.view.SurfaceHolder#unlockCanvasAndPost(Canvas) unlockCanvasAndPost()}</code>, passing it
-your Canvas object. The Surface will now draw the Canvas as you left it. Perform this sequence of locking and 
-unlocking the canvas each time you want to redraw.</p>
-
-<p class="note"><strong>Note:</strong> On each pass you retrieve the Canvas from the SurfaceHolder, 
-the previous state of the Canvas will be retained. In order to properly animate your graphics, you must re-paint the 
-entire surface. For example, you can clear the previous state of the Canvas by filling in a color
-with <code>{@link android.graphics.Canvas#drawColor(int) drawColor()}</code> or setting a background image
-with <code>{@link android.graphics.Canvas#drawBitmap(Bitmap,Rect,RectF,Paint) drawBitmap()}</code>. Otherwise,
-you will see traces of the drawings you previously performed.</p>
-
-
-<p>For a sample application, see the Lunar Lander game, in the SDK samples folder:
-<code>&lt;your-sdk-directory>/samples/LunarLander/</code>. Or,
-browse the source in the <a href="{@docRoot}guide/samples/index.html">Sample Code</a> section.</p>
-
-
-
-
-
-
+<dt><strong><a href="{@docRoot}guide/topics/graphics/opengl.html">OpenGL</a></strong></dt>
+<dd>Android supports OpenGL ES 1.0 and 2.0, both with Android framework APIs and natively
+through the Native Development Kit (NDK). Using the framework APIs is desirable when you want to
+add a few graphical enhancements to your application that are not supported with the Canvas APIs,
+or if you desire platform independence and don't demand high performance. There is a performance
+hit in using the framework APIs compared to the NDK, so for many graphics-intensive applications
+such as games, using the NDK is beneficial. (Note that you can still get adequate performance
+using the framework APIs; for example, the Google Body app is developed entirely with the
+framework APIs.) OpenGL with the NDK is also useful if you have a lot of native code that you
+want to port over to Android. For more information about using the NDK, read the docs in the
+<code>docs/</code> directory of the <a href="{@docRoot}sdk/ndk/index.html">NDK download</a>.</dd>
+</dl>
 
 
diff --git a/docs/html/guide/topics/graphics/opengl.jd b/docs/html/guide/topics/graphics/opengl.jd
index b750858..231f4ef 100644
--- a/docs/html/guide/topics/graphics/opengl.jd
+++ b/docs/html/guide/topics/graphics/opengl.jd
@@ -1,4 +1,4 @@
-page.title=3D with OpenGL
+page.title=OpenGL
 parent.title=Graphics
 parent.link=index.html
 @jd:body
@@ -6,7 +6,7 @@
 <div id="qv-wrapper">
   <div id="qv">
     <h2>In this document</h2>
-    
+
     <ol>
       <li><a href="#basics">The Basics</a>
         <ol>
@@ -14,7 +14,7 @@
         </ol>
       <li><a href="#manifest">Declaring OpenGL Requirements</a></li>
       </li>
-      <li><a href="#coordinate-mapping">Mapping Coordinates for Drawn Objects</a>  
+      <li><a href="#coordinate-mapping">Mapping Coordinates for Drawn Objects</a>
         <ol>
           <li><a href="#proj-es1">Projection and camera in ES 1.0</a></li>
           <li><a href="#proj-es1">Projection and camera in ES 2.0</a></li>
@@ -78,8 +78,7 @@
 Kit (NDK). This topic focuses on the Android framework interfaces. For more information about the
 NDK, see the <a href="{@docRoot}sdk/ndk/index.html">Android NDK</a>.
 
-<p>
-  There are two foundational classes in the Android framework that let you create and manipulate
+<p>There are two foundational classes in the Android framework that let you create and manipulate
 graphics with the OpenGL ES API: {@link android.opengl.GLSurfaceView} and {@link
 android.opengl.GLSurfaceView.Renderer}. If your goal is to use OpenGL in your Android application,
 understanding how to implement these classes in an activity should be your first objective.
@@ -89,22 +88,22 @@
   <dt><strong>{@link android.opengl.GLSurfaceView}</strong></dt>
   <dd>This class is a {@link android.view.View} where you can draw and manipulate objects using
     OpenGL API calls and is similar in function to a {@link android.view.SurfaceView}. You can use
-    this class by creating an instance of {@link android.opengl.GLSurfaceView} and adding your 
+    this class by creating an instance of {@link android.opengl.GLSurfaceView} and adding your
     {@link android.opengl.GLSurfaceView.Renderer Renderer} to it. However, if you want to capture
     touch screen events, you should extend the {@link android.opengl.GLSurfaceView} class to
-    implement the touch listeners, as shown in OpenGL Tutorials for 
-    <a href="{@docRoot}resources/tutorials/opengl/opengl-es10.html#touch">ES 1.0</a>, 
+    implement the touch listeners, as shown in OpenGL Tutorials for
+    <a href="{@docRoot}resources/tutorials/opengl/opengl-es10.html#touch">ES 1.0</a>,
     <a href="{@docRoot}resources/tutorials/opengl/opengl-es20.html#touch">ES 2.0</a> and the <a
 href="{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/graphics/TouchRotateActivity
 .html">TouchRotateActivity</a> sample.</dd>
-  
+
   <dt><strong>{@link android.opengl.GLSurfaceView.Renderer}</strong></dt>
   <dd>This interface defines the methods required for drawing graphics in an OpenGL {@link
     android.opengl.GLSurfaceView}. You must provide an implementation of this interface as a
     separate class and attach it to your {@link android.opengl.GLSurfaceView} instance using
     {@link android.opengl.GLSurfaceView#setRenderer(android.opengl.GLSurfaceView.Renderer)
     GLSurfaceView.setRenderer()}.
-    
+
     <p>The {@link android.opengl.GLSurfaceView.Renderer} interface requires that you implement the
       following methods:</p>
     <ul>
@@ -129,7 +128,7 @@
     android.opengl.GLSurfaceView} geometry changes, including changes in size of the {@link
     android.opengl.GLSurfaceView} or orientation of the device screen. For example, the system calls
     this method when the device changes from portrait to landscape orientation. Use this method to
-    respond to changes in the {@link android.opengl.GLSurfaceView} container. 
+    respond to changes in the {@link android.opengl.GLSurfaceView} container.
       </li>
     </ul>
     </dd>
@@ -173,13 +172,13 @@
 </ul>
 
 <p>If you'd like to start building an app with OpenGL right away, have a look at the tutorials for
-<a href="{@docRoot}resources/tutorials/opengl/opengl-es10.html">OpenGL ES 1.0</a> or 
+<a href="{@docRoot}resources/tutorials/opengl/opengl-es10.html">OpenGL ES 1.0</a> or
 <a href="{@docRoot}resources/tutorials/opengl/opengl-es20.html">OpenGL ES 2.0</a>!
 </p>
 
 <h2 id="manifest">Declaring OpenGL Requirements</h2>
 <p>If your application uses OpenGL features that are not available on all devices, you must include
-these requirements in your <a 
+these requirements in your <a
 href="{@docRoot}guide/topics/manifest/manifest-intro.html">AndroidManifest.xml</a></code> file.
 Here are the most common OpenGL manifest declarations:</p>
 
@@ -200,14 +199,14 @@
 compression formats, you must declare the formats your application supports in your manifest file
 using <a href="{@docRoot}guide/topics/manifest/supports-gl-texture-element.html">{@code
 &lt;supports-gl-texture&gt;}</a>. For more information about available texture compression
-formats, see <a href="#textures">Texture compression support</a>. 
+formats, see <a href="#textures">Texture compression support</a>.
 
 <p>Declaring texture compression requirements in your manifest hides your application from users
 with devices that do not support at least one of your declared compression types. For more
 information on how Android Market filtering works for texture compressions, see the <a
 href="{@docRoot}guide/topics/manifest/supports-gl-texture-element.html#market-texture-filtering">
 Android Market and texture compression filtering</a> section of the {@code
-&lt;supports-gl-texture&gt;} documentation.</p> 
+&lt;supports-gl-texture&gt;} documentation.</p>
   </li>
 </ul>
 
@@ -237,7 +236,7 @@
 <h3 id="proj-es1">Projection and camera view in OpenGL ES 1.0</h3>
 <p>In the ES 1.0 API, you apply projection and camera view by creating each matrix and then
 adding them to the OpenGL environment.</p>
-  
+
 <ol>
 <li><strong>Projection matrix</strong> - Create a projection matrix using the geometry of the
 device screen in order to recalculate object coordinates so they are drawn with correct proportions.
@@ -250,19 +249,19 @@
 <pre>
   public void onSurfaceChanged(GL10 gl, int width, int height) {
       gl.glViewport(0, 0, width, height);
-      
+
       // make adjustments for screen ratio
       float ratio = (float) width / height;
       gl.glMatrixMode(GL10.GL_PROJECTION);        // set matrix to projection mode
       gl.glLoadIdentity();                        // reset the matrix to its default state
       gl.glFrustumf(-ratio, ratio, -1, 1, 3, 7);  // apply the projection matrix
-  }  
+  }
 </pre>
 </li>
 
 <li><strong>Camera transformation matrix</strong> - Once you have adjusted the coordinate system
 using a projection matrix, you must also apply a camera view. The following example code shows how
-to modify the {@link        
+to modify the {@link
 android.opengl.GLSurfaceView.Renderer#onDrawFrame(javax.microedition.khronos.opengles.GL10)
 onDrawFrame()} method of a {@link android.opengl.GLSurfaceView.Renderer}
 implementation to apply a model view and use the
@@ -276,12 +275,12 @@
         // Set GL_MODELVIEW transformation mode
         gl.glMatrixMode(GL10.GL_MODELVIEW);
         gl.glLoadIdentity();                      // reset the matrix to its default state
-        
+
         // When using GL_MODELVIEW, you must set the camera view
-        GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);        
+        GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
         ...
     }
-</pre>   
+</pre>
 </li>
 </ol>
 
@@ -294,26 +293,26 @@
 <p>In the ES 2.0 API, you apply projection and camera view by first adding a matrix member to
 the vertex shaders of your graphics objects. With this matrix member added, you can then
 generate and apply projection and camera viewing matrices to your objects.</p>
-  
+
 <ol>
 <li><strong>Add matrix to vertex shaders</strong> - Create a variable for the view projection matrix
 and include it as a multiplier of the shader's position. In the following example vertex shader
-code, the included {@code uMVPMatrix} member allows you to apply projection and camera viewing 
+code, the included {@code uMVPMatrix} member allows you to apply projection and camera viewing
 matrices to the coordinates of objects that use this shader.
 
 <pre>
-    private final String vertexShaderCode = 
-        
+    private final String vertexShaderCode =
+
         // This matrix member variable provides a hook to manipulate
         // the coordinates of objects that use this vertex shader
         "uniform mat4 uMVPMatrix;   \n" +
-        
+
         "attribute vec4 vPosition;  \n" +
         "void main(){               \n" +
-        
+
         // the matrix must be included as part of gl_Position
         " gl_Position = uMVPMatrix * vPosition; \n" +
-        
+
         "}  \n";
 </pre>
   <p class="note"><strong>Note:</strong> The example above defines a single transformation matrix
@@ -340,7 +339,7 @@
 </li>
 <li><strong>Create projection and camera viewing matrices</strong> - Generate the projection and
 viewing matrices to be applied the graphic objects. The following example code shows how to modify
-the {@link    
+the {@link
 android.opengl.GLSurfaceView.Renderer#onSurfaceCreated(javax.microedition.khronos.opengles.GL10,
 javax.microedition.khronos.egl.EGLConfig) onSurfaceCreated()} and {@link
 android.opengl.GLSurfaceView.Renderer#onSurfaceChanged(javax.microedition.khronos.opengles.GL10,
@@ -353,16 +352,16 @@
         ...
         // Create a camera view matrix
         Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
-    }    
-    
+    }
+
     public void onSurfaceChanged(GL10 unused, int width, int height) {
         GLES20.glViewport(0, 0, width, height);
-        
+
         float ratio = (float) width / height;
-        
+
         // create a projection matrix from device screen geometry
         Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
-    }  
+    }
 </pre>
 </li>
 
@@ -373,16 +372,16 @@
 onDrawFrame()} method of a {@link android.opengl.GLSurfaceView.Renderer} implementation to combine
 the projection matrix and camera view created in the code above and then apply it to the graphic
 objects to be rendered by OpenGL.
-  
+
 <pre>
     public void onDrawFrame(GL10 unused) {
         ...
         // Combine the projection and camera view matrices
         Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
-        
+
         // Apply the combined projection and camera view transformations
         GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
-        
+
         // Draw objects
         ...
     }
@@ -498,7 +497,7 @@
 supported.</p>
   </li>
   <li>Review the output of this method to determine what OpenGL extensions are supported on the
-device.</li> 
+device.</li>
 </ol>
 
 
@@ -514,7 +513,7 @@
 than the ES 1.0/1.1 APIs. However, the performance difference can vary depending on the Android
 device your OpenGL application is running on, due to differences in the implementation of the OpenGL
 graphics pipeline.</li>
-  <li><strong>Device Compatibility</strong> - Developers should consider the types of devices, 
+  <li><strong>Device Compatibility</strong> - Developers should consider the types of devices,
 Android versions and the OpenGL ES versions available to their customers. For more information
 on OpenGL compatibility across devices, see the <a href="#compatibility">OpenGL Versions and Device
 Compatibility</a> section.</li>
@@ -526,7 +525,7 @@
 direct control of the graphics processing pipeline, developers can create effects that would be
 very difficult to generate using the 1.0/1.1 API.</li>
 </ul>
-  
+
 <p>While performance, compatibility, convenience, control and other factors may influence your
 decision, you should pick an OpenGL API version based on what you think provides the best experience
 for your users.</p>
diff --git a/docs/html/guide/topics/graphics/prop-animation.jd b/docs/html/guide/topics/graphics/prop-animation.jd
new file mode 100644
index 0000000..be24788
--- /dev/null
+++ b/docs/html/guide/topics/graphics/prop-animation.jd
@@ -0,0 +1,953 @@
+page.title=Property Animation
+parent.title=Animation
+parent.link=animation.html
+@jd:body
+
+  <div id="qv-wrapper">
+    <div id="qv">
+      <h2>In this document</h2>
+
+      <ol>
+        <li><a href="#how">How Property Animation Works</a></li>
+
+        <li><a href="#value-animator">Animating with ValueAnimator</a></li>
+
+        <li><a href="#object-animator">Animating with ObjectAnimator</a></li>
+
+        <li><a href="#choreography">Choreographing Multiple Animations with
+        AnimatorSet</a></li>
+
+        <li><a href="#listeners">Animation Listeners</a></li>
+
+        <li><a href="#type-evaluator">Using a TypeEvaluator</a></li>
+
+        <li><a href="#interpolators">Using Interpolators</a></li>
+
+        <li><a href="#keyframes">Specifying Keyframes</a></li>
+
+        <li><a href="#layout">Animating Layout Changes to ViewGroups</a></li>
+
+        <li><a href="#views">Animating Views</a>
+          <ol>
+            <li><a href="#view-prop-animator">ViewPropertyAnimator</a></li>
+          </ol>
+        </li>
+
+        <li><a href="#declaring-xml">Declaring Animations in XML</a></li>
+      </ol>
+
+      <h2>Key classes</h2>
+
+      <ol>
+        <li><code><a href=
+        "/reference/android/animation/ValueAnimator.html">ValueAnimator</a></code></li>
+
+        <li><code><a href=
+        "/reference/android/animation/ObjectAnimator.html">ObjectAnimator</a></code></li>
+
+        <li><code><a href=
+        "/reference/android/animation/TypeEvaluator.html">TypeEvaluator</a></code></li>
+      </ol>
+
+      <h2>Related samples</h2>
+
+      <ol>
+        <li><a href=
+        "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/index.html">API
+        Demos</a></li>
+      </ol>
+    </div>
+  </div>
+  <p>The property animation system is a robust framework that allows you
+  to animate almost anything. You can define an animation to change any object property over time,
+  regardless of whether it draws to the screen or not. A property animation changes a property's
+  value (a field in an object) over a specified length of time. To animate something, you specify the
+  object property that you want to animate, such as an object's position on the screen, how long
+  you want to animate it for, and what values you want to animate between. </p>
+
+  <p>The property animation system lets you define the following characteristics of an
+  animation; a brief sketch of how these map to animator calls follows the list:</p>
+
+  <ul>
+    <li>Duration: You can specify the duration of an animation. The default length is 300 ms.</li>
+
+    <li>Time interpolation: You can specify how the values for the property are calculated as a
+    function of the animation's current elapsed time.</li>
+
+    <li>Repeat count and behavior: You can specify whether or not to have an animation repeat when
+    it reaches the end of a duration and how many times to repeat the animation. You can also
+    specify whether you want the animation to play back in reverse. Setting it to reverse plays
+    the animation forwards then backwards repeatedly, until the number of repeats is reached.</li>
+
+    <li>Animator sets: You can group animations into logical sets that play together or
+    sequentially or after specified delays.</li>
+
+    <li>Frame refresh delay: You can specify how often to refresh frames of your animation. The
+    default is to refresh every 10 ms, but the speed at which your application can refresh frames
+    ultimately depends on how busy the system is overall and how fast the system can service the
+    underlying timer.</li>
+  </ul>
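+
+  <p>The following is a minimal sketch (not taken from the samples) showing how these
+  characteristics map to {@link android.animation.ValueAnimator} calls; animator sets are
+  covered in a later section:</p>
+  <pre>
+ValueAnimator anim = ValueAnimator.ofFloat(0f, 1f);
+anim.setDuration(500);                                        // duration
+anim.setInterpolator(new AccelerateDecelerateInterpolator()); // time interpolation
+anim.setRepeatCount(2);                                       // repeat count
+anim.setRepeatMode(ValueAnimator.REVERSE);                    // play forward, then in reverse
+ValueAnimator.setFrameDelay(10);                              // frame refresh delay, in ms
+anim.start();
+</pre>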
+
+
+  <h2 id="how">How Property Animation Works</h2>
+
+  <p>First, let's go over how an animation works with a simple example. Figure 1 depicts a
+  hypothetical object that is animated with its <code>x</code> property, which represents its
+  horizontal location on a screen. The duration of the animation is set to 40 ms and the distance
+  to travel is 40 pixels. Every 10 ms, which is the default frame refresh rate, the object moves
+  horizontally by 10 pixels. At the end of 40 ms, the animation stops, and the object ends at
+  horizontal position 40. This is an example of an animation with linear interpolation, meaning the
+  object moves at a constant speed.</p><img src="{@docRoot}images/animation/animation-linear.png">
+
+  <p class="img-caption"><strong>Figure 1.</strong> Example of a linear animation</p>
+
+  <p>You can also specify animations to have a non-linear interpolation. Figure 2 illustrates a
+  hypothetical object that accelerates at the beginning of the animation, and decelerates at the
+  end of the animation. The object still moves 40 pixels in 40 ms, but non-linearly. In the
+  beginning, this animation accelerates up to the halfway point then decelerates from the
+  halfway point until the end of the animation. As Figure 2 shows, the distance traveled
+  at the beginning and end of the animation is less than in the middle.</p><img src=
+  "{@docRoot}images/animation/animation-nonlinear.png">
+
+  <p class="img-caption"><strong>Figure 2.</strong> Example of a non-linear animation</p>
+
+  <p>Let's take a detailed look at how the important components of the property animation system
+  would calculate animations like the ones illustrated above. Figure 3 depicts how the main classes
+  work with one another.</p><img src="{@docRoot}images/animation/valueanimator.png">
+
+  <p class="img-caption"><strong>Figure 3.</strong> How animations are calculated</p>
+
+  <p>The {@link android.animation.ValueAnimator} object keeps track of your animation's timing,
+  such as how long the animation has been running, and the current value of the property that it is
+  animating.</p>
+
+  <p>The {@link android.animation.ValueAnimator} encapsulates a {@link
+  android.animation.TimeInterpolator}, which defines animation interpolation, and a {@link
+  android.animation.TypeEvaluator}, which defines how to calculate values for the property being
+  animated. For example, in Figure 2, the {@link android.animation.TimeInterpolator} used would be
+  {@link android.view.animation.AccelerateDecelerateInterpolator} and the {@link
+  android.animation.TypeEvaluator} would be {@link android.animation.IntEvaluator}.</p>
+
+  <p>To start an animation, create a {@link android.animation.ValueAnimator} and give it the
+  starting and ending values for the property that you want to animate, along with the duration of
+  the animation. When you call {@link android.animation.ValueAnimator#start start()} the animation
+  begins. During the whole animation, the {@link android.animation.ValueAnimator} calculates an <em>elapsed fraction</em>
+  between 0 and 1, based on the duration of the animation and how much time has elapsed. The
+  elapsed fraction represents the percentage of time that the animation has completed, 0 meaning 0%
+  and 1 meaning 100%. For example, in Figure 1, the elapsed fraction at t = 10 ms would be .25
+  because the total duration is t = 40 ms.</p>
+
+  <p>When the {@link android.animation.ValueAnimator} is done calculating an elapsed fraction, it
+  calls the {@link android.animation.TimeInterpolator} that is currently set, to calculate an
+  <em>interpolated fraction</em>. An interpolated fraction maps the elapsed fraction to a new
+  fraction that takes into account the time interpolation that is set. For example, in Figure 2,
+  because the animation slowly accelerates, the interpolated fraction, about .15, is less than the
+  elapsed fraction, .25, at t = 10 ms. In Figure 1, the interpolated fraction is always the same as
+  the elapsed fraction.</p>
+
+  <p>When the interpolated fraction is calculated, {@link android.animation.ValueAnimator} calls
+  the appropriate {@link android.animation.TypeEvaluator}, to calculate the value of the
+  property that you are animating, based on the interpolated fraction, the starting value, and the
+  ending value of the animation. For example, in Figure 2, the interpolated fraction was .15 at t =
+  10 ms, so the value for the property at that time would be .15 X (40 - 0), or 6.</p>
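+
+  <p>To make this calculation concrete, the following sketch (illustrative only, not how the
+  framework is invoked in practice) reproduces the Figure 2 numbers by calling the interpolator
+  and evaluator directly:</p>
+  <pre>
+TimeInterpolator interpolator = new AccelerateDecelerateInterpolator();
+IntEvaluator evaluator = new IntEvaluator();
+
+float elapsedFraction = 10f / 40f;  // t = 10 ms of a 40 ms animation, so .25
+float interpolatedFraction = interpolator.getInterpolation(elapsedFraction);  // about .15
+int value = (Integer) evaluator.evaluate(interpolatedFraction, 0, 40);        // about 6
+</pre>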
+
+ <!-- <p>When the final value is calculated, the {@link android.animation.ValueAnimator} calls the
+  {@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate
+  onAnimationUpdate()} method. Implement this callback to obtain the property value by
+  calling {@link android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()} and set the
+  value for the property in the object that you are animating. Setting the property doesn't redraw
+  the object on the screen, so you need to call {@link
+  android.view.View#invalidate invalidate()} to refresh the View that the object
+  resides in. If the object is actually a View object, then the system calls {@link
+  android.view.View#invalidate invalidate()} when the property is changed.
+  The system redraws the window and the {@link android.animation.ValueAnimator}
+  repeats the process.</p>-->
+
+  <p>The <code>com.example.android.apis.animation</code> package in the <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/index.html">API
+  Demos</a> sample project provides many examples on how to use the property
+  animation system.</p>
+
+  <h2>API Overview</h2>
+
+  <p>You can find most of the property animation system's APIs in {@link android.animation
+  android.animation}. Because the view animation system already
+  defines many interpolators in {@link android.view.animation android.view.animation}, you can use
+  those interpolators in the property animation system as well. The following tables describe the main
+  components of the property animation system.</p>
+
+  <p>The {@link android.animation.Animator} class provides the basic structure for creating
+  animations. You normally do not use this class directly as it only provides minimal
+  functionality that must be extended to fully support animating values. The following
+  subclasses extend {@link android.animation.Animator}:
+  </p>
+  <p class="table-caption"><strong>Table 1.</strong> Animators</p>
+      <table>
+        <tr>
+          <th>Class</th>
+
+          <th>Description</th>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.ValueAnimator}</td>
+
+          <td>The main timing engine for property animation that also computes the values for the
+          property to be animated. It has all of the core functionality that calculates animation
+          values and contains the timing details of each animation, information about whether an
+          animation repeats, listeners that receive update events, and the ability to set custom
+          types to evaluate. There are two pieces to animating properties: calculating the animated
+          values and setting those values on the object and property that is being animated. {@link
+          android.animation.ValueAnimator} does not carry out the second piece, so you must listen
+          for updates to values calculated by the {@link android.animation.ValueAnimator} and
+          modify the objects that you want to animate with your own logic. See the section about
+          <a href="#value-animator">Animating with ValueAnimator</a> for more information.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.ObjectAnimator}</td>
+
+          <td>A subclass of {@link android.animation.ValueAnimator} that allows you to set a target
+          object and object property to animate. This class updates the property accordingly when
+          it computes a new value for the animation. You want to use
+          {@link android.animation.ObjectAnimator} most of the time,
+          because it makes the process of animating values on target objects much easier. However,
+          you sometimes want to use {@link android.animation.ValueAnimator} directly because {@link
+          android.animation.ObjectAnimator} has a few more restrictions, such as requiring specific
+          accessor methods to be present on the target object.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.AnimatorSet}</td>
+
+          <td>Provides a mechanism to group animations together so that they run in
+          relation to one another. You can set animations to play together, sequentially, or after
+          a specified delay. See the section about <a href="#choreography">Choreographing multiple
+          animations with Animator Sets</a> for more information.</td>
+        </tr>
+      </table>
+
+
+      <p>Evaluators tell the property animation system how to calculate values for a given
+      property. They take the timing data that is provided by an {@link android.animation.Animator}
+      class, the animation's start and end value, and calculate the animated values of the property
+      based on this data. The property animation system provides the following evaluators:</p>
+      <p class="table-caption"><strong>Table 2.</strong> Evaluators</p>
+      <table>
+        <tr>
+          <th>Class/Interface</th>
+
+          <th>Description</th>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.IntEvaluator}</td>
+
+          <td>The default evaluator to calculate values for <code>int</code> properties.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.FloatEvaluator}</td>
+
+          <td>The default evaluator to calculate values for <code>float</code> properties.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.ArgbEvaluator}</td>
+
+          <td>The default evaluator to calculate values for color properties that are represented
+          as hexadecimal values.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.TypeEvaluator}</td>
+
+          <td>An interface that allows you to create your own evaluator. If you are animating an
+          object property that is <em>not</em> an <code>int</code>, <code>float</code>, or color,
+          you must implement the {@link android.animation.TypeEvaluator} interface to specify how
+          to compute the object property's animated values. You can also specify a custom {@link
+          android.animation.TypeEvaluator} for <code>int</code>, <code>float</code>, and color
+          values if you want to process those types differently than the default behavior.
+          See the section about <a href="#type-evaluator">Using a TypeEvaluator</a> for more
+          information on how to write a custom evaluator; a brief sketch also follows this table.</td>
+        </tr>
+      </table>
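+
+      <p>As a brief sketch of the idea (the <a href="#type-evaluator">Using a TypeEvaluator</a>
+      section covers this in more detail), a simple custom evaluator for <code>float</code> values
+      could be written as follows:</p>
+  <pre>
+public class MyFloatEvaluator implements TypeEvaluator {
+    public Object evaluate(float fraction, Object startValue, Object endValue) {
+        // Linear interpolation between the start and end values
+        float startFloat = ((Number) startValue).floatValue();
+        float endFloat = ((Number) endValue).floatValue();
+        return startFloat + fraction * (endFloat - startFloat);
+    }
+}
+</pre>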
+
+
+
+
+      <p>A time interpolator defines how specific values in an animation are calculated as a
+      function of time. For example, you can specify animations to happen linearly across the whole
+      animation, meaning the animation moves evenly the entire time, or you can specify animations
+      to use non-linear time, for example, accelerating at the beginning and decelerating at the
+      end of the animation. Table 3 describes the interpolators that are contained in {@link
+      android.view.animation android.view.animation}. If none of the provided interpolators suits
+      your needs, implement the {@link android.animation.TimeInterpolator} interface and create your own. See <a href=
+  "#interpolators">Using interpolators</a> for more information on how to write a custom
+  interpolator.</p>
+      <p class="table-caption"><strong>Table 3.</strong> Interpolators</p>
+      <table>
+        <tr>
+          <th>Class/Interface</th>
+
+          <th>Description</th>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.AccelerateDecelerateInterpolator}</td>
+
+          <td>An interpolator whose rate of change starts and ends slowly but accelerates
+          through the middle.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.AccelerateInterpolator}</td>
+
+          <td>An interpolator whose rate of change starts out slowly and then
+          accelerates.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.AnticipateInterpolator}</td>
+
+          <td>An interpolator whose change starts backward then flings forward.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.AnticipateOvershootInterpolator}</td>
+
+          <td>An interpolator whose change starts backward, flings forward and overshoots
+          the target value, then finally goes back to the final value.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.BounceInterpolator}</td>
+
+          <td>An interpolator whose change bounces at the end.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.CycleInterpolator}</td>
+
+          <td>An interpolator whose animation repeats for a specified number of cycles.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.DecelerateInterpolator}</td>
+
+          <td>An interpolator whose rate of change starts out quickly and then
+          decelerates.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.LinearInterpolator}</td>
+
+          <td>An interpolator whose rate of change is constant.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.view.animation.OvershootInterpolator}</td>
+
+          <td>An interpolator whose change flings forward and overshoots the last value then
+          comes back.</td>
+        </tr>
+
+        <tr>
+          <td>{@link android.animation.TimeInterpolator}</td>
+
+          <td>An interface that allows you to implement your own interpolator.</td>
+        </tr>
+      </table>
+
+  <h2 id="value-animator">Animating with ValueAnimator</h2>
+
+  <p>The {@link android.animation.ValueAnimator} class lets you animate values of some type for the
+  duration of an animation by specifying a set of <code>int</code>, <code>float</code>, or color
+  values to animate through. You obtain a {@link android.animation.ValueAnimator} by calling one of
+  its factory methods: {@link android.animation.ValueAnimator#ofInt ofInt()}, {@link
+  android.animation.ValueAnimator#ofFloat ofFloat()}, or {@link
+  android.animation.ValueAnimator#ofObject ofObject()}. For example:</p>
+  <pre>
+ValueAnimator animation = ValueAnimator.ofFloat(0f, 1f);
+animation.setDuration(1000);
+animation.start();
+</pre>
+
+  <p>In this code, the {@link android.animation.ValueAnimator} starts calculating the values of the
+  animation, between 0 and 1, for a duration of 1000 ms, when the <code>start()</code> method
+  runs.</p>
+
+  <p>You can also specify a custom type to animate by doing the following:</p>
+  <pre>
+ValueAnimator animation = ValueAnimator.ofObject(new MyTypeEvaluator(), startPropertyValue, endPropertyValue);
+animation.setDuration(1000);
+animation.start();
+</pre>
+
+  <p>In this code, the {@link android.animation.ValueAnimator} starts calculating the values of the
+  animation, between <code>startPropertyValue</code> and <code>endPropertyValue</code> using the
+  logic supplied by <code>MyTypeEvaluator</code> for a duration of 1000 ms, when the {@link
+  android.animation.ValueAnimator#start start()} method runs.</p>
+
+  <p>The previous code snippets, however, have no real effect on an object, because the {@link
+  android.animation.ValueAnimator} does not operate on objects or properties directly. The most likely thing
+  that you want to do is modify the objects that you want to animate with these calculated values. You do
+  this by defining listeners in the {@link android.animation.ValueAnimator} to appropriately handle important events
+  during the animation's lifespan, such as frame updates. When implementing the listeners, you can
+  obtain the calculated value for that specific frame refresh by calling {@link
+  android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()}. For more information on listeners,
+  see the section about <a href="#listeners">Animation Listeners</a>.
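+
+  <p>For example, a minimal sketch (with a hypothetical <code>myView</code>) that applies each
+  calculated value to a view might look like this:</p>
+  <pre>
+ValueAnimator animation = ValueAnimator.ofFloat(0f, 1f);
+animation.setDuration(1000);
+animation.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
+    public void onAnimationUpdate(ValueAnimator animator) {
+        // Apply the calculated value to the object being animated;
+        // myView is a hypothetical View in this sketch.
+        float value = (Float) animator.getAnimatedValue();
+        myView.setAlpha(value);
+    }
+});
+animation.start();
+</pre>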
+
+  <h2 id="object-animator">Animating with ObjectAnimator</h2>
+
+  <p>The {@link android.animation.ObjectAnimator} is a subclass of the {@link
+  android.animation.ValueAnimator} (discussed in the previous section) and combines the timing
+  engine and value computation of {@link android.animation.ValueAnimator} with the ability to
+  animate a named property of a target object. This makes animating any object much easier, as you
+  no longer need to implement the {@link android.animation.ValueAnimator.AnimatorUpdateListener},
+  because the animated property updates automatically.</p>
+
+  <p>Instantiating an {@link android.animation.ObjectAnimator} is similar to a {@link
+  android.animation.ValueAnimator}, but you also specify the object and the name of that object's property (as
+  a String) along with the values to animate between:</p>
+  <pre>
+ObjectAnimator anim = ObjectAnimator.ofFloat(foo, "alpha", 0f, 1f);
+anim.setDuration(1000);
+anim.start();
+</pre>
+
+  <p>To have the {@link android.animation.ObjectAnimator} update properties correctly, you must do
+  the following:</p>
+
+  <ul>
+    <li>The object property that you are animating must have a setter function (in camel case) in the form of
+    <code>set&lt;propertyName&gt;()</code>. Because the {@link android.animation.ObjectAnimator}
+    automatically updates the property during animation, it must be able to access the property
+    with this setter method. For example, if the property name is <code>foo</code>, you need to
+    have a <code>setFoo()</code> method, as shown in the sketch after this list. If this setter
+    method does not exist, you have three
+    options:
+
+      <ul>
+        <li>Add the setter method to the class if you have the rights to do so.</li>
+
+        <li>Use a wrapper class that you have rights to change and have that wrapper receive the
+        value with a valid setter method and forward it to the original object.</li>
+
+        <li>Use {@link android.animation.ValueAnimator} instead.</li>
+      </ul>
+    </li>
+
+    <li>If you specify only one value for the <code>values...</code> parameter in one of the {@link
+    android.animation.ObjectAnimator} factory methods, it is assumed to be the ending value of the
+    animation. Therefore, the object property that you are animating must have a getter function
+    that is used to obtain the starting value of the animation. The getter function must be in the
+    form of <code>get&lt;propertyName&gt;()</code>. For example, if the property name is
+    <code>foo</code>, you need to have a <code>getFoo()</code> method.</li>
+
+    <li>The getter (if needed) and setter methods of the property that you are animating must
+    operate on the same type as the starting and ending values that you specify to {@link
+    android.animation.ObjectAnimator}. For example, you must have
+    <code>targetObject.setPropName(float)</code> and <code>targetObject.getPropName()</code> methods
+    if you construct the following {@link android.animation.ObjectAnimator}:
+      <pre>
+ObjectAnimator.ofFloat(targetObject, "propName", 1f)
+</pre>
+    </li>
+
+    <li>Depending on what property or object you are animating, you might need to call the {@link
+    android.view.View#invalidate invalidate()} method on a View to force the screen to redraw itself with the
+    updated animated values. You do this in the
+    {@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate onAnimationUpdate()}
+    callback. For example, animating the color property of a Drawable object only causes updates to the
+    screen when that object redraws itself. All of the property setters on View, such as
+    {@link android.view.View#setAlpha setAlpha()} and {@link android.view.View#setTranslationX setTranslationX()}
+    invalidate the View properly, so you do not need to invalidate the View when calling these
+    methods with new values. For more information on listeners, see the section about <a href="#listeners">Animation Listeners</a>.
+    </li>
+  </ul>
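+
+  <p>As an illustration of the setter and getter requirements above, a hypothetical target class
+  and animation might look like this:</p>
+  <pre>
+public class Ball {
+    private float mRadius;
+
+    // Setter that ObjectAnimator calls on every frame of the animation
+    public void setRadius(float radius) {
+        mRadius = radius;
+    }
+
+    // Getter used to obtain the start value when only an end value is supplied
+    public float getRadius() {
+        return mRadius;
+    }
+}
+
+// Animates the "radius" property of a Ball instance from its current value to 100
+ObjectAnimator.ofFloat(ball, "radius", 100f).start();
+</pre>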
+
+  <h2 id="choreography">Choreographing Multiple Animations with AnimatorSet</h2>
+
+  <p>In many cases, you want to play an animation that depends on when another animation starts or
+  finishes. The Android system lets you bundle animations together into an {@link
+  android.animation.AnimatorSet}, so that you can specify whether to start animations
+  simultaneously, sequentially, or after a specified delay. You can also nest {@link
+  android.animation.AnimatorSet} objects within each other.</p>
+
+  <p>The following sample code taken from the <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
+  Balls</a> sample (modified for simplicity) plays the following {@link android.animation.Animator}
+  objects in the following manner:</p>
+
+  <ol>
+    <li>Plays <code>bounceAnim</code>.</li>
+
+    <li>Plays <code>squashAnim1</code>, <code>squashAnim2</code>, <code>stretchAnim1</code>, and
+    <code>stretchAnim2</code> at the same time.</li>
+
+    <li>Plays <code>bounceBackAnim</code>.</li>
+
+    <li>Plays <code>fadeAnim</code>.</li>
+  </ol>
+  <pre>
+AnimatorSet bouncer = new AnimatorSet();
+bouncer.play(bounceAnim).before(squashAnim1);
+bouncer.play(squashAnim1).with(squashAnim2);
+bouncer.play(squashAnim1).with(stretchAnim1);
+bouncer.play(squashAnim1).with(stretchAnim2);
+bouncer.play(bounceBackAnim).after(stretchAnim2);
+ValueAnimator fadeAnim = ObjectAnimator.ofFloat(newBall, "alpha", 1f, 0f);
+fadeAnim.setDuration(250);
+AnimatorSet animatorSet = new AnimatorSet();
+animatorSet.play(bouncer).before(fadeAnim);
+animatorSet.start();
+</pre>
+
+  <p>For a more complete example on how to use animator sets, see the <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
+  Balls</a> sample in API Demos.</p>
+
+<h2 id="listeners">Animation Listeners</h2>
+<p>
+You can listen for important events during an animation's duration with the listeners described below.
+</p>
+
+  <ul>
+    <li>{@link android.animation.Animator.AnimatorListener}
+
+      <ul>
+        <li>{@link android.animation.Animator.AnimatorListener#onAnimationStart onAnimationStart()}
+        - Called when the animation starts.</li>
+
+        <li>{@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()} -
+        Called when the animation ends.</li>
+
+        <li>{@link android.animation.Animator.AnimatorListener#onAnimationRepeat
+        onAnimationRepeat()} - Called when the animation repeats itself.</li>
+
+        <li>{@link android.animation.Animator.AnimatorListener#onAnimationCancel
+        onAnimationCancel()} - Called when the animation is canceled. A canceled animation
+        also calls {@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()},
+        regardless of how it was ended.</li>
+      </ul>
+    </li>
+
+    <li>{@link android.animation.ValueAnimator.AnimatorUpdateListener}
+
+      <ul>
+        <li>
+          <p>{@link android.animation.ValueAnimator.AnimatorUpdateListener#onAnimationUpdate
+          onAnimationUpdate()} - called on every frame of the animation. Listen to this event to
+          use the calculated values generated by {@link android.animation.ValueAnimator} during an
+          animation. To use the value, query the {@link android.animation.ValueAnimator} object
+          passed into the event to get the current animated value with the {@link
+          android.animation.ValueAnimator#getAnimatedValue getAnimatedValue()} method. Implementing this
+          listener is required if you use {@link android.animation.ValueAnimator}. </p>
+
+          <p>
+          Depending on what property or object you are animating, you might need to call
+          {@link android.view.View#invalidate invalidate()} on a View to force that area of the
+          screen to redraw itself with the new animated values. For example, animating the
+          color property of a Drawable object only causes updates to the screen when that object
+          redraws itself. All of the property setters on View,
+          such as {@link android.view.View#setAlpha setAlpha()} and
+          {@link android.view.View#setTranslationX setTranslationX()} invalidate the View
+          properly, so you do not need to invalidate the View when calling these methods with new
+          values. A minimal sketch of such a listener follows this list.
+          </p>
+
+        </li>
+      </ul>
+    </li>
+  </ul>
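+
+  <p>As an illustration of the point above (a minimal sketch, not taken from a sample; the
+  <code>myView</code> and <code>myDrawable</code> names are assumed to exist elsewhere in your
+  code), an update listener can force a View to redraw while a Drawable property is animated:</p>
+  <pre>
+ValueAnimator colorAnim = ObjectAnimator.ofInt(myDrawable, "alpha", 0, 255);
+colorAnim.setDuration(500);
+colorAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
+    public void onAnimationUpdate(ValueAnimator animation) {
+        // The Drawable does not invalidate the View on its own, so force the
+        // View that draws it to redraw with the newly animated value.
+        myView.invalidate();
+    }
+});
+colorAnim.start();
+</pre>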
+
+<p>You can extend the {@link android.animation.AnimatorListenerAdapter} class instead of
+implementing the {@link android.animation.Animator.AnimatorListener} interface, if you do not
+want to implement all of the methods of the {@link android.animation.Animator.AnimatorListener}
+interface. The {@link android.animation.AnimatorListenerAdapter} class provides empty
+implementations of the methods that you can choose to override.</p>
+  <p>For example, the <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/BouncingBalls.html">Bouncing
+  Balls</a> sample in the API demos creates an {@link android.animation.AnimatorListenerAdapter}
+  for just the {@link android.animation.Animator.AnimatorListener#onAnimationEnd onAnimationEnd()}
+  callback:</p>
+  <pre>
+ValueAnimator fadeAnim = ObjectAnimator.ofFloat(newBall, "alpha", 1f, 0f);
+fadeAnim.setDuration(250);
+fadeAnim.addListener(new AnimatorListenerAdapter() {
+    public void onAnimationEnd(Animator animation) {
+        balls.remove(((ObjectAnimator)animation).getTarget());
+    }
+});
+</pre>
+
+
+  <h2 id="layout">Animating Layout Changes to ViewGroups</h2>
+
+  <p>The property animation system provides the capability to animate changes to ViewGroup objects
+  as well as an easy way to animate View objects themselves.</p>
+
+  <p>You can animate layout changes within a ViewGroup with the {@link
+  android.animation.LayoutTransition} class. Views inside a ViewGroup can go through an appearing
+  and disappearing animation when you add them to or remove them from a ViewGroup or when you call
+  a View's {@link android.view.View#setVisibility setVisibility()} method with {@link
+  android.view.View#VISIBLE}, {@link android.view.View#INVISIBLE}, or {@link android.view.View#GONE}. The remaining Views in the
+  ViewGroup can also animate into their new positions when you add or remove Views. You can define
+  the following animations in a {@link android.animation.LayoutTransition} object by calling {@link
+  android.animation.LayoutTransition#setAnimator setAnimator()} and passing in an {@link
+  android.animation.Animator} object with one of the following {@link
+  android.animation.LayoutTransition} constants:</p>
+
+  <ul>
+    <li><code>APPEARING</code> - A flag indicating the animation that runs on items that are
+    appearing in the container.</li>
+
+    <li><code>CHANGE_APPEARING</code> - A flag indicating the animation that runs on items that are
+    changing due to a new item appearing in the container.</li>
+
+    <li><code>DISAPPEARING</code> - A flag indicating the animation that runs on items that are
+    disappearing from the container.</li>
+
+    <li><code>CHANGE_DISAPPEARING</code> - A flag indicating the animation that runs on items that
+    are changing due to an item disappearing from the container.</li>
+  </ul>
+
+  <p>You can define your own custom animations for these four types of events to customize the look
+  of your layout transitions or just tell the animation system to use the default animations.</p>
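+
+  <p>For example (a minimal sketch, not taken from a sample; <code>container</code> is assumed to be
+  a ViewGroup defined elsewhere in your code), you can attach a transition to a container and
+  replace only its default <code>APPEARING</code> animation:</p>
+  <pre>
+LayoutTransition transition = new LayoutTransition();
+// The null target is filled in by the LayoutTransition for each appearing View.
+ObjectAnimator appearAnim = ObjectAnimator.ofFloat(null, "alpha", 0f, 1f);
+transition.setAnimator(LayoutTransition.APPEARING, appearAnim);
+container.setLayoutTransition(transition);
+</pre>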
+
+  <p>The <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/LayoutAnimations.html">
+  LayoutAnimations</a> sample in API Demos shows you how to define animations for layout
+  transitions and then set the animations on the View objects that you want to animate.</p>
+
+  <p>The <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/LayoutAnimationsByDefault.html">
+  LayoutAnimationsByDefault</a> and its corresponding <a href=
+  "{@docRoot}resources/samples/ApiDemos/res/layout/layout_animations_by_default.html">layout_animations_by_default.xml</a>
+  layout resource file show you how to enable the default layout transitions for ViewGroups in XML.
+  The only thing that you need to do is to set the <code>android:animateLayoutChanges</code>
+  attribute to <code>true</code> for the ViewGroup. For example:</p>
+  <pre>
+&lt;LinearLayout
+    android:orientation="vertical"
+    android:layout_width="wrap_content"
+    android:layout_height="match_parent"
+    android:id="@+id/verticalContainer"
+    android:animateLayoutChanges="true" /&gt;
+</pre>
+
+  <p>Setting this attribute to true automatically animates Views that are added to or removed from the
+  ViewGroup as well as the remaining Views in the ViewGroup.</p>
+
+  <h2 id="type-evaluator">Using a TypeEvaluator</h2>
+
+  <p>If you want to animate a type that is unknown to the Android system, you can create your own
+  evaluator by implementing the {@link android.animation.TypeEvaluator} interface. The types that
+  are known by the Android system are <code>int</code>, <code>float</code>, and color values, which are
+  supported by the {@link android.animation.IntEvaluator}, {@link
+  android.animation.FloatEvaluator}, and {@link android.animation.ArgbEvaluator} type
+  evaluators.</p>
+
+  <p>There is only one method to implement in the {@link android.animation.TypeEvaluator}
+  interface, the {@link android.animation.TypeEvaluator#evaluate evaluate()} method. This allows
+  the animator that you are using to return an appropriate value for your animated property at the
+  current point of the animation. The {@link android.animation.FloatEvaluator} class demonstrates
+  how to do this:</p>
+  <pre>
+public class FloatEvaluator implements TypeEvaluator {
+
+    public Object evaluate(float fraction, Object startValue, Object endValue) {
+        float startFloat = ((Number) startValue).floatValue();
+        return startFloat + fraction * (((Number) endValue).floatValue() - startFloat);
+    }
+}
+</pre>
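+
+  <p>As a further illustration (a minimal sketch, not taken from a sample; <code>PointF</code> is
+  used only as an example of a type the system does not know how to animate), you can pass a custom
+  evaluator directly to the <code>ofObject()</code> factory method:</p>
+  <pre>
+ValueAnimator anim = ValueAnimator.ofObject(new TypeEvaluator() {
+    public Object evaluate(float fraction, Object startValue, Object endValue) {
+        PointF start = (PointF) startValue;
+        PointF end = (PointF) endValue;
+        // Interpolate each coordinate separately, just like FloatEvaluator does.
+        return new PointF(start.x + fraction * (end.x - start.x),
+                          start.y + fraction * (end.y - start.y));
+    }
+}, new PointF(0f, 0f), new PointF(100f, 200f));
+anim.setDuration(300);
+anim.start();
+</pre>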
+
+  <p class="note"><strong>Note:</strong> When {@link android.animation.ValueAnimator} (or {@link
+  android.animation.ObjectAnimator}) runs, it calculates a current elapsed fraction of the
+  animation (a value between 0 and 1) and then calculates an interpolated version of that depending
+  on what interpolator that you are using. The interpolated fraction is what your {@link
+  android.animation.TypeEvaluator} receives through the <code>fraction</code> parameter, so you do
+  not have to take into account the interpolator when calculating animated values.</p>
+
+  <h2 id="interpolators">Using Interpolators</h2>
+
+  <p>An interpolator defines how specific values in an animation are calculated as a function of
+  time. For example, you can specify animations to happen linearly across the whole animation,
+  meaning the animation moves evenly the entire time, or you can specify animations to use
+  non-linear time, for example, using acceleration or deceleration at the beginning or end of the
+  animation.</p>
+
+  <p>Interpolators in the animation system receive a fraction from Animators that represents the
+  elapsed time of the animation. Interpolators modify this fraction to coincide with the type of
+  animation that they aim to provide. The Android system provides a set of common interpolators in
+  the {@link android.view.animation android.view.animation package}. If none of these suit your
+  needs, you can implement the {@link android.animation.TimeInterpolator} interface and create your
+  own.</p>
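+
+  <p>For example, a custom interpolator only needs to implement
+  {@link android.animation.TimeInterpolator#getInterpolation getInterpolation()}. The following is a
+  minimal sketch (the <code>EaseInQuadInterpolator</code> name is arbitrary, not a framework
+  class):</p>
+  <pre>
+public class EaseInQuadInterpolator implements TimeInterpolator {
+    public float getInterpolation(float input) {
+        // Square the elapsed fraction so the animation starts slowly and speeds up.
+        return input * input;
+    }
+}
+</pre>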
+
+  <p>As an example, the following compares how the default interpolator, {@link
+  android.view.animation.AccelerateDecelerateInterpolator}, and the {@link
+  android.view.animation.LinearInterpolator} calculate interpolated fractions.
+  The {@link android.view.animation.LinearInterpolator} has no effect on the elapsed fraction. The {@link
+  android.view.animation.AccelerateDecelerateInterpolator} accelerates into the animation and
+  decelerates out of it. The following methods define the logic for these interpolators:</p>
+
+  <p><strong>AccelerateDecelerateInterpolator</strong></p>
+  <pre>
+public float getInterpolation(float input) {
+    return (float)(Math.cos((input + 1) * Math.PI) / 2.0f) + 0.5f;
+}
+</pre>
+
+  <p><strong>LinearInterpolator</strong></p>
+  <pre>
+public float getInterpolation(float input) {
+    return input;
+}
+</pre>
+
+  <p>The following table represents the approximate values that are calculated by these
+  interpolators for an animation that lasts 1000ms:</p>
+
+  <table>
+    <tr>
+      <th>ms elapsed</th>
+
+      <th>Elapsed fraction/Interpolated fraction (Linear)</th>
+
+      <th>Interpolated fraction (Accelerate/Decelerate)</th>
+    </tr>
+
+    <tr>
+      <td>0</td>
+
+      <td>0</td>
+
+      <td>0</td>
+    </tr>
+
+    <tr>
+      <td>200</td>
+
+      <td>.2</td>
+
+      <td>.1</td>
+    </tr>
+
+    <tr>
+      <td>400</td>
+
+      <td>.4</td>
+
+      <td>.345</td>
+    </tr>
+
+    <tr>
+      <td>600</td>
+
+      <td>.6</td>
+
+      <td>.655</td>
+    </tr>
+
+    <tr>
+      <td>800</td>
+
+      <td>.8</td>
+
+      <td>.9</td>
+    </tr>
+
+    <tr>
+      <td>1000</td>
+
+      <td>1</td>
+
+      <td>1</td>
+    </tr>
+  </table>
+
+  <p>As the table shows, the {@link android.view.animation.LinearInterpolator} changes the values
+  at the same speed, .2 for every 200ms that passes. The {@link
+  android.view.animation.AccelerateDecelerateInterpolator} changes the values faster than {@link
+  android.view.animation.LinearInterpolator} between 200ms and 600ms and slower between 600ms and
+  1000ms.</p>
+
+  <h2 id="keyframes">Specifying Keyframes</h2>
+
+  <p>A {@link android.animation.Keyframe} object consists of a time/value pair that lets you define
+  a specific state at a specific time of an animation. Each keyframe can also have its own
+  interpolator to control the behavior of the animation in the interval between the previous
+  keyframe's time and the time of this keyframe.</p>
+
+  <p>To instantiate a {@link android.animation.Keyframe} object, you must use one of the factory
+  methods, {@link android.animation.Keyframe#ofInt ofInt()}, {@link
+  android.animation.Keyframe#ofFloat ofFloat()}, or {@link android.animation.Keyframe#ofObject
+  ofObject()} to obtain the appropriate type of {@link android.animation.Keyframe}. You then call
+  the {@link android.animation.PropertyValuesHolder#ofKeyframe ofKeyframe()} factory method to
+  obtain a {@link android.animation.PropertyValuesHolder} object. Once you have the object, you can
+  obtain an animator by passing in the {@link android.animation.PropertyValuesHolder} object and
+  the object to animate. The following code snippet demonstrates how to do this:</p>
+  <pre>
+Keyframe kf0 = Keyframe.ofFloat(0f, 0f);
+Keyframe kf1 = Keyframe.ofFloat(.5f, 360f);
+Keyframe kf2 = Keyframe.ofFloat(1f, 0f);
+PropertyValuesHolder pvhRotation = PropertyValuesHolder.ofKeyframe("rotation", kf0, kf1, kf2);
+ObjectAnimator rotationAnim = ObjectAnimator.ofPropertyValuesHolder(target, pvhRotation);
+rotationAnim.setDuration(5000);
+</pre>
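+
+  <p>Each keyframe can also carry its own interpolator, as mentioned above. Building on the snippet
+  above (a minimal sketch):</p>
+  <pre>
+// Accelerate into the halfway keyframe; the remaining interval uses the default interpolation.
+kf1.setInterpolator(new AccelerateInterpolator());
+</pre>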
+
+  <p>For a more complete example on how to use keyframes, see the <a href=
+  "{@docRoot}resources/samples/ApiDemos/src/com/example/android/apis/animation/MultiPropertyAnimation.html">
+  MultiPropertyAnimation</a> sample in API Demos.</p>
+
+  <h2 id="views">Animating Views</h2>
+
+  <p>The property animation system allows streamlined animation of View objects and offers
+  a few advantages over the view animation system. The view
+  animation system transformed View objects by changing the way that they were drawn. This was
+  handled in the container of each View, because the View itself had no properties to manipulate.
+  This resulted in the View being animated, but caused no change in the View object itself. This
+  led to behavior such as an object still existing in its original location, even though it was
+  drawn on a different location on the screen. In Android 3.0, new properties and the corresponding
+  getter and setter methods were added to eliminate this drawback.</p>
+  <p>The property animation system
+  can animate Views on the screen by changing the actual properties in the View objects. In
+  addition, Views also automatically call the {@link android.view.View#invalidate invalidate()}
+  method to refresh the screen whenever their properties are changed. The new properties in the {@link
+  android.view.View} class that facilitate property animations are:</p>
+
+  <ul>
+    <li><code>translationX</code> and <code>translationY</code>: These properties control where the
+    View is located as a delta from its left and top coordinates, which are set by its layout
+    container.</li>
+
+    <li><code>rotation</code>, <code>rotationX</code>, and <code>rotationY</code>: These properties
+    control the rotation in 2D (<code>rotation</code> property) and 3D around the pivot point.</li>
+
+    <li><code>scaleX</code> and <code>scaleY</code>: These properties control the 2D scaling of a
+    View around its pivot point.</li>
+
+    <li><code>pivotX</code> and <code>pivotY</code>: These properties control the location of the
+    pivot point, around which the rotation and scaling transforms occur. By default, the pivot
+    point is located at the center of the object.</li>
+
+    <li><code>x</code> and <code>y</code>: These are simple utility properties to describe the
+    final location of the View in its container, as a sum of the left and top values and
+    translationX and translationY values.</li>
+
+    <li><code>alpha</code>: Represents the alpha transparency on the View. This value is 1 (opaque)
+    by default, with a value of 0 representing full transparency (not visible).</li>
+  </ul>
+
+  <p>To animate a property of a View object, such as its color or rotation value, all you need to
+  do is create a property animator and specify the View property that you want to
+  animate. For example:</p>
+  <pre>
+ObjectAnimator.ofFloat(myView, "rotation", 0f, 360f);
+</pre>
+
+<p>For more information on creating animators, see the sections on animating with
+<a href="#value-animator">ValueAnimator</a> and <a href="#object-animator">ObjectAnimator</a>.
+</p>
+
+<h3 id="view-prop-animator">Animating with ViewPropertyAnimator</h3>
+<p>The {@link android.view.ViewPropertyAnimator} provides a simple way to animate several
+properties of a {@link android.view.View} in parallel, using a single underlying {@link
+android.animation.Animator}
+object. It behaves much like an {@link android.animation.ObjectAnimator}, because it modifies the
+actual values of the view's properties, but is more efficient when animating many properties at
+once. In addition, the code for using the {@link android.view.ViewPropertyAnimator} is much
+more concise and easier to read. The following code snippets show the differences in using multiple
+{@link android.animation.ObjectAnimator} objects, a single
+{@link android.animation.ObjectAnimator}, and the {@link android.view.ViewPropertyAnimator} when
+simultaneously animating the <code>x</code> and <code>y</code> properties of a view.</p>
+
+<p><strong>Multiple ObjectAnimator objects</strong></p>
+<pre>
+ObjectAnimator animX = ObjectAnimator.ofFloat(myView, "x", 50f);
+ObjectAnimator animY = ObjectAnimator.ofFloat(myView, "y", 100f);
+AnimatorSet animSetXY = new AnimatorSet();
+animSetXY.playTogether(animX, animY);
+animSetXY.start();
+</pre>
+
+<p><strong>One ObjectAnimator</strong></p>
+<pre>
+PropertyValuesHolder pvhX = PropertyValuesHolder.ofFloat("x", 50f);
+PropertyValuesHolder pvhY = PropertyValuesHolder.ofFloat("y", 100f);
+ObjectAnimator.ofPropertyValuesHolder(myView, pvhX, pvhY).start();
+</pre>
+
+<p><strong>ViewPropertyAnimator</strong></p>
+<pre>
+myView.animate().x(50f).y(100f);
+</pre>
+
+<p>
+For more detailed information about {@link
+android.view.ViewPropertyAnimator}, see the corresponding Android Developers
+<a href="http://android-developers.blogspot.com/2011/05/introducing-viewpropertyanimator.html">blog
+post</a>.</p>
+
+<h2 id="declaring-xml">Declaring Animations in XML</h2>
+
+  <p>The property animation system lets you declare property animations with XML instead of doing
+  it programmatically. By defining your animations in XML, you can easily reuse your animations
+in multiple activities and more easily edit the animation sequence.</p>
+
+<p>To distinguish animation files that use the new property animation APIs from those that use the
+legacy <a href="{@docRoot}guide/topics/graphics/view-animation.html">view animation</a> framework,
+starting with Android 3.1, you should save the XML files for property animations in the {@code
+res/animator/} directory (instead of {@code res/anim/}). Using the {@code animator} directory name
+is optional, but necessary if you want to use the layout editor tools in the Eclipse ADT plugin (ADT
+11.0.0+), because ADT only searches the {@code res/animator/} directory for property animation
+resources.</p>
+
+<p>The following property animation classes have XML declaration support with the
+  following XML tags:</p>
+
+  <ul>
+    <li>{@link android.animation.ValueAnimator} - <code>&lt;animator&gt;</code></li>
+
+    <li>{@link android.animation.ObjectAnimator} - <code>&lt;objectAnimator&gt;</code></li>
+
+    <li>{@link android.animation.AnimatorSet} - <code>&lt;set&gt;</code></li>
+  </ul>
+
+<p>The following example plays the two sets of object animations sequentially, with the first nested
+set playing two object animations together:</p>
+
+<pre>
+&lt;set android:ordering="sequentially"&gt;
+    &lt;set&gt;
+        &lt;objectAnimator
+            android:propertyName="x"
+            android:duration="500"
+            android:valueTo="400"
+            android:valueType="intType"/&gt;
+        &lt;objectAnimator
+            android:propertyName="y"
+            android:duration="500"
+            android:valueTo="300"
+            android:valueType="intType"/&gt;
+    &lt;/set&gt;
+    &lt;objectAnimator
+        android:propertyName="alpha"
+        android:duration="500"
+        android:valueTo="1f"/&gt;
+&lt;/set&gt;
+</pre>
+  <p>In order to run this animation, you must inflate the XML resources in your code to an {@link
+  android.animation.AnimatorSet} object, and then set the target objects for all of the animations
+  before starting the animation set. Calling {@link android.animation.AnimatorSet#setTarget
+  setTarget()} sets a single target object for all children of the {@link
+  android.animation.AnimatorSet} as a convenience. The following code shows how to do this:</p>
+
+<pre>
+AnimatorSet set = (AnimatorSet) AnimatorInflater.loadAnimator(myContext,
+    R.animator.property_animator);
+set.setTarget(myObject);
+set.start();
+</pre>
+
+<p>For information about the XML syntax for defining property animations, see <a
+href="{@docRoot}guide/topics/resources/animation-resource.html#Property">Animation Resources</a>.
+
diff --git a/docs/html/guide/topics/graphics/view-animation.jd b/docs/html/guide/topics/graphics/view-animation.jd
index eff6f70..3ccda8b 100644
--- a/docs/html/guide/topics/graphics/view-animation.jd
+++ b/docs/html/guide/topics/graphics/view-animation.jd
@@ -1,27 +1,14 @@
 page.title=View Animation
-parent.title=Graphics
-parent.link=index.html
+parent.title=Animation
+parent.link=animation.html
 @jd:body
 
-  <div id="qv-wrapper">
-    <div id="qv">
-      <h2>In this document</h2>
 
-      <ol>       
-       <li><a href="#tween-animation">Tween animation</a></li>
-       <li><a href="#frame-animation">Frame animation</a></li>
-     </ol>
 
-    </div>
-  </div>
-
-  You can use View Animation in any View object to
-  perform tweened animation and frame by frame animation. Tween animation calculates the animation
-  given information such as the start point, end point, size, rotation, and other common aspects of
-  an animation. Frame by frame animation lets you load a series of Drawable resources one after
-  another to create an animation.
-
-  <h2 id="tween-animation">Tween Animation</h2>
+  <p>You can use the view animation system to perform tweened animation on Views. Tween animation
+  calculates the animation with information such as the start point, end point, size, rotation, and
+  other common aspects of an animation.
+  </p>
 
   <p>A tween animation can perform a series of simple transformations (position, size, rotation,
   and transparency) on the contents of a View object. So, if you have a {@link
@@ -126,67 +113,3 @@
   Even so, the animation will still be drawn beyond the bounds of its View and will not be clipped.
   However, clipping <em>will occur</em> if the animation exceeds the bounds of the parent View.</p>
 
-  <h2 id="frame-animation">Frame Animation</h2>
-
-  <p>This is a traditional animation in the sense that it is created with a sequence of different
-  images, played in order, like a roll of film. The {@link
-  android.graphics.drawable.AnimationDrawable} class is the basis for frame animations.</p>
-
-  <p>While you can define the frames of an animation in your code, using the {@link
-  android.graphics.drawable.AnimationDrawable} class API, it's more simply accomplished with a
-  single XML file that lists the frames that compose the animation. Like the tween animation above,
-  the XML file for this kind of animation belongs in the <code>res/drawable/</code> directory of
-  your Android project. In this case, the instructions are the order and duration for each frame of
-  the animation.</p>
-
-  <p>The XML file consists of an <code>&lt;animation-list&gt;</code> element as the root node and a
-  series of child <code>&lt;item&gt;</code> nodes that each define a frame: a drawable resource for
-  the frame and the frame duration. Here's an example XML file for a frame-by-frame animation:</p>
-  <pre>
-&lt;animation-list xmlns:android="http://schemas.android.com/apk/res/android"
-    android:oneshot="true"&gt;
-    &lt;item android:drawable="@drawable/rocket_thrust1" android:duration="200" /&gt;
-    &lt;item android:drawable="@drawable/rocket_thrust2" android:duration="200" /&gt;
-    &lt;item android:drawable="@drawable/rocket_thrust3" android:duration="200" /&gt;
-&lt;/animation-list&gt;
-</pre>
-
-  <p>This animation runs for just three frames. By setting the <code>android:oneshot</code>
-  attribute of the list to <var>true</var>, it will cycle just once then stop and hold on the last
-  frame. If it is set <var>false</var> then the animation will loop. With this XML saved as
-  <code>rocket_thrust.xml</code> in the <code>res/drawable/</code> directory of the project, it can
-  be added as the background image to a View and then called to play. Here's an example Activity,
-  in which the animation is added to an {@link android.widget.ImageView} and then animated when the
-  screen is touched:</p>
-  <pre>
-AnimationDrawable rocketAnimation;
-
-public void onCreate(Bundle savedInstanceState) {
-  super.onCreate(savedInstanceState);
-  setContentView(R.layout.main);
-
-  ImageView rocketImage = (ImageView) findViewById(R.id.rocket_image);
-  rocketImage.setBackgroundResource(R.drawable.rocket_thrust);
-  rocketAnimation = (AnimationDrawable) rocketImage.getBackground();
-}
-
-public boolean onTouchEvent(MotionEvent event) {
-  if (event.getAction() == MotionEvent.ACTION_DOWN) {
-    rocketAnimation.start();
-    return true;
-  }
-  return super.onTouchEvent(event);
-}
-</pre>
-
-  <p>It's important to note that the <code>start()</code> method called on the AnimationDrawable
-  cannot be called during the <code>onCreate()</code> method of your Activity, because the
-  AnimationDrawable is not yet fully attached to the window. If you want to play the animation
-  immediately, without requiring interaction, then you might want to call it from the <code>{@link
-  android.app.Activity#onWindowFocusChanged(boolean) onWindowFocusChanged()}</code> method in your
-  Activity, which will get called when Android brings your window into focus.</p>
-
-  <p>For more information on the XML syntax, available tags and attributes, see <a href=
-  "{@docRoot}guide/topics/resources/animation-resource.html">Animation Resources</a>.</p>
-</body>
-</html>
diff --git a/docs/html/guide/topics/media/audio-capture.jd b/docs/html/guide/topics/media/audio-capture.jd
new file mode 100644
index 0000000..75d294b
--- /dev/null
+++ b/docs/html/guide/topics/media/audio-capture.jd
@@ -0,0 +1,253 @@
+page.title=Audio Capture
+parent.title=Multimedia and Camera 
+parent.link=index.html
+@jd:body
+
+    <div id="qv-wrapper">
+    <div id="qv">
+
+<h2>In this document</h2>
+<ol>
+<li><a href="#audiocapture">Performing Audio Capture</a>
+   <ol>
+      <li><a href='#example'>Code Example</a></li>
+   </ol>
+</li>
+</ol>
+
+<h2>Key classes</h2>
+<ol>
+<li>{@link android.media.MediaRecorder}</li>
+</ol>
+
+<h2>See also</h2>
+<ol>
+  <li><a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
+  <li><a href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a></li>
+  <li><a href="{@docRoot}guide/topics/media/mediaplayer.html">MediaPlayer</a>
+</ol>
+
+</div>
+</div>
+
+<p>The Android multimedia framework includes support for capturing and encoding a variety of common
+audio formats, so that you can easily integrate audio into your applications. You can record audio
+using the {@link android.media.MediaRecorder} APIs if supported by the device hardware.</p>
+
+<p>This document shows you how to write an application that captures audio from a device
+microphone, saves the audio, and plays it back.</p>
+
+<p class="note"><strong>Note:</strong> The Android Emulator does not have the ability to capture
+audio, but actual devices are likely to provide these capabilities.</p>
+
+<h2 id="audiocapture">Performing Audio Capture</h2>
+
+<p>Audio capture from the device is a bit more complicated than audio and video playback, but still
+fairly simple:</p>
+<ol>
+  <li>Create a new instance of {@link android.media.MediaRecorder android.media.MediaRecorder}.</li>
+  <li>Set the audio source using
+        {@link android.media.MediaRecorder#setAudioSource MediaRecorder.setAudioSource()}. You will
+probably want to use
+  <code>MediaRecorder.AudioSource.MIC</code>.</li>
+  <li>Set output file format using
+        {@link android.media.MediaRecorder#setOutputFormat MediaRecorder.setOutputFormat()}.
+  </li>
+  <li>Set output file name using
+        {@link android.media.MediaRecorder#setOutputFile MediaRecorder.setOutputFile()}.
+  </li>
+  <li>Set the audio encoder using
+        {@link android.media.MediaRecorder#setAudioEncoder MediaRecorder.setAudioEncoder()}.
+  </li>
+  <li>Call {@link android.media.MediaRecorder#prepare MediaRecorder.prepare()}
+   on the MediaRecorder instance.</li>
+  <li>To start audio capture, call
+  {@link android.media.MediaRecorder#start MediaRecorder.start()}. </li>
+  <li>To stop audio capture, call {@link android.media.MediaRecorder#stop MediaRecorder.stop()}.</li>
+  <li>When you are done with the MediaRecorder instance, call
+{@link android.media.MediaRecorder#release MediaRecorder.release()} on it. Calling
+{@link android.media.MediaRecorder#release MediaRecorder.release()} is always recommended to
+free the resource immediately.</li>
+</ol>
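+
+<p>These steps condense to the following minimal sketch (not the full sample below; the
+<code>"/sdcard/audiorecordtest.3gp"</code> path is only a placeholder):</p>
+<pre>
+MediaRecorder recorder = new MediaRecorder();
+recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+recorder.setOutputFile("/sdcard/audiorecordtest.3gp");
+recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+recorder.prepare();   // throws IOException; handle it in real code
+recorder.start();
+// ... record for some period of time ...
+recorder.stop();
+recorder.release();
+</pre>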
+
+<h3 id="example">Example: Record audio and play the recorded audio</h3>
+<p>The example class below illustrates how to set up, start, and stop audio capture, and how to
+play back the recorded audio file.</p>
+<pre>
+/*
+ * The application needs to have the permission to write to external storage
+ * if the output file is written to the external storage, and also the
+ * permission to record audio. These permissions must be set in the
+ * application's AndroidManifest.xml file, with something like:
+ *
+ * &lt;uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /&gt;
+ * &lt;uses-permission android:name="android.permission.RECORD_AUDIO" /&gt;
+ *
+ */
+package com.android.audiorecordtest;
+
+import android.app.Activity;
+import android.widget.LinearLayout;
+import android.os.Bundle;
+import android.os.Environment;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.content.Context;
+import android.util.Log;
+import android.media.MediaRecorder;
+import android.media.MediaPlayer;
+
+import java.io.IOException;
+
+
+public class AudioRecordTest extends Activity
+{
+    private static final String LOG_TAG = "AudioRecordTest";
+    private static String mFileName = null;
+
+    private RecordButton mRecordButton = null;
+    private MediaRecorder mRecorder = null;
+
+    private PlayButton   mPlayButton = null;
+    private MediaPlayer   mPlayer = null;
+
+    private void onRecord(boolean start) {
+        if (start) {
+            startRecording();
+        } else {
+            stopRecording();
+        }
+    }
+
+    private void onPlay(boolean start) {
+        if (start) {
+            startPlaying();
+        } else {
+            stopPlaying();
+        }
+    }
+
+    private void startPlaying() {
+        mPlayer = new MediaPlayer();
+        try {
+            mPlayer.setDataSource(mFileName);
+            mPlayer.prepare();
+            mPlayer.start();
+        } catch (IOException e) {
+            Log.e(LOG_TAG, "prepare() failed");
+        }
+    }
+
+    private void stopPlaying() {
+        mPlayer.release();
+        mPlayer = null;
+    }
+
+    private void startRecording() {
+        mRecorder = new MediaRecorder();
+        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+        mRecorder.setOutputFile(mFileName);
+        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+
+        try {
+            mRecorder.prepare();
+        } catch (IOException e) {
+            Log.e(LOG_TAG, "prepare() failed");
+        }
+
+        mRecorder.start();
+    }
+
+    private void stopRecording() {
+        mRecorder.stop();
+        mRecorder.release();
+        mRecorder = null;
+    }
+
+    class RecordButton extends Button {
+        boolean mStartRecording = true;
+
+        OnClickListener clicker = new OnClickListener() {
+            public void onClick(View v) {
+                onRecord(mStartRecording);
+                if (mStartRecording) {
+                    setText("Stop recording");
+                } else {
+                    setText("Start recording");
+                }
+                mStartRecording = !mStartRecording;
+            }
+        };
+
+        public RecordButton(Context ctx) {
+            super(ctx);
+            setText("Start recording");
+            setOnClickListener(clicker);
+        }
+    }
+
+    class PlayButton extends Button {
+        boolean mStartPlaying = true;
+
+        OnClickListener clicker = new OnClickListener() {
+            public void onClick(View v) {
+                onPlay(mStartPlaying);
+                if (mStartPlaying) {
+                    setText("Stop playing");
+                } else {
+                    setText("Start playing");
+                }
+                mStartPlaying = !mStartPlaying;
+            }
+        };
+
+        public PlayButton(Context ctx) {
+            super(ctx);
+            setText("Start playing");
+            setOnClickListener(clicker);
+        }
+    }
+
+    public AudioRecordTest() {
+        mFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
+        mFileName += "/audiorecordtest.3gp";
+    }
+
+    &#64;Override
+    public void onCreate(Bundle icicle) {
+        super.onCreate(icicle);
+
+        LinearLayout ll = new LinearLayout(this);
+        mRecordButton = new RecordButton(this);
+        ll.addView(mRecordButton,
+            new LinearLayout.LayoutParams(
+                ViewGroup.LayoutParams.WRAP_CONTENT,
+                ViewGroup.LayoutParams.WRAP_CONTENT,
+                0));
+        mPlayButton = new PlayButton(this);
+        ll.addView(mPlayButton,
+            new LinearLayout.LayoutParams(
+                ViewGroup.LayoutParams.WRAP_CONTENT,
+                ViewGroup.LayoutParams.WRAP_CONTENT,
+                0));
+        setContentView(ll);
+    }
+
+    &#64;Override
+    public void onPause() {
+        super.onPause();
+        if (mRecorder != null) {
+            mRecorder.release();
+            mRecorder = null;
+        }
+
+        if (mPlayer != null) {
+            mPlayer.release();
+            mPlayer = null;
+        }
+    }
+}
+</pre>
\ No newline at end of file
diff --git a/docs/html/guide/topics/media/camera.jd b/docs/html/guide/topics/media/camera.jd
new file mode 100644
index 0000000..877bded
--- /dev/null
+++ b/docs/html/guide/topics/media/camera.jd
@@ -0,0 +1,1055 @@
+page.title=Camera
+parent.title=Multimedia and Camera
+parent.link=index.html
+@jd:body
+
+<div id="qv-wrapper">
+  <div id="qv">
+  <h2>In this document</h2>
+  <ol>
+    <li><a href="#considerations">Considerations</a></li>
+    <li><a href="#basics">The Basics</a>
+    <li><a href="#manifest">Manifest Declarations</a></li>
+    <li><a href="#intents">Using Existing Camera Apps</a>
+      <ol>
+        <li><a href="#intent-image">Image capture intent</a></li>
+        <li><a href="#intent-video">Video capture intent</a></li>
+        <li><a href="#intent-receive">Receiving camera intent result</a></li>
+      </ol>
+    </li>
+    <li><a href="#custom-camera">Building a Camera App</a>
+      <ol>
+        <li><a href="#detect-camera">Detecting camera hardware</a></li>
+        <li><a href="#access-camera">Accessing cameras</a></li>
+        <li><a href="#check-camera-features">Checking camera features</a></li>
+        <li><a href="#camera-preview">Creating a preview class</a></li>
+        <li><a href="#preview-layout">Placing preview in a layout</a></li>
+        <li><a href="#capture-picture">Capturing pictures</a></li>
+        <li><a href="#capture-video">Capturing videos</a></li>
+        <li><a href="#release-camera">Releasing the camera</a></li>
+      </ol>
+    </li>
+    <li><a href="#saving-media">Saving Media Files</a></li>
+  </ol>
+  <h2>Key Classes</h2>
+  <ol>
+    <li>{@link android.hardware.Camera}</li>
+    <li>{@link android.view.SurfaceView}</li>
+    <li>{@link android.media.MediaRecorder}</li>
+    <li>{@link android.content.Intent}</li>
+  </ol>
+  <h2>See also</h2>
+  <ol>
+    <li><a href="{@docRoot}reference/android/hardware/Camera.html">Camera</a></li>
+    <li><a href="{@docRoot}reference/android/media/MediaRecorder.html">MediaRecorder</a></li>
+    <li><a href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a></li>
+  </ol>
+  </div>
+</div>
+
+
+<p>The Android framework includes support for various cameras and camera features available on
+devices, allowing you to capture pictures and videos in your applications. This document discusses a
+quick, simple approach to image and video capture and outlines an advanced approach for creating
+custom camera experiences for your users.</p>
+
+<h2 id="considerations">Considerations</h2>
+<p>Before enabling your application to use cameras on Android devices, you should consider a few
+questions about how your app intends to use this hardware feature.</p>
+
+<ul>
+  <li><strong>Camera Requirement</strong> - Is the use of a camera so important to your
+application that you do not want your application installed on a device that does not have a
+camera? If so, you should declare the <a href="#manifest">camera requirement in your
+manifest</a>.</li>
+
+  <li><strong>Quick Picture or Customized Camera</strong> - How will your application use the
+camera? Are you just interested in snapping a quick picture or video clip, or will your application
+provide a new way to use cameras? For getting a quick snap or clip, consider
+<a href="#intents">Using Existing Camera Apps</a>. For developing a customized camera feature, check
+out the <a href="#custom-camera">Building a Camera App</a> section.</li>
+
+  <li><strong>Storage</strong> - Are the images or videos your application generates intended to be
+only visible to your application or shared so that other applications such as Gallery or other
+media and social apps can use them? Do you want the pictures and videos to be available even if your
+application is uninstalled? Check out the <a href="#saving-media">Saving Media Files</a> section to
+see how to implement these options.</li>
+</ul>
+
+
+
+<h2 id="basics">The Basics</h2>
+<p>The Android framework supports capturing images and video through the
+{@link android.hardware.Camera} API or camera {@link android.content.Intent}. Here are the relevant
+classes:</p>
+
+<dl>
+  <dt>{@link android.hardware.Camera}</dt>
+  <dd>This class is the primary API for controlling device cameras. This class is used to take
+pictures or videos when you are building a camera application.</dd>
+
+  <dt>{@link android.view.SurfaceView}</dt>
+  <dd>This class is used to present a live camera preview to the user.</dd>
+
+  <dt>{@link android.media.MediaRecorder}</dt>
+  <dd>This class is used to record video from the camera.</dd>
+
+  <dt>{@link android.content.Intent}</dt>
+  <dd>An intent action type of {@link android.provider.MediaStore#ACTION_IMAGE_CAPTURE
+MediaStore.ACTION_IMAGE_CAPTURE} or {@link android.provider.MediaStore#ACTION_VIDEO_CAPTURE
+MediaStore.ACTION_VIDEO_CAPTURE} can be used to capture images or videos without directly
+using the {@link android.hardware.Camera} object.</dd>
+</dl>
+
+
+<h2 id="manifest">Manifest Declarations</h2>
+<p>Before starting development on your application with the Camera API, you should make sure
+your manifest has the appropriate declarations to allow use of camera hardware and other
+related features.</p>
+
+<ul>
+  <li><strong>Camera Permission</strong> - Your application must request permission to use a device
+camera.
+<pre>
+&lt;uses-permission android:name=&quot;android.permission.CAMERA&quot; /&gt;
+</pre>
+  <p class="note"><strong>Note:</strong> If you are using the camera <a href="#intents">via an
+intent</a>, your application does not need to request this permission.</p>
+  </li>
+  <li><strong>Camera Features</strong> - Your application must also declare use of camera features,
+for example:
+<pre>
+&lt;uses-feature android:name=&quot;android.hardware.camera&quot; /&gt;
+</pre>
+  <p>For a list of camera features, see the manifest <a
+href="{@docRoot}guide/topics/manifest/uses-feature-element.html#features-reference">Features
+Reference</a>.</p>
+  <p>Adding camera features to your manifest causes Android Market to prevent your application from
+being installed to devices that do not include a camera or do not support the camera features you
+specify. For more information about using feature-based filtering with Android Market, see <a
+href="{@docRoot}guide/topics/manifest/uses-feature-element.html#market-feature-filtering">Android
+Market and Feature-Based Filtering</a>.</p>
+  <p>If your application <em>can use</em> a camera or camera feature for proper operation, but does
+not <em>require</em> it, you should specify this in the manifest by including the {@code
+android:required} attribute, and setting it to {@code false}:</p>
+<pre>
+&lt;uses-feature android:name="android.hardware.camera" android:required="false" /&gt;
+</pre>
+
+  </li>
+  <li><strong>Storage Permission</strong> - If your application saves images or videos to the
+device's external storage (SD Card), you must also specify this in the manifest.
+<pre>
+&lt;uses-permission android:name=&quot;android.permission.WRITE_EXTERNAL_STORAGE&quot; /&gt;
+</pre>
+  </li>
+  <li><strong>Audio Recording Permission</strong> - For recording audio with video capture, your
+application must request the audio capture permission.
+<pre>
+&lt;uses-permission android:name="android.permission.RECORD_AUDIO" /&gt;
+</pre>
+  </li>
+</ul>
+
+
+<h2 id="intents">Using Existing Camera Apps</h2>
+<p>A quick way to enable taking pictures or videos in your application without a lot of extra code
+is to use an {@link android.content.Intent} to invoke an existing Android camera application. A
+camera intent makes a request to capture a picture or video clip through an existing camera app and
+then returns control back to your application. This section shows you how to capture an image or
+video using this technique.</p>
+
+<p>The procedure for invoking a camera intent follows these general steps:</p>
+
+<ol>
+  <li><strong>Compose a Camera Intent</strong> - Create an {@link android.content.Intent} that
+requests an image or video, using one of these intent types:
+    <ul>
+      <li>{@link android.provider.MediaStore#ACTION_IMAGE_CAPTURE MediaStore.ACTION_IMAGE_CAPTURE} -
+Intent action type for requesting an image from an existing camera application.</li>
+      <li>{@link android.provider.MediaStore#ACTION_VIDEO_CAPTURE MediaStore.ACTION_VIDEO_CAPTURE} -
+Intent action type for requesting a video from an existing camera application. </li>
+    </ul>
+  </li>
+  <li><strong>Start the Camera Intent</strong> - Use the {@link
+android.app.Activity#startActivityForResult(android.content.Intent, int) startActivityForResult()}
+method to execute the camera intent. After you start the intent, the Camera application user
+interface appears on the device screen and the user can take a picture or video.</li>
+  <li><strong>Receive the Intent Result</strong> - Set up an {@link
+android.app.Activity#onActivityResult(int, int, android.content.Intent) onActivityResult()} method
+in your application to receive the callback and data from the camera intent. When the user
+finishes taking a picture or video (or cancels the operation), the system calls this method.</li>
+</ol>
+
+
+<h3 id="intent-image">Image capture intent</h3>
+<p>Capturing images using a camera intent is a quick way to enable your application to take pictures
+with minimal coding. An image capture intent can include the following extra information:</p>
+
+<ul>
+  <li>{@link android.provider.MediaStore#EXTRA_OUTPUT MediaStore.EXTRA_OUTPUT} - This setting
+requires a {@link android.net.Uri} object specifying a path and file name where you'd like to
+save the picture. This setting is optional but strongly recommended. If you do not specify this
+value, the camera application saves the requested picture in the default location with a default
+name, specified in the returned intent's {@link android.content.Intent#getData() Intent.getData()}
+field.</li>
+</ul>
+
+<p>The following example demonstrates how to construct an image capture intent and execute it.
+The {@code getOutputMediaFileUri()} method in this example refers to the sample code shown in <a
+href= "#saving-media">Saving Media Files</a>.</p>
+
+<pre>
+private static final int CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE = 100;
+private Uri fileUri;
+
+&#64;Override
+public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+    setContentView(R.layout.main);
+
+    // create Intent to take a picture and return control to the calling application
+    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
+
+    fileUri = getOutputMediaFileUri(MEDIA_TYPE_IMAGE); // create a file to save the image
+    intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri); // set the image file name
+
+    // start the image capture Intent
+    startActivityForResult(intent, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE);
+}
+</pre>
+
+<p>When the {@link android.app.Activity#startActivityForResult(android.content.Intent, int)
+startActivityForResult()} method is executed, users see a camera application interface.
+After the user finishes taking a picture (or cancels the operation), the user interface returns to
+your application, and you must intercept the {@link
+android.app.Activity#onActivityResult(int, int, android.content.Intent) onActivityResult()}
+method to receive the result of the intent and continue your application execution. For information
+on how to receive the completed intent, see <a href="#intent-receive">Receiving Camera Intent
+Result</a>.</p>
+
+
+<h3 id="intent-video">Video capture intent</h3>
+<p>Capturing video using a camera intent is a quick way to enable your application to take videos
+with minimal coding. A video capture intent can include the following extra information:</p>
+
+<ul>
+  <li>{@link android.provider.MediaStore#EXTRA_OUTPUT MediaStore.EXTRA_OUTPUT} - This setting
+requires a {@link android.net.Uri} specifying a path and file name where you'd like to save the
+video. This setting is optional but strongly recommended. If you do not specify this value, the
+Camera application saves the requested video in the default location with a default name, specified
+in the returned intent's {@link android.content.Intent#getData() Intent.getData()} field.</li>
+  <li>{@link android.provider.MediaStore#EXTRA_VIDEO_QUALITY MediaStore.EXTRA_VIDEO_QUALITY} -
+This value can be 0 for lowest quality and smallest file size or 1 for highest quality and
+larger file size.</li>
+  <li>{@link android.provider.MediaStore#EXTRA_DURATION_LIMIT MediaStore.EXTRA_DURATION_LIMIT} -
+Set this value to limit the length, in seconds, of the video being captured.</li>
+  <li>{@link android.provider.MediaStore#EXTRA_SIZE_LIMIT MediaStore.EXTRA_SIZE_LIMIT} -
+Set this value to limit the file size, in bytes, of the video being captured.
+</li>
+</ul>
+
+<p>The following example demonstrates how to construct a video capture intent and execute it.
+The {@code getOutputMediaFileUri()} method in this example refers to the sample code shown in <a
+href= "#saving-media">Saving Media Files</a>.</p>
+
+<pre>
+private static final int CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE = 200;
+private Uri fileUri;
+
+&#64;Override
+public void onCreate(Bundle savedInstanceState) {
+    super.onCreate(savedInstanceState);
+    setContentView(R.layout.main);
+
+    //create new Intent
+    Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE);
+
+    fileUri = getOutputMediaFileUri(MEDIA_TYPE_VIDEO);  // create a file to save the video
+    intent.putExtra(MediaStore.EXTRA_OUTPUT, fileUri);  // set the video file name
+
+    intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1); // set the video quality to high
+
+    // start the Video Capture Intent
+    startActivityForResult(intent, CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE);
+}
+</pre>
+
+<p>When the {@link
+android.app.Activity#startActivityForResult(android.content.Intent, int)
+startActivityForResult()} method is executed, users see a modified camera application interface.
+After the user finishes taking a video (or cancels the operation), the user interface
+returns to your application, and you must intercept the {@link
+android.app.Activity#onActivityResult(int, int, android.content.Intent) onActivityResult()}
+method to receive the result of the intent and continue your application execution. For information
+on how to receive the completed intent, see the next section.</p>
+
+<h3 id="intent-receive">Receiving camera intent result</h3>
+<p>Once you have constructed and executed an image or video camera intent, your application must be
+configured to receive the result of the intent. This section shows you how to intercept the callback
+from a camera intent so your application can do further processing of the captured image or
+video.</p>
+
+<p>In order to receive the result of an intent, you must override the {@link
+android.app.Activity#onActivityResult(int, int, android.content.Intent) onActivityResult()} in the
+activity that started the intent. The following example demonstrates how to override {@link
+android.app.Activity#onActivityResult(int, int, android.content.Intent) onActivityResult()} to
+capture the result of the <a href="#intent-image">image camera intent</a> or <a
+href="#intent-video">video camera intent</a> examples shown in the previous sections.</p>
+
+<pre>
+private static final int CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE = 100;
+private static final int CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE = 200;
+
+&#64;Override
+protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+    if (requestCode == CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE) {
+        if (resultCode == RESULT_OK) {
+            // Image captured and saved to fileUri specified in the Intent
+            Toast.makeText(this, "Image saved to:\n" +
+                     data.getData(), Toast.LENGTH_LONG).show();
+        } else if (resultCode == RESULT_CANCELED) {
+            // User cancelled the image capture
+        } else {
+            // Image capture failed, advise user
+        }
+    }
+
+    if (requestCode == CAPTURE_VIDEO_ACTIVITY_REQUEST_CODE) {
+        if (resultCode == RESULT_OK) {
+            // Video captured and saved to fileUri specified in the Intent
+            Toast.makeText(this, "Video saved to:\n" +
+                     data.getData(), Toast.LENGTH_LONG).show();
+        } else if (resultCode == RESULT_CANCELED) {
+            // User cancelled the video capture
+        } else {
+            // Video capture failed, advise user
+        }
+    }
+}
+</pre>
+
+<p>Once your activity receives a successful result, the captured image or video is available in the
+specified location for your application to access.</p>
+
+
+
+<h2 id="custom-camera">Building a Camera App</h2>
+<p>Some developers may require a camera user interface that is customized to the look of their
+application or provides special features. Creating a customized camera activity requires more
+code than <a href="#intents">using an intent</a>, but it can provide a more compelling experience
+for your users.</p>
+
+<p>The general steps for creating a custom camera interface for your application are as follows:</p>
+
+<ul>
+   <li><strong>Detect and Access Camera</strong> - Create code to check for the existence of
+cameras and request access.</li>
+   <li><strong>Create a Preview Class</strong> - Create a camera preview class that extends {@link
+android.view.SurfaceView} and implements the {@link android.view.SurfaceHolder} interface. This
+class previews the live images from the camera.</li>
+   <li><strong>Build a Preview Layout</strong> - Once you have the camera preview class, create a
+view layout that incorporates the preview and the user interface controls you want.</li>
+   <li><strong>Setup Listeners for Capture</strong> - Connect listeners for your interface
+controls to start image or video capture in response to user actions, such as pressing a
+button.</li>
+   <li><strong>Capture and Save Files</strong> - Setup the code for capturing pictures or
+videos and saving the output.</li>
+   <li><strong>Release the Camera</strong> - After using the camera, your application must
+properly release it for use by other applications.</li>
+</ul>
+
+<p>Camera hardware is a shared resource that must be carefully managed so your application does
+not collide with other applications that may also want to use it. The following sections discuss
+how to detect camera hardware, how to request access to a camera, and how to release it when your
+application is done using it.</p>
+
+<p class="caution"><strong>Caution:</strong> Remember to release the {@link android.hardware.Camera}
+object by calling {@link android.hardware.Camera#release() Camera.release()} when your
+application is done using it! If your application does not properly release the camera, all
+subsequent attempts to access the camera, including those by your own application, will fail and may
+cause your application or other applications to be shut down.</p>
+
+
+<h3 id="detect-camera">Detecting camera hardware</h3>
+<p>If your application does not specifically require a camera using a manifest declaration, you
+should check to see if a camera is available at runtime. To perform this check, use the {@link
+android.content.pm.PackageManager#hasSystemFeature(java.lang.String)
+PackageManager.hasSystemFeature()} method, as shown in the example code below:</p>
+
+<pre>
+/** Check if this device has a camera */
+private boolean checkCameraHardware(Context context) {
+    if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)){
+        // this device has a camera
+        return true;
+    } else {
+        // no camera on this device
+        return false;
+    }
+}
+</pre>
+
+<p>Android devices can have multiple cameras, for example a back-facing camera for photography and a
+front-facing camera for video calls. Android 2.3 (API Level 9) and later allows you to check the
+number of cameras available on a device using the {@link
+android.hardware.Camera#getNumberOfCameras() Camera.getNumberOfCameras()} method.</p>
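+
+<p>For example (a minimal sketch; API Level 9 or higher), you can count the available cameras and
+look for a front-facing one before deciding which camera to open:</p>
+<pre>
+int numberOfCameras = Camera.getNumberOfCameras();
+Camera.CameraInfo info = new Camera.CameraInfo();
+int frontCameraId = -1;
+for (int i = 0; i &lt; numberOfCameras; i++) {
+    Camera.getCameraInfo(i, info);
+    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+        frontCameraId = i;  // remember the first front-facing camera
+    }
+}
+</pre>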
+
+<h3 id="access-camera">Accessing cameras</h3>
+<p>If you have determined that the device on which your application is running has a camera, you
+must request to access it by getting an instance of {@link android.hardware.Camera} (unless you
+are using an <a href="#intents">intent to access the camera</a>). </p>
+
+<p>To access the primary camera, use the {@link android.hardware.Camera#open() Camera.open()} method
+and be sure to catch any exceptions, as shown in the code below:</p>
+
+<pre>
+/** A safe way to get an instance of the Camera object. */
+public static Camera getCameraInstance(){
+    Camera c = null;
+    try {
+        c = Camera.open(); // attempt to get a Camera instance
+    }
+    catch (Exception e){
+        // Camera is not available (in use or does not exist)
+    }
+    return c; // returns null if camera is unavailable
+}
+</pre>
+
+<p class="caution"><strong>Caution:</strong> Always check for exceptions when using {@link
+android.hardware.Camera#open() Camera.open()}. Failing to check for exceptions if the camera is in
+use or does not exist will cause your application to be shut down by the system.</p>
+
+<p>On devices running Android 2.3 (API Level 9) or higher, you can access specific cameras using
+{@link android.hardware.Camera#open(int) Camera.open(int)}. The example code above will access
+the first, back-facing camera on a device with more than one camera.</p>
+
+<h3 id="check-camera-features">Checking camera features</h3>
+<p>Once you obtain access to a camera, you can get further information about its capabilities using
+the {@link android.hardware.Camera#getParameters() Camera.getParameters()} method and checking the
+returned {@link android.hardware.Camera.Parameters} object for supported capabilities. When using
+API Level 9 or higher, use the {@link android.hardware.Camera#getCameraInfo(int,
+android.hardware.Camera.CameraInfo) Camera.getCameraInfo()} method to determine whether a camera is
+on the front or back of the device, and the orientation of the image.</p>
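+
+<p>For example (a minimal sketch; the helper name is arbitrary), you can check whether a camera
+supports autofocus before relying on it:</p>
+<pre>
+/** Returns true if the given camera reports support for autofocus. */
+private boolean supportsAutoFocus(Camera camera) {
+    Camera.Parameters params = camera.getParameters();
+    List&lt;String&gt; focusModes = params.getSupportedFocusModes();
+    if (focusModes == null) {
+        return false;
+    }
+    return focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO);
+}
+</pre>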
+
+
+
+<h3 id="camera-preview">Creating a preview class</h3>
+<p>For users to effectively take pictures or video, they must be able to see what the device camera
+sees. A camera preview class is a {@link android.view.SurfaceView} that can display the live image
+data coming from a camera, so users can frame and capture a picture or video.</p>
+
+<p>The following example code demonstrates how to create a basic camera preview class that can be
+included in a {@link android.view.View} layout. This class implements {@link
+android.view.SurfaceHolder.Callback SurfaceHolder.Callback} in order to capture the callback events
+for creating and destroying the view, which are needed for assigning the camera preview input.</p>
+
+<pre>
+/** A basic Camera preview class */
+public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
+    private SurfaceHolder mHolder;
+    private Camera mCamera;
+
+    public CameraPreview(Context context, Camera camera) {
+        super(context);
+        mCamera = camera;
+
+        // Install a SurfaceHolder.Callback so we get notified when the
+        // underlying surface is created and destroyed.
+        mHolder = getHolder();
+        mHolder.addCallback(this);
+        // deprecated setting, but required on Android versions prior to 3.0
+        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        // The Surface has been created, now tell the camera where to draw the preview.
+        try {
+            mCamera.setPreviewDisplay(holder);
+            mCamera.startPreview();
+        } catch (IOException e) {
+            Log.d(TAG, "Error setting camera preview: " + e.getMessage());
+        }
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        // empty. Take care of releasing the Camera preview in your activity.
+    }
+
+    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+        // If your preview can change or rotate, take care of those events here.
+        // Make sure to stop the preview before resizing or reformatting it.
+
+        if (mHolder.getSurface() == null){
+          // preview surface does not exist
+          return;
+        }
+
+        // stop preview before making changes
+        try {
+            mCamera.stopPreview();
+        } catch (Exception e){
+          // ignore: tried to stop a non-existent preview
+        }
+
+        // make any resize, rotate or reformatting changes here
+
+        // start preview with new settings
+        try {
+            mCamera.setPreviewDisplay(mHolder);
+            mCamera.startPreview();
+
+        } catch (Exception e){
+            Log.d(TAG, "Error starting camera preview: " + e.getMessage());
+        }
+    }
+}
+</pre>
+
+
+<h3 id="preview-layout">Placing preview in a layout</h3>
+<p>A camera preview class, such as the example shown in the previous section, must be placed in the
+layout of an activity along with other user interface controls for taking a picture or video. This
+section shows you how to build a basic layout and activity for the preview.</p>
+
+<p>The following layout code provides a very basic view that can be used to display a camera
+preview. In this example, the {@link android.widget.FrameLayout} element is meant to be the
+container for the camera preview class. This layout type is used so that additional picture
+information or controls can be overlaid on the live camera preview images.</p>
+
+<pre>
+&lt;?xml version=&quot;1.0&quot; encoding=&quot;utf-8&quot;?&gt;
+&lt;LinearLayout xmlns:android=&quot;http://schemas.android.com/apk/res/android&quot;
+    android:orientation=&quot;horizontal&quot;
+    android:layout_width=&quot;fill_parent&quot;
+    android:layout_height=&quot;fill_parent&quot;
+    &gt;
+  &lt;FrameLayout
+    android:id=&quot;@+id/camera_preview&quot;
+    android:layout_width=&quot;fill_parent&quot;
+    android:layout_height=&quot;fill_parent&quot;
+    android:layout_weight=&quot;1&quot;
+    /&gt;
+
+  &lt;Button
+    android:id=&quot;@+id/button_capture&quot;
+    android:text=&quot;Capture&quot;
+    android:layout_width=&quot;wrap_content&quot;
+    android:layout_height=&quot;wrap_content&quot;
+    android:layout_gravity=&quot;center&quot;
+    /&gt;
+&lt;/LinearLayout&gt;
+</pre>
+
+<p>On most devices, the default orientation of the camera preview is landscape. This example layout
+specifies a horizontal (landscape) layout and the code below fixes the orientation of the
+application to landscape. For simplicity in rendering a camera preview, you should change your
+application's preview activity orientation to landscape by adding the following to your
+manifest.</p>
+
+<pre>
+&lt;activity android:name=&quot;.CameraActivity&quot;
+          android:label=&quot;@string/app_name&quot;
+          android:screenOrientation=&quot;landscape&quot;&gt;
+    &lt;!-- configure this activity to use landscape orientation --&gt;
+    &lt;intent-filter&gt;
+        &lt;action android:name=&quot;android.intent.action.MAIN&quot; /&gt;
+        &lt;category android:name=&quot;android.intent.category.LAUNCHER&quot; /&gt;
+    &lt;/intent-filter&gt;
+&lt;/activity&gt;
+</pre>
+
+<p class="note"><strong>Note:</strong> A camera preview does not have to be in landscape mode.
+Starting in Android 2.2 (API Level 8), you can use the {@link
+android.hardware.Camera#setDisplayOrientation(int) setDisplayOrientation()} method to set the
+rotation of the preview image. In order to change preview orientation as the user re-orients the
+phone, within the {@link
+android.view.SurfaceHolder.Callback#surfaceChanged(android.view.SurfaceHolder, int, int, int)
+surfaceChanged()} method of your preview class, first stop the preview with {@link
+android.hardware.Camera#stopPreview() Camera.stopPreview()}, change the orientation, and then
+restart the preview with {@link android.hardware.Camera#startPreview()
+Camera.startPreview()}.</p>
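+
+<p>As a rough sketch of that sequence, a {@code surfaceChanged()} implementation that applies a
+display orientation might look like the following; the 90-degree value is hard-coded for
+illustration only, and a complete implementation would compute the rotation from the current
+display orientation and the camera orientation:</p>
+
+<pre>
+public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+    if (mHolder.getSurface() == null){
+        return; // preview surface does not exist
+    }
+
+    try {
+        mCamera.stopPreview();             // stop the preview before changing it
+    } catch (Exception e){
+        // ignore: tried to stop a non-existent preview
+    }
+
+    mCamera.setDisplayOrientation(90);     // illustrative fixed rotation (API Level 8 or higher)
+
+    try {
+        mCamera.setPreviewDisplay(mHolder);
+        mCamera.startPreview();            // restart the preview with the new orientation
+    } catch (Exception e){
+        Log.d(TAG, "Error restarting camera preview: " + e.getMessage());
+    }
+}
+</pre>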
+
+<p>In the activity for your camera view, add your preview class to the {@link
+android.widget.FrameLayout} element shown in the example above. Your camera activity must also
+ensure that it releases the camera when it is paused or shut down. The following example shows how
+to modify a camera activity to attach the preview class shown in <a href="#camera-preview">Creating
+a preview class</a>.</p>
+
+<pre>
+public class CameraActivity extends Activity {
+
+    private Camera mCamera;
+    private CameraPreview mPreview;
+
+    &#64;Override
+    public void onCreate(Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.main);
+
+        // Create an instance of Camera
+        mCamera = getCameraInstance();
+
+        // Create our Preview view and set it as the content of our activity.
+        mPreview = new CameraPreview(this, mCamera);
+        FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
+        preview.addView(mPreview);
+    }
+}
+</pre>
+
+<p class="note"><strong>Note:</strong> The {@code getCameraInstance()} method in the example above
+refers to the example method shown in <a href="#access-camera">Accessing cameras</a>.</p>
+
+
+<h3 id="capture-picture">Capturing pictures</h3>
+<p>Once you have built a preview class and a view layout in which to display it, you are ready to
+start capturing images with your application. In your application code, you must set up listeners
+for your user interface controls to respond to a user action by taking a picture.</p>
+
+<p>In order to retrieve a picture, use the {@link
+android.hardware.Camera#takePicture(android.hardware.Camera.ShutterCallback,
+android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback)
+Camera.takePicture()} method. This method takes three parameters which receive data from the camera.
+In order to receive data in a JPEG format, you must implement an {@link
+android.hardware.Camera.PictureCallback} interface to receive the image data and
+write it to a file. The following code shows a basic implementation of the {@link
+android.hardware.Camera.PictureCallback} interface to save an image received from the camera.</p>
+
+<pre>
+private PictureCallback mPicture = new PictureCallback() {
+
+    &#64;Override
+    public void onPictureTaken(byte[] data, Camera camera) {
+
+        File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
+        if (pictureFile == null){
+            Log.d(TAG, "Error creating media file, check storage permissions: " +
+                e.getMessage());
+            return;
+        }
+
+        try {
+            FileOutputStream fos = new FileOutputStream(pictureFile);
+            fos.write(data);
+            fos.close();
+        } catch (FileNotFoundException e) {
+            Log.d(TAG, "File not found: " + e.getMessage());
+        } catch (IOException e) {
+            Log.d(TAG, "Error accessing file: " + e.getMessage());
+        }
+    }
+};
+</pre>
+
+<p>Trigger capturing an image by calling the {@link
+android.hardware.Camera#takePicture(android.hardware.Camera.ShutterCallback,
+android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback)
+Camera.takePicture()} method. The following example code shows how to call this method from a
+button {@link android.view.View.OnClickListener}.</p>
+
+<pre>
+// Add a listener to the Capture button
+Button captureButton = (Button) findViewById(R.id.button_capture);
+captureButton.setOnClickListener(
+    new View.OnClickListener() {
+        &#64;Override
+        public void onClick(View v) {
+            // get an image from the camera
+            mCamera.takePicture(null, null, mPicture);
+        }
+    }
+);
+</pre>
+
+<p class="note"><strong>Note:</strong> The {@code mPicture} member in the following example refers
+to the example code above.</p>
+
+<p class="caution"><strong>Caution:</strong> Remember to release the {@link android.hardware.Camera}
+object by calling the {@link android.hardware.Camera#release() Camera.release()} method when your
+application is done using it! For information about how to release the camera, see <a
+href="#release-camera">Releasing the camera</a>.</p>
+
+
+<h3 id="capture-video">Capturing videos</h3>
+
+<p>Video capture using the Android framework requires careful management of the {@link
+android.hardware.Camera} object and coordination with the {@link android.media.MediaRecorder}
+class. When recording video with {@link android.hardware.Camera}, you must manage the {@link
+android.hardware.Camera#lock() Camera.lock()} and {@link android.hardware.Camera#unlock()
+Camera.unlock()} calls to allow {@link android.media.MediaRecorder} access to the camera hardware,
+in addition to the {@link android.hardware.Camera#open() Camera.open()} and {@link
+android.hardware.Camera#release() Camera.release()} calls.</p>
+
+<p class="note"><strong>Note:</strong> Starting with Android 4.0 (API level 14), the {@link
+android.hardware.Camera#lock() Camera.lock()} and {@link android.hardware.Camera#unlock()
+Camera.unlock()} calls are managed for you automatically.</p>
+
+<p>Unlike taking pictures with a device camera, capturing video requires a very particular call
+order. You must follow the order of execution detailed below to successfully prepare for and
+capture video with your application.</p>
+
+<ol>
+  <li><strong>Open Camera</strong> - Use the {@link android.hardware.Camera#open() Camera.open()}
+to get an instance of the camera object.</li>
+  <li><strong>Connect Preview</strong> - Prepare a live camera image preview by connecting a {@link
+android.view.SurfaceView} to the camera using {@link
+android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder) Camera.setPreviewDisplay()}.
+  </li>
+  <li><strong>Start Preview</strong> - Call {@link android.hardware.Camera#startPreview()
+Camera.startPreview()} to begin displaying the live camera images.</li>
+  <li><strong>Start Recording Video</strong> - The following steps must be completed <em>in
+order</em> to successfully record video:
+    <ol style="list-style-type: lower-alpha;">
+      <li><strong>Unlock the Camera</strong> - Unlock the camera for use by {@link
+android.media.MediaRecorder} by calling {@link android.hardware.Camera#unlock()
+Camera.unlock()}.</li>
+      <li><strong>Configure MediaRecorder</strong> - Call the following {@link
+android.media.MediaRecorder} methods <em>in this order</em>. For more information, see the {@link
+android.media.MediaRecorder} reference documentation.
+        <ol>
+          <li>{@link android.media.MediaRecorder#setCamera(android.hardware.Camera)
+setCamera()} - Set the camera to be used for video capture, use your application's current instance
+of {@link android.hardware.Camera}.</li>
+          <li>{@link android.media.MediaRecorder#setAudioSource(int) setAudioSource()} - Set the
+audio source, use {@link android.media.MediaRecorder.AudioSource#CAMCORDER
+MediaRecorder.AudioSource.CAMCORDER}. </li>
+          <li>{@link android.media.MediaRecorder#setVideoSource(int) setVideoSource()} - Set
+the video source, use {@link android.media.MediaRecorder.VideoSource#CAMERA
+MediaRecorder.VideoSource.CAMERA}.</li>
+          <li>Set the video output format and encoding. For Android 2.2 (API Level 8) and
+higher, use the {@link android.media.MediaRecorder#setProfile(android.media.CamcorderProfile)
+MediaRecorder.setProfile} method, and get a profile instance using {@link
+android.media.CamcorderProfile#get(int) CamcorderProfile.get()}. For versions of Android prior to
+2.2, you must set the video output format and encoding parameters:
+          <ol style="list-style-type: lower-roman;">
+            <li>{@link android.media.MediaRecorder#setOutputFormat(int) setOutputFormat()} - Set
+the output format, specify the default setting or {@link
+android.media.MediaRecorder.OutputFormat#MPEG_4 MediaRecorder.OutputFormat.MPEG_4}.</li>
+            <li>{@link android.media.MediaRecorder#setAudioEncoder(int) setAudioEncoder()} - Set
+the sound encoding type, specify the default setting or {@link
+android.media.MediaRecorder.AudioEncoder#AMR_NB MediaRecorder.AudioEncoder.AMR_NB}.</li>
+            <li>{@link android.media.MediaRecorder#setVideoEncoder(int) setVideoEncoder()} - Set
+the video encoding type, specify the default setting or {@link
+android.media.MediaRecorder.VideoEncoder#MPEG_4_SP MediaRecorder.VideoEncoder.MPEG_4_SP}.</li>
+          </ol>
+          </li>
+          <li>{@link android.media.MediaRecorder#setOutputFile(java.lang.String) setOutputFile()} -
+Set the output file, use {@code getOutputMediaFile(MEDIA_TYPE_VIDEO).toString()} from the example
+method in the <a href="#saving-media">Saving Media Files</a> section.</li>
+          <li>{@link android.media.MediaRecorder#setPreviewDisplay(android.view.Surface)
+setPreviewDisplay()} - Specify the {@link android.view.SurfaceView} preview layout element for
+your application. Use the same object you specified for <strong>Connect Preview</strong>.</li>
+        </ol>
+        <p class="caution"><strong>Caution:</strong> You must call these {@link
+android.media.MediaRecorder} configuration methods <em>in this order</em>, otherwise your
+application will encounter errors and the recording will fail.</p>
+      </li>
+      <li><strong>Prepare MediaRecorder</strong> - Prepare the {@link android.media.MediaRecorder}
+with provided configuration settings by calling {@link android.media.MediaRecorder#prepare()
+MediaRecorder.prepare()}.</li>
+      <li><strong>Start MediaRecorder</strong> - Start recording video by calling {@link
+android.media.MediaRecorder#start() MediaRecorder.start()}.</li>
+    </ol>
+  </li>
+  <li><strong>Stop Recording Video</strong> - Call the following methods <em>in order</em>, to
+successfully complete a video recording:
+    <ol style="list-style-type: lower-alpha;">
+      <li><strong>Stop MediaRecorder</strong> - Stop recording video by calling {@link
+android.media.MediaRecorder#stop() MediaRecorder.stop()}.</li>
+      <li><strong>Reset MediaRecorder</strong> - Optionally, remove the configuration settings from
+the recorder by calling {@link android.media.MediaRecorder#reset() MediaRecorder.reset()}.</li>
+      <li><strong>Release MediaRecorder</strong> - Release the {@link android.media.MediaRecorder}
+by calling {@link android.media.MediaRecorder#release() MediaRecorder.release()}.</li>
+      <li><strong>Lock the Camera</strong> - Lock the camera so that future {@link
+android.media.MediaRecorder} sessions can use it by calling {@link android.hardware.Camera#lock()
+Camera.lock()}. Starting with Android 4.0 (API level 14), this call is not required unless the
+{@link android.media.MediaRecorder#prepare() MediaRecorder.prepare()} call fails.</li>
+    </ol>
+  </li>
+  <li><strong>Stop the Preview</strong> - When your activity has finished using the camera, stop the
+preview using {@link android.hardware.Camera#stopPreview() Camera.stopPreview()}.</li>
+  <li><strong>Release Camera</strong> - Release the camera so that other applications can use
+it by calling {@link android.hardware.Camera#release() Camera.release()}.</li>
+</ol>
+
+<p class="note"><strong>Note:</strong> It is possible to use {@link android.media.MediaRecorder}
+without creating a camera preview first and skip the first few steps of this process. However,
+since users typically prefer to see a preview before starting a recording, that process is not
+discussed here.</p>
+
+<h4 id="configuring-mediarecorder">Configuring MediaRecorder</h4>
+<p>When using the {@link android.media.MediaRecorder} class to record video, you must perform
+configuration steps in a <em>specific order</em> and then call the {@link
+android.media.MediaRecorder#prepare() MediaRecorder.prepare()} method to check and implement the
+configuration. The following example code demonstrates how to properly configure and prepare the
+{@link android.media.MediaRecorder} class for video recording.</p>
+
+<pre>
+private boolean prepareVideoRecorder(){
+
+    mCamera = getCameraInstance();
+    mMediaRecorder = new MediaRecorder();
+
+    // Step 1: Unlock and set camera to MediaRecorder
+    mCamera.unlock();
+    mMediaRecorder.setCamera(mCamera);
+
+    // Step 2: Set sources
+    mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
+    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+
+    // Step 3: Set a CamcorderProfile (requires API Level 8 or higher)
+    mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
+
+    // Step 4: Set output file
+    mMediaRecorder.setOutputFile(getOutputMediaFile(MEDIA_TYPE_VIDEO).toString());
+
+    // Step 5: Set the preview output
+    mMediaRecorder.setPreviewDisplay(mPreview.getHolder().getSurface());
+
+    // Step 6: Prepare configured MediaRecorder
+    try {
+        mMediaRecorder.prepare();
+    } catch (IllegalStateException e) {
+        Log.d(TAG, "IllegalStateException preparing MediaRecorder: " + e.getMessage());
+        releaseMediaRecorder();
+        return false;
+    } catch (IOException e) {
+        Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
+        releaseMediaRecorder();
+        return false;
+    }
+    return true;
+}
+</pre>
+
+<p>Prior to Android 2.2 (API Level 8), you must set the output format and encoding
+parameters directly, instead of using {@link android.media.CamcorderProfile}. This approach is
+demonstrated in the following code:</p>
+
+<pre>
+    // Step 3: Set output format and encoding (for versions prior to API Level 8)
+    mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+    mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
+    mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
+</pre>
+
+<p>The following video recording parameters for {@link android.media.MediaRecorder} are given
+default settings; however, you may want to adjust them for your application, as shown in the
+sketch after this list:</p>
+
+<ul>
+  <li>{@link android.media.MediaRecorder#setVideoEncodingBitRate(int)
+setVideoEncodingBitRate()}</li>
+  <li>{@link android.media.MediaRecorder#setVideoSize(int, int) setVideoSize()}</li>
+  <li>{@link android.media.MediaRecorder#setVideoFrameRate(int) setVideoFrameRate()}</li>
+  <li>{@link android.media.MediaRecorder#setAudioEncodingBitRate(int)
+setAudioEncodingBitRate()}</li>
+  <li>{@link android.media.MediaRecorder#setAudioChannels(int) setAudioChannels()}</li>
+  <li>{@link android.media.MediaRecorder#setAudioSamplingRate(int) setAudioSamplingRate()}</li>
+</ul>
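+
+<p>The following minimal sketch shows how these setters might be called when you configure the
+recorder manually (that is, without a {@link android.media.CamcorderProfile}); the specific values
+are placeholders for illustration, must be sizes and rates the device actually supports, and these
+calls belong after the source and output format configuration and before {@link
+android.media.MediaRecorder#prepare() MediaRecorder.prepare()}:</p>
+
+<pre>
+    // Illustrative values only; query the device's supported preview sizes and
+    // frame rates through Camera.Parameters before choosing real values.
+    mMediaRecorder.setVideoSize(720, 480);
+    mMediaRecorder.setVideoFrameRate(24);
+    mMediaRecorder.setVideoEncodingBitRate(3000000);   // 3 Mbps
+    mMediaRecorder.setAudioChannels(1);
+    mMediaRecorder.setAudioSamplingRate(44100);        // 44.1 kHz
+    mMediaRecorder.setAudioEncodingBitRate(96000);     // 96 kbps
+</pre>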
+
+<h4 id="start-stop-mediarecorder">Starting and Stopping MediaRecorder</h4>
+<p>When starting and stopping video recording using the {@link android.media.MediaRecorder} class,
+you must follow a specific order, as listed below.</p>
+
+<ol>
+  <li>Unlock the camera with {@link android.hardware.Camera#unlock() Camera.unlock()}</li>
+  <li>Configure {@link android.media.MediaRecorder} as shown in the code example above</li>
+  <li>Start recording using {@link android.media.MediaRecorder#start()
+MediaRecorder.start()}</li>
+  <li>Record the video</li>
+  <li>Stop recording using {@link
+android.media.MediaRecorder#stop() MediaRecorder.stop()}</li>
+  <li>Release the media recorder with {@link android.media.MediaRecorder#release()
+MediaRecorder.release()}</li>
+  <li>Lock the camera using {@link android.hardware.Camera#lock() Camera.lock()}</li>
+</ol>
+
+<p>The following example code demonstrates how to wire up a button to properly start and stop
+video recording using the camera and the {@link android.media.MediaRecorder} class.</p>
+
+<p class="note"><strong>Note:</strong> When completing a video recording, do not release the camera
+or else your preview will be stopped.</p>
+
+<pre>
+private boolean isRecording = false;
+
+// Add a listener to the Capture button
+Button captureButton = (Button) findViewById(R.id.button_capture);
+captureButton.setOnClickListener(
+    new View.OnClickListener() {
+        &#64;Override
+        public void onClick(View v) {
+            if (isRecording) {
+                // stop recording and release camera
+                mMediaRecorder.stop();  // stop the recording
+                releaseMediaRecorder(); // release the MediaRecorder object
+                mCamera.lock();         // take camera access back from MediaRecorder
+
+                // inform the user that recording has stopped
+                setCaptureButtonText("Capture");
+                isRecording = false;
+            } else {
+                // initialize video camera
+                if (prepareVideoRecorder()) {
+                    // Camera is available and unlocked, MediaRecorder is prepared,
+                    // now you can start recording
+                    mMediaRecorder.start();
+
+                    // inform the user that recording has started
+                    setCaptureButtonText("Stop");
+                    isRecording = true;
+                } else {
+                    // prepare didn't work, release the camera
+                    releaseMediaRecorder();
+                    // inform user
+                }
+            }
+        }
+    }
+);
+</pre>
+
+<p class="note"><strong>Note:</strong> In the above example, the {@code prepareVideoRecorder()}
+method refers to the example code shown in <a
+href="#configuring-mediarecorder">Configuring MediaRecorder</a>. This method takes care of locking
+the camera, configuring and preparing the {@link android.media.MediaRecorder} instance.</p>
+
+
+<h3 id="release-camera">Releasing the camera</h3>
+<p>The camera is a resource that is shared by the applications running on a device. Your
+application can make use of the camera after getting an instance of {@link
+android.hardware.Camera}, and you must be particularly careful to release the camera object when
+your application stops using it, and as soon as your application is paused ({@link
+android.app.Activity#onPause() Activity.onPause()}). If your application does not properly release
+the camera, all subsequent attempts to access the camera, including those by your own application,
+will fail and may cause your application or other applications to be shut down.</p>
+
+<p>To release an instance of the {@link android.hardware.Camera} object, use the {@link
+android.hardware.Camera#release() Camera.release()} method, as shown in the example code below.</p>
+
+<pre>
+public class CameraActivity extends Activity {
+    private Camera mCamera;
+    private SurfaceView mPreview;
+    private MediaRecorder mMediaRecorder;
+
+    ...
+    
+    &#64;Override
+    protected void onPause() {
+        super.onPause();
+        releaseMediaRecorder();       // if you are using MediaRecorder, release it first
+        releaseCamera();              // release the camera immediately on pause event
+    }
+
+    private void releaseMediaRecorder(){
+        if (mMediaRecorder != null) {
+            mMediaRecorder.reset();   // clear recorder configuration
+            mMediaRecorder.release(); // release the recorder object
+            mMediaRecorder = null;
+            mCamera.lock();           // lock camera for later use
+        }
+    }
+
+    private void releaseCamera(){
+        if (mCamera != null){
+            mCamera.release();        // release the camera for other applications
+            mCamera = null;
+        }
+    }
+}
+</pre>
+
+<p class="caution"><strong>Caution:</strong> If your application does not properly release the
+camera, all subsequent attempts to access the camera, including those by your own application, will
+fail and may cause your or other applications to be shut down.</p>
+
+
+<h2 id="saving-media">Saving Media Files</h2>
+<p>Media files created by users, such as pictures and videos, should be saved to a device's
+external storage directory (SD card) to conserve system space and to allow users to access these
+files independently of your application. There are many possible directory locations to save media
+files on a device; however, there are only two standard locations you should consider as a
+developer:</p>
+
+<ul>
+  <li><strong>{@link android.os.Environment#getExternalStoragePublicDirectory(java.lang.String)
+Environment.getExternalStoragePublicDirectory}({@link android.os.Environment#DIRECTORY_PICTURES
+Environment.DIRECTORY_PICTURES})</strong> - This method returns the standard, shared and recommended
+location for saving pictures and videos. This directory is shared (public), so other applications
+can easily discover, read, change and delete files saved in this location. If your application is
+uninstalled by the user, media files saved to this location will not be removed. To avoid
+interfering with users' existing pictures and videos, you should create a sub-directory for your
+application's media files within this directory, as shown in the code sample below. This method is
+available in Android 2.2 (API Level 8) and later; for equivalent calls in earlier API versions, see <a
+href="{@docRoot}guide/topics/data/data-storage.html#SavingSharedFiles">Saving Shared Files</a>.</li>
+  <li><strong>{@link android.content.Context#getExternalFilesDir(java.lang.String)
+Context.getExternalFilesDir}({@link android.os.Environment#DIRECTORY_PICTURES
+Environment.DIRECTORY_PICTURES})</strong> - This method returns a standard location for saving
+pictures and videos that are associated with your application. If your application is uninstalled,
+any files saved in this location are removed. Security is not enforced for files in this
+location, and other applications may read, change, and delete them. A minimal usage sketch follows
+this list.</li>
+</ul>
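+
+<p>If you prefer the second, application-specific location, a minimal usage sketch might look like
+the following; it assumes the code runs inside a {@link android.content.Context} such as an
+activity:</p>
+
+<pre>
+// Application-specific pictures directory; files here are removed when the app is uninstalled.
+File privateDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
+</pre>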
+
+<p>The following example code demonstrates how to create a {@link java.io.File} or {@link
+android.net.Uri} location for a media file that can be used when invoking a device's camera with
+an {@link android.content.Intent} or as part of building a custom camera app, as described in <a
+href="#custom-camera">Building a Camera App</a>.</p>
+
+<pre>
+public static final int MEDIA_TYPE_IMAGE = 1;
+public static final int MEDIA_TYPE_VIDEO = 2;
+
+/** Create a file Uri for saving an image or video */
+private static Uri getOutputMediaFileUri(int type){
+    return Uri.fromFile(getOutputMediaFile(type));
+}
+
+/** Create a File for saving an image or video */
+private static File getOutputMediaFile(int type){
+    // To be safe, you should check that the SDCard is mounted
+    // using Environment.getExternalStorageState() before doing this.
+
+    File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
+              Environment.DIRECTORY_PICTURES), "MyCameraApp");
+    // This location works best if you want the created images to be shared
+    // between applications and persist after your app has been uninstalled.
+
+    // Create the storage directory if it does not exist
+    if (! mediaStorageDir.exists()){
+        if (! mediaStorageDir.mkdirs()){
+            Log.d("MyCameraApp", "failed to create directory");
+            return null;
+        }
+    }
+
+    // Create a media file name
+    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
+    File mediaFile;
+    if (type == MEDIA_TYPE_IMAGE){
+        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
+        "IMG_"+ timeStamp + ".jpg");
+    } else if(type == MEDIA_TYPE_VIDEO) {
+        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
+        "VID_"+ timeStamp + ".mp4");
+    } else {
+        return null;
+    }
+
+    return mediaFile;
+}
+</pre>
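+
+<p>As the comment in the code above suggests, it is safer to confirm that external storage is
+mounted before writing to it. A minimal sketch of such a check, using a hypothetical helper
+method, might look like this:</p>
+
+<pre>
+/** Check whether external storage is currently mounted and writable. */
+private static boolean isExternalStorageWritable() {
+    String state = Environment.getExternalStorageState();
+    return Environment.MEDIA_MOUNTED.equals(state);
+}
+</pre>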
+
+<p class="note"><strong>Note:</strong> {@link
+android.os.Environment#getExternalStoragePublicDirectory(java.lang.String)
+Environment.getExternalStoragePublicDirectory()} is available in Android 2.2 (API Level 8) or
+higher. If you are targeting devices with earlier versions of Android, use {@link
+android.os.Environment#getExternalStorageDirectory() Environment.getExternalStorageDirectory()}
+instead. For more information, see <a
+href="{@docRoot}guide/topics/data/data-storage.html#SavingSharedFiles">Saving Shared Files</a>.</p>
+
+<p>For more information about saving files on an Android device, see <a
+href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a>.</p>
\ No newline at end of file
diff --git a/docs/html/guide/topics/media/index.jd b/docs/html/guide/topics/media/index.jd
index 06e6208..7c1754f 100644
--- a/docs/html/guide/topics/media/index.jd
+++ b/docs/html/guide/topics/media/index.jd
@@ -1,971 +1,62 @@
-page.title=Media
+page.title=Multimedia and Camera
 @jd:body
 
     <div id="qv-wrapper">
     <div id="qv">
 
-<h2>Quickview</h2>
-<ul>
-<li>MediaPlayer APIs allow you to play and record media</li>
-<li>You can handle data from raw resources, files, and streams</li>
-<li>The platform supports a variety of media formats. See <a
-href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
-</ul>
-
-<h2>In this document</h2>
+<h2>Topics</h2>
 <ol>
-<li><a href="#mediaplayer">Using MediaPlayer</a>
-   <ol>
-      <li><a href='#preparingasync'>Asynchronous Preparation</a></li>
-      <li><a href='#managestate'>Managing State</a></li>
-      <li><a href='#releaseplayer'>Releasing the MediaPlayer</a></li>
-   </ol>
-</li>
-<li><a href="#mpandservices">Using a Service with MediaPlayer</a>
-   <ol>
-      <li><a href="#asyncprepare">Running asynchronously</a></li>
-      <li><a href="#asyncerror">Handling asynchronous errors</a></li>
-      <li><a href="#wakelocks">Using wake locks</a></li>
-      <li><a href="#foregroundserv">Running as a foreground service</a></li>
-      <li><a href="#audiofocus">Handling audio focus</a></li>
-      <li><a href="#cleanup">Performing cleanup</a></li>
-   </ol>
-</li>
-<li><a href="#noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</a>
-<li><a href="#viacontentresolver">Retrieving Media from a Content Resolver</a>
-<li><a href="#jetcontent">Playing JET content</a>
-<li><a href="#audiocapture">Performing Audio Capture</a>
+<li><a href="{@docRoot}guide/topics/media/mediaplayer.html">MediaPlayer</a></li>
+<li><a href="{@docRoot}guide/topics/media/jetplayer.html">JetPlayer</a></li>
+<li><a href="{@docRoot}guide/topics/media/camera.html">Camera</a></li>
+<li><a href="{@docRoot}guide/topics/media/audio-capture.html">Audio Capture</a></li>
 </ol>
 
 <h2>Key classes</h2>
 <ol>
 <li>{@link android.media.MediaPlayer}</li>
+<li>{@link android.media.JetPlayer}</li>
+<li>{@link android.hardware.Camera}</li>
 <li>{@link android.media.MediaRecorder}</li>
 <li>{@link android.media.AudioManager}</li>
-<li>{@link android.media.JetPlayer}</li>
 <li>{@link android.media.SoundPool}</li>
 </ol>
 
 <h2>See also</h2>
 <ol>
-<li><a href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a></li>
-<li><a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User Manual</a></li>
+<li><a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
+<li><a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User
+Manual</a></li>
 </ol>
 
 </div>
 </div>
 
-<p>The Android multimedia framework includes support for encoding and decoding a
-variety of common media types, so that you can easily integrate audio,
-video and images into your applications. You can play audio or video from media files stored in your 
-application's resources (raw resources), from standalone files in the filesystem, or from a data
-stream arriving over a network connection, all using {@link android.media.MediaPlayer} APIs.</p>
+<p>The Android multimedia framework includes support for capturing and playing audio, video and
+images in a variety of common media types, so that you can easily integrate them into your
+applications. You can play audio or video from media files stored in your application's resources,
+from standalone files in the file system, or from a data stream arriving over a
+network connection, all using the {@link android.media.MediaPlayer} or {@link
+android.media.JetPlayer} APIs. You can also record audio, video and take pictures using the {@link
+android.media.MediaRecorder} and {@link android.hardware.Camera} APIs if supported by the device
+hardware.</p>
 
-<p>You can also record audio and video using the {@link android.media.MediaRecorder} APIs if
-supported by the device hardware. Note that the emulator doesn't have hardware to capture audio or
-video, but actual mobile devices are likely to provide these capabilities.</p>
+<p>The following topics show you how to use the Android framework to implement multimedia capture
+and playback.</p>
 
-<p>This document shows you how to write a media-playing application that interacts with the user and
-the system in order to obtain good performance and a pleasant user experience.</p>
+<dl>
+  <dt><strong><a href="{@docRoot}guide/topics/media/mediaplayer.html">MediaPlayer</a></strong></dt>
+  <dd>How to play audio and video in your application.</dd>
 
-<p class="note"><strong>Note:</strong> You can play back the audio data only to the standard output
-device. Currently, that is the mobile device speaker or a Bluetooth headset. You cannot play sound
-files in the conversation audio during a call.</p>
+  <dt><strong><a href="{@docRoot}guide/topics/media/jetplayer.html">JetPlayer</a></strong></dt>
+  <dd>How to play interactive audio in your application using content created with
+JetCreator.</dd>
 
+  <dt><strong><a href="{@docRoot}guide/topics/media/camera.html">Camera</a></strong></dt>
+  <dd>How to use a device camera to take pictures or video in your application.</dd>
 
-<h2 id="mediaplayer">Using MediaPlayer</h2>
-
-<p>One of the most important components of the media framework is the
-{@link android.media.MediaPlayer MediaPlayer}
-class. An object of this class can fetch, decode, and play both audio and video
-with minimal setup. It supports several different media sources such as:
-<ul>
-   <li>Local resources</li>
-   <li>Internal URIs, such as one you might obtain from a Content Resolver</li>
-   <li>External URLs (streaming)</li>
-</ul>
-</p>
-
-<p>For a list of media formats that Android supports,
-see the <a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media
-Formats</a> document. </p>
-
-<p>Here is an example
-of how to play audio that's available as a local raw resource (saved in your application's
-{@code res/raw/} directory):</p>
-
-<pre>MediaPlayer mediaPlayer = MediaPlayer.create(context, R.raw.sound_file_1);
-mediaPlayer.start(); // no need to call prepare(); create() does that for you
-</pre>
-
-<p>In this case, a "raw" resource is a file that the system does not
-try to parse in any particular way. However, the content of this resource should not
-be raw audio. It should be a properly encoded and formatted media file in one 
-of the supported formats.</p>
-
-<p>And here is how you might play from a URI available locally in the system
-(that you obtained through a Content Resolver, for instance):</p>
-
-<pre>Uri myUri = ....; // initialize Uri here
-MediaPlayer mediaPlayer = new MediaPlayer();
-mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
-mediaPlayer.setDataSource(getApplicationContext(), myUri);
-mediaPlayer.prepare();
-mediaPlayer.start();</pre>
-
-<p>Playing from a remote URL via HTTP streaming looks like this:</p>
-
-<pre>String url = "http://........"; // your URL here
-MediaPlayer mediaPlayer = new MediaPlayer();
-mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
-mediaPlayer.setDataSource(url);
-mediaPlayer.prepare(); // might take long! (for buffering, etc)
-mediaPlayer.start();</pre>
-
-<p class="note"><strong>Note:</strong>
-If you're passing a URL to stream an online media file, the file must be capable of
-progressive download.</p>
-
-<p class="caution"><strong>Caution:</strong> You must either catch or pass
-{@link java.lang.IllegalArgumentException} and {@link java.io.IOException} when using
-{@link android.media.MediaPlayer#setDataSource setDataSource()}, because
-the file you are referencing might not exist.</p>
-
-<h3 id='#preparingasync'>Asynchronous Preparation</h3>
-
-<p>Using {@link android.media.MediaPlayer MediaPlayer} can be straightforward in
-principle. However, it's important to keep in mind that a few more things are
-necessary to integrate it correctly with a typical Android application. For
-example, the call to {@link android.media.MediaPlayer#prepare prepare()} can
-take a long time to execute, because
-it might involve fetching and decoding media data. So, as is the case with any
-method that may take long to execute, you should <strong>never call it from your
-application's UI thread</strong>. Doing that will cause the UI to hang until the method returns,
-which is a very bad user experience and can cause an ANR (Application Not Responding) error. Even if
-you expect your resource to load quickly, remember that anything that takes more than a tenth
-of a second to respond in the UI will cause a noticeable pause and will give
-the user the impression that your application is slow.</p>
-
-<p>To avoid hanging your UI thread, spawn another thread to
-prepare the {@link android.media.MediaPlayer} and notify the main thread when done. However, while
-you could write the threading logic
-yourself, this pattern is so common when using {@link android.media.MediaPlayer} that the framework
-supplies a convenient way to accomplish this task by using the
-{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. This method
-starts preparing the media in the background and returns immediately. When the media
-is done preparing, the {@link android.media.MediaPlayer.OnPreparedListener#onPrepared onPrepared()}
-method of the {@link android.media.MediaPlayer.OnPreparedListener
-MediaPlayer.OnPreparedListener}, configured through
-{@link android.media.MediaPlayer#setOnPreparedListener setOnPreparedListener()} is called.</p>
-
-<h3 id='#managestate'>Managing State</h3>
-
-<p>Another aspect of a {@link android.media.MediaPlayer} that you should keep in mind is
-that it's state-based. That is, the {@link android.media.MediaPlayer} has an internal state
-that you must always be aware of when writing your code, because certain operations
-are only valid when then player is in specific states. If you perform an operation while in the
-wrong state, the system may throw an exception or cause other undesireable behaviors.</p>
-
-<p>The documentation in the
-{@link android.media.MediaPlayer MediaPlayer} class shows a complete state diagram,
-that clarifies which methods move the {@link android.media.MediaPlayer} from one state to another.
-For example, when you create a new {@link android.media.MediaPlayer}, it is in the <em>Idle</em>
-state. At that point, you should initialize it by calling
-{@link android.media.MediaPlayer#setDataSource setDataSource()}, bringing it
-to the <em>Initialized</em> state. After that, you have to prepare it using either the
-{@link android.media.MediaPlayer#prepare prepare()} or
-{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. When
-the {@link android.media.MediaPlayer} is done preparing, it will then enter the <em>Prepared</em>
-state, which means you can call {@link android.media.MediaPlayer#start start()}
-to make it play the media. At that point, as the diagram illustrates,
-you can move between the <em>Started</em>, <em>Paused</em> and <em>PlaybackCompleted</em> states by
-calling such methods as
-{@link android.media.MediaPlayer#start start()},
-{@link android.media.MediaPlayer#pause pause()}, and
-{@link android.media.MediaPlayer#seekTo seekTo()},
-amongst others. When you
-call {@link android.media.MediaPlayer#stop stop()}, however, notice that you
-cannot call {@link android.media.MediaPlayer#start start()} again until you
-prepare the {@link android.media.MediaPlayer} again.</p>
-
-<p>Always keep <a href='{@docRoot}images/mediaplayer_state_diagram.gif'>the state diagram</a> 
-in mind when writing code that interacts with a
-{@link android.media.MediaPlayer} object, because calling its methods from the wrong state is a
-common cause of bugs.</p>
-
-<h3 id='#releaseplayer'>Releasing the MediaPlayer</h3>
-
-<p>A {@link android.media.MediaPlayer MediaPlayer} can consume valuable
-system resources.
-Therefore, you should always take extra precautions to make sure you are not
-hanging on to a {@link android.media.MediaPlayer} instance longer than necessary. When you
-are done with it, you should always call
-{@link android.media.MediaPlayer#release release()} to make sure any
-system resources allocated to it are properly released. For example, if you are
-using a {@link android.media.MediaPlayer} and your activity receives a call to {@link
-android.app.Activity#onStop onStop()}, you must release the {@link android.media.MediaPlayer},
-because it
-makes little sense to hold on to it while your activity is not interacting with
-the user (unless you are playing media in the background, which is discussed in the next section).
-When your activity is resumed or restarted, of course, you need to
-create a new {@link android.media.MediaPlayer} and prepare it again before resuming playback.</p>
-
-<p>Here's how you should release and then nullify your {@link android.media.MediaPlayer}:</p>
-<pre>
-mediaPlayer.release();
-mediaPlayer = null;
-</pre>
-
-<p>As an example, consider the problems that could happen if you
-forgot to release the {@link android.media.MediaPlayer} when your activity is stopped, but create a
-new one when the activity starts again. As you may know, when the user changes the
-screen orientation (or changes the device configuration in another way), 
-the system handles that by restarting the activity (by default), so you might quickly
-consume all of the system resources as the user
-rotates the device back and forth between portrait and landscape, because at each
-orientation change, you create a new {@link android.media.MediaPlayer} that you never
-release. (For more information about runtime restarts, see <a
-href="{@docRoot}guide/topics/resources/runtime-changes.html">Handling Runtime Changes</a>.)</p>
-
-<p>You may be wondering what happens if you want to continue playing
-"background media" even when the user leaves your activity, much in the same
-way that the built-in Music application behaves. In this case, what you need is
-a {@link android.media.MediaPlayer MediaPlayer} controlled by a {@link android.app.Service}, as
-discussed in <a href="mpandservices">Using a Service with MediaPlayer</a>.</p>
-
-<h2 id="mpandservices">Using a Service with MediaPlayer</h2>
-
-<p>If you want your media to play in the background even when your application
-is not onscreen&mdash;that is, you want it to continue playing while the user is
-interacting with other applications&mdash;then you must start a
-{@link android.app.Service Service} and control the
-{@link android.media.MediaPlayer MediaPlayer} instance from there.
-You should be careful about this setup, because the user and the system have expectations
-about how an application running a background service should interact with the rest of the
-system. If your application does not fulfil those expectations, the user may
-have a bad experience. This section describes the main issues that you should be
-aware of and offers suggestions about how to approach them.</p>
-
-
-<h3 id="asyncprepare">Running asynchronously</h3>
-
-<p>First of all, like an {@link android.app.Activity Activity}, all work in a
-{@link android.app.Service Service} is done in a single thread by
-default&mdash;in fact, if you're running an activity and a service from the same application, they
-use the same thread (the "main thread") by default. Therefore, services need to
-process incoming intents quickly
-and never perform lengthy computations when responding to them. If any heavy
-work or blocking calls are expected, you must do those tasks asynchronously: either from
-another thread you implement yourself, or using the framework's many facilities
-for asynchronous processing.</p>
-
-<p>For instance, when using a {@link android.media.MediaPlayer} from your main thread,
-you should call {@link android.media.MediaPlayer#prepareAsync prepareAsync()} rather than
-{@link android.media.MediaPlayer#prepare prepare()}, and implement
-a {@link android.media.MediaPlayer.OnPreparedListener MediaPlayer.OnPreparedListener}
-in order to be notified when the preparation is complete and you can start playing.
-For example:</p>
-
-<pre>
-public class MyService extends Service implements MediaPlayer.OnPreparedListener {
-    private static final ACTION_PLAY = "com.example.action.PLAY";
-    MediaPlayer mMediaPlayer = null;
-
-    public int onStartCommand(Intent intent, int flags, int startId) {
-        ...
-        if (intent.getAction().equals(ACTION_PLAY)) {
-            mMediaPlayer = ... // initialize it here
-            mMediaPlayer.setOnPreparedListener(this);
-            mMediaPlayer.prepareAsync(); // prepare async to not block main thread
-        }
-    }
-
-    /** Called when MediaPlayer is ready */
-    public void onPrepared(MediaPlayer player) {
-        player.start();
-    }
-}
-</pre>
-
-
-<h3 id="asyncerror">Handling asynchronous errors</h3>
-
-<p>On synchronous operations, errors would normally
-be signaled with an exception or an error code, but whenever you use asynchronous
-resources, you should make sure your application is notified
-of errors appropriately. In the case of a {@link android.media.MediaPlayer MediaPlayer},
-you can accomplish this by implementing a
-{@link android.media.MediaPlayer.OnErrorListener MediaPlayer.OnErrorListener} and
-setting it in your {@link android.media.MediaPlayer} instance:</p>
-
-<pre>
-public class MyService extends Service implements MediaPlayer.OnErrorListener {
-    MediaPlayer mMediaPlayer;
-
-    public void initMediaPlayer() {
-        // ...initialize the MediaPlayer here...
-
-        mMediaPlayer.setOnErrorListener(this);
-    }
-
-    &#64;Override
-    public boolean onError(MediaPlayer mp, int what, int extra) {
-        // ... react appropriately ...
-        // The MediaPlayer has moved to the Error state, must be reset!
-    }
-}
-</pre>
-
-<p>It's important to remember that when an error occurs, the {@link android.media.MediaPlayer}
-moves to the <em>Error</em> state (see the documentation for the
-{@link android.media.MediaPlayer MediaPlayer} class for the full state diagram)
-and you must reset it before you can use it again.
-
-
-<h3 id="wakelocks">Using wake locks</h3>
-
-<p>When designing applications that play media
-in the background, the device may go to sleep
-while your service is running. Because the Android system tries to conserve
-battery while the device is sleeping, the system tries to shut off any 
-of the phone's features that are
-not necessary, including the CPU and the WiFi hardware.
-However, if your service is playing or streaming music, you want to prevent
-the system from interfering with your playback.</p>
-
-<p>In order to ensure that your service continues to run under
-those conditions, you have to use "wake locks." A wake lock is a way to signal to
-the system that your application is using some feature that should
-stay available even if the phone is idle.</p>
-
-<p class="caution"><strong>Notice:</strong> You should always use wake locks sparingly and hold them
-only for as long as truly necessary, because they significantly reduce the battery life of the
-device.</p>
-
-<p>To ensure that the CPU continues running while your {@link android.media.MediaPlayer} is
-playing, call the {@link android.media.MediaPlayer#setWakeMode
-setWakeMode()} method when initializing your {@link android.media.MediaPlayer}. Once you do,
-the {@link android.media.MediaPlayer} holds the specified lock while playing and releases the lock
-when paused or stopped:</p>
-
-<pre>
-mMediaPlayer = new MediaPlayer();
-// ... other initialization here ...
-mMediaPlayer.setWakeMode(getApplicationContext(), PowerManager.PARTIAL_WAKE_LOCK);
-</pre>
-
-<p>However, the wake lock acquired in this example guarantees only that the CPU remains awake. If
-you are streaming media over the
-network and you are using Wi-Fi, you probably want to hold a
-{@link android.net.wifi.WifiManager.WifiLock WifiLock} as
-well, which you must acquire and release manually. So, when you start preparing the
-{@link android.media.MediaPlayer} with the remote URL, you should create and acquire the Wi-Fi lock.
-For example:</p>
-
-<pre>
-WifiLock wifiLock = ((WifiManager) getSystemService(Context.WIFI_SERVICE))
-    .createWifiLock(WifiManager.WIFI_MODE_FULL, "mylock");
-
-wifiLock.acquire();
-</pre>
-
-<p>When you pause or stop your media, or when you no longer need the
-network, you should release the lock:</p>
-
-<pre>
-wifiLock.release();
-</pre>
-
-
-<h3 id="foregroundserv">Running as a foreground service</h3>
-
-<p>Services are often used for performing background tasks, such as fetching emails,
-synchronizing data, downloading content, amongst other possibilities. In these
-cases, the user is not actively aware of the service's execution, and probably
-wouldn't even notice if some of these services were interrupted and later restarted.</p>
-
-<p>But consider the case of a service that is playing music. Clearly this is a service that the user
-is actively aware of and the experience would be severely affected by any interruptions.
-Additionally, it's a service that the user will likely wish to interact with during its execution.
-In this case, the service should run as a "foreground service." A
-foreground service holds a higher level of importance within the system&mdash;the system will
-almost never kill the service, because it is of immediate importance to the user. When running
-in the foreground, the service also must provide a status bar notification to ensure that users are
-aware of the running service and allow them to open an activity that can interact with the
-service.</p>
-
-<p>In order to turn your service into a foreground service, you must create a
-{@link android.app.Notification Notification} for the status bar and call
-{@link android.app.Service#startForeground startForeground()} from the {@link
-android.app.Service}. For example:</p>
-
-<pre>String songName;
-// assign the song name to songName
-PendingIntent pi = PendingIntent.getActivity(getApplicationContext(), 0,
-                new Intent(getApplicationContext(), MainActivity.class),
-                PendingIntent.FLAG_UPDATE_CURRENT);
-Notification notification = new Notification();
-notification.tickerText = text;
-notification.icon = R.drawable.play0;
-notification.flags |= Notification.FLAG_ONGOING_EVENT;
-notification.setLatestEventInfo(getApplicationContext(), "MusicPlayerSample",
-                "Playing: " + songName, pi);
-startForeground(NOTIFICATION_ID, notification);
-</pre>
-
-<p>While your service is running in the foreground, the notification you
-configured is visible in the notification area of the device. If the user
-selects the notification, the system invokes the {@link android.app.PendingIntent} you supplied. In
-the example above, it opens an activity ({@code MainActivity}).</p>
-
-<p>Figure 1 shows how your notification appears to the user:</p>
-
-<img src='images/notification1.png' />
-&nbsp;&nbsp;
-<img src='images/notification2.png' />
-<p class="img-caption"><strong>Figure 1.</strong> Screenshots of a foreground service's notification, showing the notification icon in the status bar (left) and the expanded view (right).</p>
-
-<p>You should only hold on to the "foreground service" status while your
-service is actually performing something the user is actively aware of. Once
-that is no longer true, you should release it by calling
-{@link android.app.Service#stopForeground stopForeground()}:</p>
-
-<pre>
-stopForeground(true);
-</pre>
-
-<p>For more information, see the documentation about <a
-href="{@docRoot}guide/topics/fundamentals/services.html#Foreground">Services</a> and
-<a href="{@docRoot}guide/topics/ui/notifiers/notifications.html">Status Bar Notifications</a>.</p>
-
-
-<h3 id="audiofocus">Handling audio focus</h3>
-
-<p>Even though only one activity can run at any given time, Android is a
-multi-tasking environment. This poses a particular challenge to applications
-that use audio, because there is only one audio output and there may be several
-media services competing for its use. Before Android 2.2, there was no built-in
-mechanism to address this issue, which could in some cases lead to a bad user
-experience. For example, when a user is listening to
-music and another application needs to notify the user of something very important,
-the user might not hear the notification tone due to the loud music. Starting with
-Android 2.2, the platform offers a way for applications to negotiate their
-use of the device's audio output. This mechanism is called Audio Focus.</p>
-
-<p>When your application needs to output audio such as music or a notification, 
-you should always request audio focus. Once it has focus, it can use the sound output freely, but it should
-always listen for focus changes. If it is notified that it has lost the audio
-focus, it should immediately either kill the audio or lower it to a quiet level
-(known as "ducking"&mdash;there is a flag that indicates which one is appropriate) and only resume
-loud playback after it receives focus again.</p>
-
-<p>Audio Focus is cooperative in nature. That is, applications are expected
-(and highly encouraged) to comply with the audio focus guidelines, but the
-rules are not enforced by the system. If an application wants to play loud
-music even after losing audio focus, nothing in the system will prevent that.
-However, the user is more likely to have a bad experience and will be more
-likely to uninstall the misbehaving application.</p>
-
-<p>To request audio focus, you must call
-{@link android.media.AudioManager#requestAudioFocus requestAudioFocus()} from the {@link
-android.media.AudioManager}, as the example below demonstrates:</p>
-
-<pre>
-AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
-int result = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
-    AudioManager.AUDIOFOCUS_GAIN);
-
-if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
-    // could not get audio focus.
-}
-</pre>
-
-<p>The first parameter to {@link android.media.AudioManager#requestAudioFocus requestAudioFocus()}
-is an {@link android.media.AudioManager.OnAudioFocusChangeListener
-AudioManager.OnAudioFocusChangeListener},
-whose {@link android.media.AudioManager.OnAudioFocusChangeListener#onAudioFocusChange
-onAudioFocusChange()} method is called whenever there is a change in audio focus. Therefore, you
-should also implement this interface on your service and activities. For example:</p>
-
-<pre>
-class MyService extends Service
-                implements AudioManager.OnAudioFocusChangeListener {
-    // ....
-    public void onAudioFocusChange(int focusChange) {
-        // Do something based on focus change...
-    }
-}
-</pre>
-
-<p>The <code>focusChange</code> parameter tells you how the audio focus has changed, and
-can be one of the following values (they are all constants defined in
-{@link android.media.AudioManager AudioManager}):</p>
-
-<ul>
-<li>{@link android.media.AudioManager#AUDIOFOCUS_GAIN}: You have gained the audio focus.</li>
-
-<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS}: You have lost the audio focus for a
-presumably long time.
-You must stop all audio playback. Because you should expect not to have focus back
-for a long time, this would be a good place to clean up your resources as much
-as possible. For example, you should release the {@link android.media.MediaPlayer}.</li>
-
-<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT}: You have
-temporarily lost audio focus, but should receive it back shortly. You must stop
-all audio playback, but you can keep your resources because you will probably get
-focus back shortly.</li>
-
-<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}: You have temporarily
-lost audio focus,
-but you are allowed to continue to play audio quietly (at a low volume) instead
-of killing audio completely.</li>
-</ul>
-
-<p>Here is an example implementation:</p>
-
-<pre>
-public void onAudioFocusChange(int focusChange) {
-    switch (focusChange) {
-        case AudioManager.AUDIOFOCUS_GAIN:
-            // resume playback
-            if (mMediaPlayer == null) initMediaPlayer();
-            else if (!mMediaPlayer.isPlaying()) mMediaPlayer.start();
-            mMediaPlayer.setVolume(1.0f, 1.0f);
-            break;
-
-        case AudioManager.AUDIOFOCUS_LOSS:
-            // Lost focus for an unbounded amount of time: stop playback and release media player
-            if (mMediaPlayer.isPlaying()) mMediaPlayer.stop();
-            mMediaPlayer.release();
-            mMediaPlayer = null;
-            break;
-
-        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
-            // Lost focus for a short time, but we have to stop
-            // playback. We don't release the media player because playback
-            // is likely to resume
-            if (mMediaPlayer.isPlaying()) mMediaPlayer.pause();
-            break;
-
-        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
-            // Lost focus for a short time, but it's ok to keep playing
-            // at an attenuated level
-            if (mMediaPlayer.isPlaying()) mMediaPlayer.setVolume(0.1f, 0.1f);
-            break;
-    }
-}
-</pre>
-
-<p>Keep in mind that the audio focus APIs are available only with API level 8 (Android 2.2)
-and above, so if you want to support previous
-versions of Android, you should adopt a backward compatibility strategy that
-allows you to use this feature if available, and fall back seamlessly if not.</p>
-
-<p>You can achieve backward compatibility either by calling the audio focus methods by reflection
-or by implementing all the audio focus features in a separate class (say,
-<code>AudioFocusHelper</code>). Here is an example of such a class:</p>
-
-<pre>
-public class AudioFocusHelper implements AudioManager.OnAudioFocusChangeListener {
-    AudioManager mAudioManager;
-
-    // other fields here, you'll probably hold a reference to an interface
-    // that you can use to communicate the focus changes to your Service
-
-    public AudioFocusHelper(Context ctx, /* other arguments here */) {
-        mAudioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
-        // ...
-    }
-
-    public boolean requestFocus() {
-        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
-            mAudioManager.requestAudioFocus(mContext, AudioManager.STREAM_MUSIC,
-            AudioManager.AUDIOFOCUS_GAIN);
-    }
-
-    public boolean abandonFocus() {
-        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
-            mAudioManager.abandonAudioFocus(this);
-    }
-
-    &#64;Override
-    public void onAudioFocusChange(int focusChange) {
-        // let your service know about the focus change
-    }
-}
-</pre>
-
-
-<p>You can create an instance of <code>AudioFocusHelper</code> class only if you detect that
-the system is running API level 8 or above. For example:</p>
-
-<pre>
-if (android.os.Build.VERSION.SDK_INT &gt;= 8) {
-    mAudioFocusHelper = new AudioFocusHelper(getApplicationContext(), this);
-} else {
-    mAudioFocusHelper = null;
-}
-</pre>
-
-
-<h3 id="cleanup">Performing cleanup</h3>
-
-<p>As mentioned earlier, a {@link android.media.MediaPlayer} object can consume a significant
-amount of system resources, so you should keep it only for as long as you need and call
-{@link android.media.MediaPlayer#release release()} when you are done with it. It's important
-to call this cleanup method explicitly rather than rely on system garbage collection because
-it might take some time before the garbage collector reclaims the {@link android.media.MediaPlayer},
-as it's only sensitive to memory needs and not to shortage of other media-related resources.
-So, in the case when you're using a service, you should always override the
-{@link android.app.Service#onDestroy onDestroy()} method to make sure you are releasing
-the {@link android.media.MediaPlayer}:</p>
-
-<pre>
-public class MyService extends Service {
-   MediaPlayer mMediaPlayer;
-   // ...
-
-   &#64;Override
-   public void onDestroy() {
-       if (mMediaPlayer != null) mMediaPlayer.release();
-   }
-}
-</pre>
-
-<p>You should always look for other opportunities to release your {@link android.media.MediaPlayer}
-as well, apart from releasing it when being shut down. For example, if you expect not
-to be able to play media for an extended period of time (after losing audio focus, for example),
-you should definitely release your existing {@link android.media.MediaPlayer} and create it again
-later. On the
-other hand, if you only expect to stop playback for a very short time, you should probably
-hold on to your {@link android.media.MediaPlayer} to avoid the overhead of creating and preparing it
-again.</p>
-
-
-
-<h2 id="noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</h2>
-
-<p>Many well-written applications that play audio automatically stop playback when an event
-occurs that causes the audio to become noisy (ouput through external speakers). For instance,
-this might happen when a user is listening to music through headphones and accidentally
-disconnects the headphones from the device. However, this behavior does not happen automatically.
-If you don't implement this feature, audio plays out of the device's external speakers, which
-might not be what the user wants.</p>
-
-<p>You can ensure your app stops playing music in these situations by handling
-the {@link android.media.AudioManager#ACTION_AUDIO_BECOMING_NOISY} intent, for which you can register a receiver by
-adding the following to your manifest:</p>
-
-<pre>
-&lt;receiver android:name=".MusicIntentReceiver"&gt;
-   &lt;intent-filter&gt;
-      &lt;action android:name="android.media.AUDIO_BECOMING_NOISY" /&gt;
-   &lt;/intent-filter&gt;
-&lt;/receiver&gt;
-</pre>
-
-<p>This registers the <code>MusicIntentReceiver</code> class as a broadcast receiver for that
-intent. You should then implement this class:</p>
-
-<pre>
-public class MusicIntentReceiver implements android.content.BroadcastReceiver {
-   &#64;Override
-   public void onReceive(Context ctx, Intent intent) {
-      if (intent.getAction().equals(
-                    android.media.AudioManager.ACTION_AUDIO_BECOMING_NOISY)) {
-          // signal your service to stop playback
-          // (via an Intent, for instance)
-      }
-   }
-}
-</pre>
-
-
-
-
-<h2 id="viacontentresolver">Retrieving Media from a Content Resolver</h2>
-
-<p>Another feature that may be useful in a media player application is the ability to
-retrieve music that the user has on the device. You can do that by querying the {@link
-android.content.ContentResolver} for external media:</p>
-
-<pre>
-ContentResolver contentResolver = getContentResolver();
-Uri uri = android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
-Cursor cursor = contentResolver.query(uri, null, null, null, null);
-if (cursor == null) {
-    // query failed, handle error.
-} else if (!cursor.moveToFirst()) {
-    // no media on the device
-} else {
-    int titleColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media.TITLE);
-    int idColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media._ID);
-    do {
-       long thisId = cursor.getLong(idColumn);
-       String thisTitle = cursor.getString(titleColumn);
-       // ...process entry...
-    } while (cursor.moveToNext());
-}
-</pre>
-
-<p>To use this with the {@link android.media.MediaPlayer}, you can do this:</p>
-
-<pre>
-long id = /* retrieve it from somewhere */;
-Uri contentUri = ContentUris.withAppendedId(
-        android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, id);
-
-mMediaPlayer = new MediaPlayer();
-mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
-mMediaPlayer.setDataSource(getApplicationContext(), contentUri);
-
-// ...prepare and start...
-</pre>
-
-
-
-<h2 id="jetcontent">Playing JET content</h2>
-
-<p>The Android platform includes a JET engine that lets you add interactive playback of JET audio
-content in your applications. You can create JET content for interactive playback using the
-JetCreator authoring application that ships with the SDK. To play and manage JET content from your
-application, use the {@link android.media.JetPlayer JetPlayer} class.</p>
-
-<p>For a description of JET concepts and instructions on how to use the JetCreator authoring tool,
-see the <a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User
-Manual</a>. The tool is available on Windows, OS X, and Linux platforms (Linux does not
-support auditioning of imported assets like with the Windows and OS X versions).
-</p>
-
-<p>Here's an example of how to set up JET playback from a <code>.jet</code> file stored on the SD card:</p>
-
-<pre>
-JetPlayer jetPlayer = JetPlayer.getJetPlayer();
-jetPlayer.loadJetFile("/sdcard/level1.jet");
-byte segmentId = 0;
-
-// queue segment 5, repeat once, use General MIDI, transpose by -1 octave
-jetPlayer.queueJetSegment(5, -1, 1, -1, 0, segmentId++);
-// queue segment 2
-jetPlayer.queueJetSegment(2, -1, 0, 0, 0, segmentId++);
-
-jetPlayer.play();
-</pre>
-
-<p>The SDK includes an example application &mdash; JetBoy &mdash; that shows how to use {@link
-android.media.JetPlayer JetPlayer} to create an interactive music soundtrack in your game. It also
-illustrates how to use JET events to synchronize music and game logic. The application is located at
-<code>&lt;sdk&gt;/platforms/android-1.5/samples/JetBoy</code>.</p>
-
-
-<h2 id="audiocapture">Performing Audio Capture</h2>
-
-<p>Audio capture from the device is a bit more complicated than audio and video playback, but still fairly simple:</p>
-<ol>
-  <li>Create a new instance of {@link android.media.MediaRecorder android.media.MediaRecorder}.</li>
-  <li>Set the audio source using
-        {@link android.media.MediaRecorder#setAudioSource MediaRecorder.setAudioSource()}. You will probably want to use
-  <code>MediaRecorder.AudioSource.MIC</code>.</li>
-  <li>Set output file format using
-        {@link android.media.MediaRecorder#setOutputFormat MediaRecorder.setOutputFormat()}.
-  </li>
-  <li>Set output file name using
-        {@link android.media.MediaRecorder#setOutputFile MediaRecorder.setOutputFile()}.
-  </li>
-  <li>Set the audio encoder using
-        {@link android.media.MediaRecorder#setAudioEncoder MediaRecorder.setAudioEncoder()}.
-  </li>
-  <li>Call {@link android.media.MediaRecorder#prepare MediaRecorder.prepare()}
-   on the MediaRecorder instance.</li>
-  <li>To start audio capture, call
-  {@link android.media.MediaRecorder#start MediaRecorder.start()}. </li>
-  <li>To stop audio capture, call {@link android.media.MediaRecorder#stop MediaRecorder.stop()}.
-  <li>When you are done with the MediaRecorder instance, call
-{@link android.media.MediaRecorder#release MediaRecorder.release()} on it. Calling
-{@link android.media.MediaRecorder#release MediaRecorder.release()} is always recommended to
-free the resource immediately.</li>
-</ol>
-
-<h3>Example: Record audio and play the recorded audio</h3>
-<p>The example class below illustrates how to set up, start and stop audio capture, and to play the recorded audio file.</p>
-<pre>
-/*
- * The application needs to have the permission to write to external storage
- * if the output file is written to the external storage, and also the
- * permission to record audio. These permissions must be set in the
- * application's AndroidManifest.xml file, with something like:
- *
- * &lt;uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /&gt;
- * &lt;uses-permission android:name="android.permission.RECORD_AUDIO" /&gt;
- *
- */
-package com.android.audiorecordtest;
-
-import android.app.Activity;
-import android.widget.LinearLayout;
-import android.os.Bundle;
-import android.os.Environment;
-import android.view.ViewGroup;
-import android.widget.Button;
-import android.view.View;
-import android.view.View.OnClickListener;
-import android.content.Context;
-import android.util.Log;
-import android.media.MediaRecorder;
-import android.media.MediaPlayer;
-
-import java.io.IOException;
-
-
-public class AudioRecordTest extends Activity
-{
-    private static final String LOG_TAG = "AudioRecordTest";
-    private static String mFileName = null;
-
-    private RecordButton mRecordButton = null;
-    private MediaRecorder mRecorder = null;
-
-    private PlayButton   mPlayButton = null;
-    private MediaPlayer   mPlayer = null;
-
-    private void onRecord(boolean start) {
-        if (start) {
-            startRecording();
-        } else {
-            stopRecording();
-        }
-    }
-
-    private void onPlay(boolean start) {
-        if (start) {
-            startPlaying();
-        } else {
-            stopPlaying();
-        }
-    }
-
-    private void startPlaying() {
-        mPlayer = new MediaPlayer();
-        try {
-            mPlayer.setDataSource(mFileName);
-            mPlayer.prepare();
-            mPlayer.start();
-        } catch (IOException e) {
-            Log.e(LOG_TAG, "prepare() failed");
-        }
-    }
-
-    private void stopPlaying() {
-        mPlayer.release();
-        mPlayer = null;
-    }
-
-    private void startRecording() {
-        mRecorder = new MediaRecorder();
-        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
-        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
-        mRecorder.setOutputFile(mFileName);
-        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
-
-        try {
-            mRecorder.prepare();
-        } catch (IOException e) {
-            Log.e(LOG_TAG, "prepare() failed");
-        }
-
-        mRecorder.start();
-    }
-
-    private void stopRecording() {
-        mRecorder.stop();
-        mRecorder.release();
-        mRecorder = null;
-    }
-
-    class RecordButton extends Button {
-        boolean mStartRecording = true;
-
-        OnClickListener clicker = new OnClickListener() {
-            public void onClick(View v) {
-                onRecord(mStartRecording);
-                if (mStartRecording) {
-                    setText("Stop recording");
-                } else {
-                    setText("Start recording");
-                }
-                mStartRecording = !mStartRecording;
-            }
-        };
-
-        public RecordButton(Context ctx) {
-            super(ctx);
-            setText("Start recording");
-            setOnClickListener(clicker);
-        }
-    }
-
-    class PlayButton extends Button {
-        boolean mStartPlaying = true;
-
-        OnClickListener clicker = new OnClickListener() {
-            public void onClick(View v) {
-                onPlay(mStartPlaying);
-                if (mStartPlaying) {
-                    setText("Stop playing");
-                } else {
-                    setText("Start playing");
-                }
-                mStartPlaying = !mStartPlaying;
-            }
-        };
-
-        public PlayButton(Context ctx) {
-            super(ctx);
-            setText("Start playing");
-            setOnClickListener(clicker);
-        }
-    }
-
-    public AudioRecordTest() {
-        mFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
-        mFileName += "/audiorecordtest.3gp";
-    }
-
-    &#64;Override
-    public void onCreate(Bundle icicle) {
-        super.onCreate(icicle);
-
-        LinearLayout ll = new LinearLayout(this);
-        mRecordButton = new RecordButton(this);
-        ll.addView(mRecordButton,
-            new LinearLayout.LayoutParams(
-                ViewGroup.LayoutParams.WRAP_CONTENT,
-                ViewGroup.LayoutParams.WRAP_CONTENT,
-                0));
-        mPlayButton = new PlayButton(this);
-        ll.addView(mPlayButton,
-            new LinearLayout.LayoutParams(
-                ViewGroup.LayoutParams.WRAP_CONTENT,
-                ViewGroup.LayoutParams.WRAP_CONTENT,
-                0));
-        setContentView(ll);
-    }
-
-    &#64;Override
-    public void onPause() {
-        super.onPause();
-        if (mRecorder != null) {
-            mRecorder.release();
-            mRecorder = null;
-        }
-
-        if (mPlayer != null) {
-            mPlayer.release();
-            mPlayer = null;
-        }
-    }
-}
-</pre>
-
-
-
+  <dt><strong><a href="{@docRoot}guide/topics/media/audio-capture.html">Audio
+Capture</a></strong></dt>
+  <dd>How to record sound in your application.</dd>
+</dl>
\ No newline at end of file
diff --git a/docs/html/guide/topics/media/jetplayer.jd b/docs/html/guide/topics/media/jetplayer.jd
new file mode 100644
index 0000000..f3d55f9
--- /dev/null
+++ b/docs/html/guide/topics/media/jetplayer.jd
@@ -0,0 +1,70 @@
+page.title=JetPlayer
+parent.title=Multimedia and Camera 
+parent.link=index.html
+@jd:body
+
+    <div id="qv-wrapper">
+    <div id="qv">
+
+<h2>In this document</h2>
+<ol>
+<li><a href="#jetcontent">Playing JET content</a>
+</ol>
+
+<h2>Key classes</h2>
+<ol>
+<li>{@link android.media.JetPlayer}</li>
+</ol>
+
+<h2>Related Samples</h2>
+<ol>
+<li><a href="{@docRoot}resources/samples/JetBoy/index.html">JetBoy</a></li>
+</ol>
+
+<h2>See also</h2>
+<ol>
+<li><a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User
+Manual</a></li>
+<li><a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
+<li><a href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a></li>
+<li><a href="{@docRoot}guide/topics/media/mediaplayer.html">MediaPlayer</a></li>
+</ol>
+
+</div>
+</div>
+
+<p>The Android platform includes a JET engine that lets you add interactive playback of JET audio
+content in your applications. You can create JET content for interactive playback using the
+JetCreator authoring application that ships with the SDK. To play and manage JET content from your
+application, use the {@link android.media.JetPlayer JetPlayer} class.</p>
+
+
+<h2 id="jetcontent">Playing JET content</h2>
+
+<p>This section shows you how to write, set up and play JET content. For a description of JET
+concepts and instructions on how to use the JetCreator authoring tool, see the <a
+href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User
+Manual</a>. The tool is available for Windows, OS X, and Linux (unlike the Windows and OS X
+versions, the Linux version does not support auditioning of imported assets).
+</p>
+
+<p>Here's an example of how to set up JET playback from a <code>.jet</code> file stored on the SD
+card:</p>
+
+<pre>
+JetPlayer jetPlayer = JetPlayer.getJetPlayer();
+jetPlayer.loadJetFile("/sdcard/level1.jet");
+byte segmentId = 0;
+
+// queue segment 5, repeat once, use General MIDI, transpose by -1 octave
+jetPlayer.queueJetSegment(5, -1, 1, -1, 0, segmentId++);
+// queue segment 2
+jetPlayer.queueJetSegment(2, -1, 0, 0, 0, segmentId++);
+
+jetPlayer.play();
+</pre>
+
+<p>The SDK includes an example application &mdash; JetBoy &mdash; that shows how to use {@link
+android.media.JetPlayer JetPlayer} to create an interactive music soundtrack in your game. It also
+illustrates how to use JET events to synchronize music and game logic. The application is located at
+<a href="{@docRoot}resources/samples/JetBoy/index.html">JetBoy</a>.</p>
\ No newline at end of file
diff --git a/docs/html/guide/topics/media/mediaplayer.jd b/docs/html/guide/topics/media/mediaplayer.jd
new file mode 100644
index 0000000..b3ca7dd
--- /dev/null
+++ b/docs/html/guide/topics/media/mediaplayer.jd
@@ -0,0 +1,747 @@
+page.title=Media Playback
+parent.title=Multimedia and Camera 
+parent.link=index.html
+@jd:body
+
+    <div id="qv-wrapper">
+    <div id="qv">
+
+<h2>In this document</h2>
+<ol>
+<li><a href="#basics">The Basics</a>
+<li><a href="#manifest">Manifest Declarations</a></li>
+<li><a href="#mediaplayer">Using MediaPlayer</a>
+   <ol>
+      <li><a href='#preparingasync'>Asynchronous Preparation</a></li>
+      <li><a href='#managestate'>Managing State</a></li>
+      <li><a href='#releaseplayer'>Releasing the MediaPlayer</a></li>
+   </ol>
+</li>
+<li><a href="#mpandservices">Using a Service with MediaPlayer</a>
+   <ol>
+      <li><a href="#asyncprepare">Running asynchronously</a></li>
+      <li><a href="#asyncerror">Handling asynchronous errors</a></li>
+      <li><a href="#wakelocks">Using wake locks</a></li>
+      <li><a href="#foregroundserv">Running as a foreground service</a></li>
+      <li><a href="#audiofocus">Handling audio focus</a></li>
+      <li><a href="#cleanup">Performing cleanup</a></li>
+   </ol>
+</li>
+<li><a href="#noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</a>
+<li><a href="#viacontentresolver">Retrieving Media from a Content Resolver</a>
+</ol>
+
+<h2>Key classes</h2>
+<ol>
+<li>{@link android.media.MediaPlayer}</li>
+<li>{@link android.media.AudioManager}</li>
+<li>{@link android.media.SoundPool}</li>
+</ol>
+
+<h2>See also</h2>
+<ol>
+<li><a href="{@docRoot}guide/topics/media/jetplayer.html">JetPlayer</a></li>
+<li><a href="{@docRoot}guide/topics/media/audio-capture.html">Audio Capture</a></li>
+<li><a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
+<li><a href="{@docRoot}guide/topics/data/data-storage.html">Data Storage</a></li>
+</ol>
+
+</div>
+</div>
+
+<p>The Android multimedia framework includes support for playing a variety of common media types, so
+that you can easily integrate audio, video and images into your applications. You can play audio or
+video from media files stored in your application's resources (raw resources), from standalone files
+in the filesystem, or from a data stream arriving over a network connection, all using {@link
+android.media.MediaPlayer} APIs.</p>
+
+<p>This document shows you how to write a media-playing application that interacts with the user and
+the system in order to obtain good performance and a pleasant user experience.</p>
+
+<p class="note"><strong>Note:</strong> You can play back the audio data only to the standard output
+device. Currently, that is the mobile device speaker or a Bluetooth headset. You cannot play sound
+files in the conversation audio during a call.</p>
+
+<h2 id="basics">The Basics</h2>
+<p>The following classes are used to play sound and video in the Android framework:</p>
+
+<dl>
+  <dt>{@link android.media.MediaPlayer}</dt>
+  <dd>This class is the primary API for playing sound and video.</dd>
+  <dt>{@link android.media.AudioManager}</dt>
+  <dd>This class manages audio sources and audio output on a device.</dd>
+</dl>
+
+<h2 id="manifest">Manifest Declarations</h2>
+<p>Before starting development on your application using MediaPlayer, make sure your manifest has
+the appropriate declarations to allow use of related features.</p>
+
+<ul>
+  <li><strong>Internet Permission</strong> - If you are using MediaPlayer to stream network-based
+content, your application must request network access.
+<pre>
+&lt;uses-permission android:name="android.permission.INTERNET" /&gt;
+</pre>
+  </li>
+  <li><strong>Wake Lock Permission</strong> - If your player application needs to keep the screen
+from dimming or the processor from sleeping, or uses the {@link
+android.media.MediaPlayer#setScreenOnWhilePlaying(boolean) MediaPlayer.setScreenOnWhilePlaying()} or
+{@link android.media.MediaPlayer#setWakeMode(android.content.Context, int)
+MediaPlayer.setWakeMode()} methods, you must request this permission.
+<pre>
+&lt;uses-permission android:name="android.permission.WAKE_LOCK" /&gt;
+</pre>
+  </li>
+</ul>
+
+<h2 id="mediaplayer">Using MediaPlayer</h2>
+<p>One of the most important components of the media framework is the
+{@link android.media.MediaPlayer MediaPlayer}
+class. An object of this class can fetch, decode, and play both audio and video
+with minimal setup. It supports several different media sources such as:
+<ul>
+   <li>Local resources</li>
+   <li>Internal URIs, such as one you might obtain from a Content Resolver</li>
+   <li>External URLs (streaming)</li>
+</ul>
+</p>
+
+<p>For a list of media formats that Android supports,
+see the <a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media
+Formats</a> document. </p>
+
+<p>Here is an example
+of how to play audio that's available as a local raw resource (saved in your application's
+{@code res/raw/} directory):</p>
+
+<pre>MediaPlayer mediaPlayer = MediaPlayer.create(context, R.raw.sound_file_1);
+mediaPlayer.start(); // no need to call prepare(); create() does that for you
+</pre>
+
+<p>In this case, a "raw" resource is a file that the system does not
+try to parse in any particular way. However, the content of this resource should not
+be raw audio. It should be a properly encoded and formatted media file in one 
+of the supported formats.</p>
+
+<p>And here is how you might play from a URI available locally in the system
+(that you obtained through a Content Resolver, for instance):</p>
+
+<pre>Uri myUri = ....; // initialize Uri here
+MediaPlayer mediaPlayer = new MediaPlayer();
+mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mediaPlayer.setDataSource(getApplicationContext(), myUri);
+mediaPlayer.prepare();
+mediaPlayer.start();</pre>
+
+<p>Playing from a remote URL via HTTP streaming looks like this:</p>
+
+<pre>String url = "http://........"; // your URL here
+MediaPlayer mediaPlayer = new MediaPlayer();
+mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mediaPlayer.setDataSource(url);
+mediaPlayer.prepare(); // might take long! (for buffering, etc)
+mediaPlayer.start();</pre>
+
+<p class="note"><strong>Note:</strong>
+If you're passing a URL to stream an online media file, the file must be capable of
+progressive download.</p>
+
+<p class="caution"><strong>Caution:</strong> You must either catch or pass
+{@link java.lang.IllegalArgumentException} and {@link java.io.IOException} when using
+{@link android.media.MediaPlayer#setDataSource setDataSource()}, because
+the file you are referencing might not exist.</p>
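+
+<p>As a minimal sketch (assuming the {@code mediaPlayer} and {@code url} variables from the
+snippets above), the call might be wrapped like this:</p>
+
+<pre>
+try {
+    mediaPlayer.setDataSource(url);
+} catch (IllegalArgumentException e) {
+    // the data source was malformed
+} catch (IOException e) {
+    // the data source could not be opened
+}
+</pre>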
+
+<h3 id='preparingasync'>Asynchronous Preparation</h3>
+
+<p>Using {@link android.media.MediaPlayer MediaPlayer} can be straightforward in
+principle. However, it's important to keep in mind that a few more things are
+necessary to integrate it correctly with a typical Android application. For
+example, the call to {@link android.media.MediaPlayer#prepare prepare()} can
+take a long time to execute, because
+it might involve fetching and decoding media data. So, as is the case with any
+method that may take long to execute, you should <strong>never call it from your
+application's UI thread</strong>. Doing that will cause the UI to hang until the method returns,
+which is a very bad user experience and can cause an ANR (Application Not Responding) error. Even if
+you expect your resource to load quickly, remember that anything that takes more than a tenth
+of a second to respond in the UI will cause a noticeable pause and will give
+the user the impression that your application is slow.</p>
+
+<p>To avoid hanging your UI thread, spawn another thread to
+prepare the {@link android.media.MediaPlayer} and notify the main thread when done. However, while
+you could write the threading logic
+yourself, this pattern is so common when using {@link android.media.MediaPlayer} that the framework
+supplies a convenient way to accomplish this task by using the
+{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. This method
+starts preparing the media in the background and returns immediately. When the media
+is done preparing, the {@link android.media.MediaPlayer.OnPreparedListener#onPrepared onPrepared()}
+method of the {@link android.media.MediaPlayer.OnPreparedListener
+MediaPlayer.OnPreparedListener}, configured through
+{@link android.media.MediaPlayer#setOnPreparedListener setOnPreparedListener()} is called.</p>
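+
+<p>A minimal sketch of this pattern (assuming the {@code mediaPlayer} from the earlier snippets
+already has its data source set) might look like this:</p>
+
+<pre>
+mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+    &#64;Override
+    public void onPrepared(MediaPlayer mp) {
+        mp.start(); // safe to start once preparation has completed
+    }
+});
+mediaPlayer.prepareAsync(); // returns immediately; preparation continues in the background
+</pre>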
+
+<h3 id='managestate'>Managing State</h3>
+
+<p>Another aspect of a {@link android.media.MediaPlayer} that you should keep in mind is
+that it's state-based. That is, the {@link android.media.MediaPlayer} has an internal state
+that you must always be aware of when writing your code, because certain operations
+are only valid when the player is in specific states. If you perform an operation while in the
+wrong state, the system may throw an exception or cause other undesirable behaviors.</p>
+
+<p>The documentation in the
+{@link android.media.MediaPlayer MediaPlayer} class shows a complete state diagram
+that clarifies which methods move the {@link android.media.MediaPlayer} from one state to another.
+For example, when you create a new {@link android.media.MediaPlayer}, it is in the <em>Idle</em>
+state. At that point, you should initialize it by calling
+{@link android.media.MediaPlayer#setDataSource setDataSource()}, bringing it
+to the <em>Initialized</em> state. After that, you have to prepare it using either the
+{@link android.media.MediaPlayer#prepare prepare()} or
+{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. When
+the {@link android.media.MediaPlayer} is done preparing, it will then enter the <em>Prepared</em>
+state, which means you can call {@link android.media.MediaPlayer#start start()}
+to make it play the media. At that point, as the diagram illustrates,
+you can move between the <em>Started</em>, <em>Paused</em> and <em>PlaybackCompleted</em> states by
+calling such methods as
+{@link android.media.MediaPlayer#start start()},
+{@link android.media.MediaPlayer#pause pause()}, and
+{@link android.media.MediaPlayer#seekTo seekTo()},
+amongst others. When you
+call {@link android.media.MediaPlayer#stop stop()}, however, notice that you
+cannot call {@link android.media.MediaPlayer#start start()} again until you
+prepare the {@link android.media.MediaPlayer} again.</p>
+
+<p>Always keep <a href='{@docRoot}images/mediaplayer_state_diagram.gif'>the state diagram</a> 
+in mind when writing code that interacts with a
+{@link android.media.MediaPlayer} object, because calling its methods from the wrong state is a
+common cause of bugs.</p>
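+
+<p>As a sketch, the following sequence of calls is annotated with the state each one leaves the
+player in ({@code path} stands in for any valid data source, and exception handling is omitted
+for brevity):</p>
+
+<pre>
+MediaPlayer mediaPlayer = new MediaPlayer(); // Idle
+mediaPlayer.setDataSource(path);             // Initialized
+mediaPlayer.prepare();                       // Prepared
+mediaPlayer.start();                         // Started
+mediaPlayer.pause();                         // Paused
+mediaPlayer.start();                         // Started again
+mediaPlayer.stop();                          // Stopped: start() is not valid here
+mediaPlayer.prepare();                       // Prepared again, so playback can be restarted
+mediaPlayer.start();                         // Started
+</pre>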
+
+<h3 id='releaseplayer'>Releasing the MediaPlayer</h3>
+
+<p>A {@link android.media.MediaPlayer MediaPlayer} can consume valuable
+system resources.
+Therefore, you should always take extra precautions to make sure you are not
+hanging on to a {@link android.media.MediaPlayer} instance longer than necessary. When you
+are done with it, you should always call
+{@link android.media.MediaPlayer#release release()} to make sure any
+system resources allocated to it are properly released. For example, if you are
+using a {@link android.media.MediaPlayer} and your activity receives a call to {@link
+android.app.Activity#onStop onStop()}, you must release the {@link android.media.MediaPlayer},
+because it
+makes little sense to hold on to it while your activity is not interacting with
+the user (unless you are playing media in the background, which is discussed in the next section).
+When your activity is resumed or restarted, of course, you need to
+create a new {@link android.media.MediaPlayer} and prepare it again before resuming playback.</p>
+
+<p>Here's how you should release and then nullify your {@link android.media.MediaPlayer}:</p>
+<pre>
+mediaPlayer.release();
+mediaPlayer = null;
+</pre>
+
+<p>As an example, consider the problems that could happen if you
+forgot to release the {@link android.media.MediaPlayer} when your activity is stopped, but create a
+new one when the activity starts again. As you may know, when the user changes the
+screen orientation (or changes the device configuration in another way), 
+the system handles that by restarting the activity (by default), so you might quickly
+consume all of the system resources as the user
+rotates the device back and forth between portrait and landscape, because at each
+orientation change, you create a new {@link android.media.MediaPlayer} that you never
+release. (For more information about runtime restarts, see <a
+href="{@docRoot}guide/topics/resources/runtime-changes.html">Handling Runtime Changes</a>.)</p>
+
+<p>You may be wondering what happens if you want to continue playing
+"background media" even when the user leaves your activity, much in the same
+way that the built-in Music application behaves. In this case, what you need is
+a {@link android.media.MediaPlayer MediaPlayer} controlled by a {@link android.app.Service}, as
+discussed in <a href="#mpandservices">Using a Service with MediaPlayer</a>.</p>
+
+<h2 id="mpandservices">Using a Service with MediaPlayer</h2>
+
+<p>If you want your media to play in the background even when your application
+is not onscreen&mdash;that is, you want it to continue playing while the user is
+interacting with other applications&mdash;then you must start a
+{@link android.app.Service Service} and control the
+{@link android.media.MediaPlayer MediaPlayer} instance from there.
+You should be careful about this setup, because the user and the system have expectations
+about how an application running a background service should interact with the rest of the
+system. If your application does not fulfill those expectations, the user may
+have a bad experience. This section describes the main issues that you should be
+aware of and offers suggestions about how to approach them.</p>
+
+
+<h3 id="asyncprepare">Running asynchronously</h3>
+
+<p>First of all, like an {@link android.app.Activity Activity}, all work in a
+{@link android.app.Service Service} is done in a single thread by
+default&mdash;in fact, if you're running an activity and a service from the same application, they
+use the same thread (the "main thread") by default. Therefore, services need to
+process incoming intents quickly
+and never perform lengthy computations when responding to them. If any heavy
+work or blocking calls are expected, you must do those tasks asynchronously: either from
+another thread you implement yourself, or using the framework's many facilities
+for asynchronous processing.</p>
+
+<p>For instance, when using a {@link android.media.MediaPlayer} from your main thread,
+you should call {@link android.media.MediaPlayer#prepareAsync prepareAsync()} rather than
+{@link android.media.MediaPlayer#prepare prepare()}, and implement
+a {@link android.media.MediaPlayer.OnPreparedListener MediaPlayer.OnPreparedListener}
+in order to be notified when the preparation is complete and you can start playing.
+For example:</p>
+
+<pre>
+public class MyService extends Service implements MediaPlayer.OnPreparedListener {
+    private static final String ACTION_PLAY = "com.example.action.PLAY";
+    MediaPlayer mMediaPlayer = null;
+
+    public int onStartCommand(Intent intent, int flags, int startId) {
+        ...
+        if (intent.getAction().equals(ACTION_PLAY)) {
+            mMediaPlayer = ... // initialize it here
+            mMediaPlayer.setOnPreparedListener(this);
+            mMediaPlayer.prepareAsync(); // prepare async to not block main thread
+        }
+        return START_STICKY; // keep the service running so playback can continue
+    }
+
+    /** Called when MediaPlayer is ready */
+    public void onPrepared(MediaPlayer player) {
+        player.start();
+    }
+}
+</pre>
+
+
+<h3 id="asyncerror">Handling asynchronous errors</h3>
+
+<p>With synchronous operations, errors would normally
+be signaled with an exception or an error code, but whenever you use asynchronous
+resources, you should make sure your application is notified
+of errors appropriately. In the case of a {@link android.media.MediaPlayer MediaPlayer},
+you can accomplish this by implementing a
+{@link android.media.MediaPlayer.OnErrorListener MediaPlayer.OnErrorListener} and
+setting it in your {@link android.media.MediaPlayer} instance:</p>
+
+<pre>
+public class MyService extends Service implements MediaPlayer.OnErrorListener {
+    MediaPlayer mMediaPlayer;
+
+    public void initMediaPlayer() {
+        // ...initialize the MediaPlayer here...
+
+        mMediaPlayer.setOnErrorListener(this);
+    }
+
+    &#64;Override
+    public boolean onError(MediaPlayer mp, int what, int extra) {
+        // ... react appropriately ...
+        // The MediaPlayer has moved to the Error state, must be reset!
+        return true; // returning true indicates the error was handled here
+    }
+}
+</pre>
+
+<p>It's important to remember that when an error occurs, the {@link android.media.MediaPlayer}
+moves to the <em>Error</em> state (see the documentation for the
+{@link android.media.MediaPlayer MediaPlayer} class for the full state diagram)
+and you must reset it before you can use it again.</p>
+
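+<p>For example, the body of {@code onError()} might reset and reconfigure the player before
+reporting that the error was handled ({@code initMediaPlayer()} is a hypothetical helper that sets
+the data source and prepares the player again):</p>
+
+<pre>
+&#64;Override
+public boolean onError(MediaPlayer mp, int what, int extra) {
+    mp.reset();        // leaves the MediaPlayer in the Idle state
+    initMediaPlayer(); // hypothetical helper: set the data source and prepare again
+    return true;       // true means the error was handled here
+}
+</pre>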
+
+<h3 id="wakelocks">Using wake locks</h3>
+
+<p>When designing applications that play media
+in the background, the device may go to sleep
+while your service is running. Because the Android system tries to conserve
+battery while the device is sleeping, the system tries to shut off any 
+of the phone's features that are
+not necessary, including the CPU and the WiFi hardware.
+However, if your service is playing or streaming music, you want to prevent
+the system from interfering with your playback.</p>
+
+<p>In order to ensure that your service continues to run under
+those conditions, you have to use "wake locks." A wake lock is a way to signal to
+the system that your application is using some feature that should
+stay available even if the phone is idle.</p>
+
+<p class="caution"><strong>Notice:</strong> You should always use wake locks sparingly and hold them
+only for as long as truly necessary, because they significantly reduce the battery life of the
+device.</p>
+
+<p>To ensure that the CPU continues running while your {@link android.media.MediaPlayer} is
+playing, call the {@link android.media.MediaPlayer#setWakeMode
+setWakeMode()} method when initializing your {@link android.media.MediaPlayer}. Once you do,
+the {@link android.media.MediaPlayer} holds the specified lock while playing and releases the lock
+when paused or stopped:</p>
+
+<pre>
+mMediaPlayer = new MediaPlayer();
+// ... other initialization here ...
+mMediaPlayer.setWakeMode(getApplicationContext(), PowerManager.PARTIAL_WAKE_LOCK);
+</pre>
+
+<p>However, the wake lock acquired in this example guarantees only that the CPU remains awake. If
+you are streaming media over the
+network and you are using Wi-Fi, you probably want to hold a
+{@link android.net.wifi.WifiManager.WifiLock WifiLock} as
+well, which you must acquire and release manually. So, when you start preparing the
+{@link android.media.MediaPlayer} with the remote URL, you should create and acquire the Wi-Fi lock.
+For example:</p>
+
+<pre>
+WifiLock wifiLock = ((WifiManager) getSystemService(Context.WIFI_SERVICE))
+    .createWifiLock(WifiManager.WIFI_MODE_FULL, "mylock");
+
+wifiLock.acquire();
+</pre>
+
+<p>When you pause or stop your media, or when you no longer need the
+network, you should release the lock:</p>
+
+<pre>
+wifiLock.release();
+</pre>
+
+
+<h3 id="foregroundserv">Running as a foreground service</h3>
+
+<p>Services are often used for performing background tasks, such as fetching emails,
+synchronizing data, downloading content, amongst other possibilities. In these
+cases, the user is not actively aware of the service's execution, and probably
+wouldn't even notice if some of these services were interrupted and later restarted.</p>
+
+<p>But consider the case of a service that is playing music. Clearly this is a service that the user
+is actively aware of and the experience would be severely affected by any interruptions.
+Additionally, it's a service that the user will likely wish to interact with during its execution.
+In this case, the service should run as a "foreground service." A
+foreground service holds a higher level of importance within the system&mdash;the system will
+almost never kill the service, because it is of immediate importance to the user. When running
+in the foreground, the service also must provide a status bar notification to ensure that users are
+aware of the running service and allow them to open an activity that can interact with the
+service.</p>
+
+<p>In order to turn your service into a foreground service, you must create a
+{@link android.app.Notification Notification} for the status bar and call
+{@link android.app.Service#startForeground startForeground()} from the {@link
+android.app.Service}. For example:</p>
+
+<pre>String songName;
+// assign the song name to songName
+PendingIntent pi = PendingIntent.getActivity(getApplicationContext(), 0,
+                new Intent(getApplicationContext(), MainActivity.class),
+                PendingIntent.FLAG_UPDATE_CURRENT);
+Notification notification = new Notification();
+notification.tickerText = songName;
+notification.icon = R.drawable.play0;
+notification.flags |= Notification.FLAG_ONGOING_EVENT;
+notification.setLatestEventInfo(getApplicationContext(), "MusicPlayerSample",
+                "Playing: " + songName, pi);
+startForeground(NOTIFICATION_ID, notification);
+</pre>
+
+<p>While your service is running in the foreground, the notification you
+configured is visible in the notification area of the device. If the user
+selects the notification, the system invokes the {@link android.app.PendingIntent} you supplied. In
+the example above, it opens an activity ({@code MainActivity}).</p>
+
+<p>Figure 1 shows how your notification appears to the user:</p>
+
+<img src='images/notification1.png' />
+&nbsp;&nbsp;
+<img src='images/notification2.png' />
+<p class="img-caption"><strong>Figure 1.</strong> Screenshots of a foreground service's
+notification, showing the notification icon in the status bar (left) and the expanded view
+(right).</p>
+
+<p>You should only hold on to the "foreground service" status while your
+service is actually performing something the user is actively aware of. Once
+that is no longer true, you should release it by calling
+{@link android.app.Service#stopForeground stopForeground()}:</p>
+
+<pre>
+stopForeground(true);
+</pre>
+
+<p>For more information, see the documentation about <a
+href="{@docRoot}guide/topics/fundamentals/services.html#Foreground">Services</a> and
+<a href="{@docRoot}guide/topics/ui/notifiers/notifications.html">Status Bar Notifications</a>.</p>
+
+
+<h3 id="audiofocus">Handling audio focus</h3>
+
+<p>Even though only one activity can run at any given time, Android is a
+multi-tasking environment. This poses a particular challenge to applications
+that use audio, because there is only one audio output and there may be several
+media services competing for its use. Before Android 2.2, there was no built-in
+mechanism to address this issue, which could in some cases lead to a bad user
+experience. For example, when a user is listening to
+music and another application needs to notify the user of something very important,
+the user might not hear the notification tone due to the loud music. Starting with
+Android 2.2, the platform offers a way for applications to negotiate their
+use of the device's audio output. This mechanism is called Audio Focus.</p>
+
+<p>When your application needs to output audio such as music or a notification, 
+you should always request audio focus. Once it has focus, it can use the sound output freely, but it
+should
+always listen for focus changes. If it is notified that it has lost the audio
+focus, it should immediately either kill the audio or lower it to a quiet level
+(known as "ducking"&mdash;there is a flag that indicates which one is appropriate) and only resume
+loud playback after it receives focus again.</p>
+
+<p>Audio Focus is cooperative in nature. That is, applications are expected
+(and highly encouraged) to comply with the audio focus guidelines, but the
+rules are not enforced by the system. If an application wants to play loud
+music even after losing audio focus, nothing in the system will prevent that.
+However, the user is more likely to have a bad experience and will be more
+likely to uninstall the misbehaving application.</p>
+
+<p>To request audio focus, you must call
+{@link android.media.AudioManager#requestAudioFocus requestAudioFocus()} from the {@link
+android.media.AudioManager}, as the example below demonstrates:</p>
+
+<pre>
+AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+int result = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
+    AudioManager.AUDIOFOCUS_GAIN);
+
+if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+    // could not get audio focus.
+}
+</pre>
+
+<p>The first parameter to {@link android.media.AudioManager#requestAudioFocus requestAudioFocus()}
+is an {@link android.media.AudioManager.OnAudioFocusChangeListener
+AudioManager.OnAudioFocusChangeListener},
+whose {@link android.media.AudioManager.OnAudioFocusChangeListener#onAudioFocusChange
+onAudioFocusChange()} method is called whenever there is a change in audio focus. Therefore, you
+should also implement this interface on your service and activities. For example:</p>
+
+<pre>
+class MyService extends Service
+                implements AudioManager.OnAudioFocusChangeListener {
+    // ....
+    public void onAudioFocusChange(int focusChange) {
+        // Do something based on focus change...
+    }
+}
+</pre>
+
+<p>The <code>focusChange</code> parameter tells you how the audio focus has changed, and
+can be one of the following values (they are all constants defined in
+{@link android.media.AudioManager AudioManager}):</p>
+
+<ul>
+<li>{@link android.media.AudioManager#AUDIOFOCUS_GAIN}: You have gained the audio focus.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS}: You have lost the audio focus for a
+presumably long time.
+You must stop all audio playback. Because you should expect not to have focus back
+for a long time, this would be a good place to clean up your resources as much
+as possible. For example, you should release the {@link android.media.MediaPlayer}.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT}: You have
+temporarily lost audio focus, but should receive it back shortly. You must stop
+all audio playback, but you can keep your resources because you will probably get
+focus back shortly.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}: You have temporarily
+lost audio focus,
+but you are allowed to continue to play audio quietly (at a low volume) instead
+of killing audio completely.</li>
+</ul>
+
+<p>Here is an example implementation:</p>
+
+<pre>
+public void onAudioFocusChange(int focusChange) {
+    switch (focusChange) {
+        case AudioManager.AUDIOFOCUS_GAIN:
+            // resume playback
+            if (mMediaPlayer == null) initMediaPlayer();
+            else if (!mMediaPlayer.isPlaying()) mMediaPlayer.start();
+            mMediaPlayer.setVolume(1.0f, 1.0f);
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS:
+            // Lost focus for an unbounded amount of time: stop playback and release media player
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.stop();
+            mMediaPlayer.release();
+            mMediaPlayer = null;
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+            // Lost focus for a short time, but we have to stop
+            // playback. We don't release the media player because playback
+            // is likely to resume
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.pause();
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+            // Lost focus for a short time, but it's ok to keep playing
+            // at an attenuated level
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.setVolume(0.1f, 0.1f);
+            break;
+    }
+}
+</pre>
+
+<p>Keep in mind that the audio focus APIs are available only with API level 8 (Android 2.2)
+and above, so if you want to support previous
+versions of Android, you should adopt a backward compatibility strategy that
+allows you to use this feature if available, and fall back seamlessly if not.</p>
+
+<p>You can achieve backward compatibility either by calling the audio focus methods by reflection
+or by implementing all the audio focus features in a separate class (say,
+<code>AudioFocusHelper</code>). Here is an example of such a class:</p>
+
+<pre>
+public class AudioFocusHelper implements AudioManager.OnAudioFocusChangeListener {
+    AudioManager mAudioManager;
+
+    // other fields here, you'll probably hold a reference to an interface
+    // that you can use to communicate the focus changes to your Service
+
+    public AudioFocusHelper(Context ctx, /* other arguments here */) {
+        mAudioManager = (AudioManager) ctx.getSystemService(Context.AUDIO_SERVICE);
+        // ...
+    }
+
+    public boolean requestFocus() {
+        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
+            mAudioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
+            AudioManager.AUDIOFOCUS_GAIN);
+    }
+
+    public boolean abandonFocus() {
+        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
+            mAudioManager.abandonAudioFocus(this);
+    }
+
+    &#64;Override
+    public void onAudioFocusChange(int focusChange) {
+        // let your service know about the focus change
+    }
+}
+</pre>
+
+
+<p>You can create an instance of <code>AudioFocusHelper</code> class only if you detect that
+the system is running API level 8 or above. For example:</p>
+
+<pre>
+if (android.os.Build.VERSION.SDK_INT &gt;= 8) {
+    mAudioFocusHelper = new AudioFocusHelper(getApplicationContext(), this);
+} else {
+    mAudioFocusHelper = null;
+}
+</pre>
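+
+<p>Then, wherever your code requests or abandons focus, it can check for the helper first (a
+sketch, assuming the {@code mAudioFocusHelper} field shown above):</p>
+
+<pre>
+if (mAudioFocusHelper == null || mAudioFocusHelper.requestFocus()) {
+    // Either this platform version has no audio focus APIs, or focus was granted:
+    // it is safe to start (or resume) playback here.
+}
+</pre>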
+
+
+<h3 id="cleanup">Performing cleanup</h3>
+
+<p>As mentioned earlier, a {@link android.media.MediaPlayer} object can consume a significant
+amount of system resources, so you should keep it only for as long as you need and call
+{@link android.media.MediaPlayer#release release()} when you are done with it. It's important
+to call this cleanup method explicitly rather than rely on system garbage collection because
+it might take some time before the garbage collector reclaims the {@link android.media.MediaPlayer},
+as it's only sensitive to memory needs and not to shortage of other media-related resources.
+So, in the case when you're using a service, you should always override the
+{@link android.app.Service#onDestroy onDestroy()} method to make sure you are releasing
+the {@link android.media.MediaPlayer}:</p>
+
+<pre>
+public class MyService extends Service {
+   MediaPlayer mMediaPlayer;
+   // ...
+
+   &#64;Override
+   public void onDestroy() {
+       if (mMediaPlayer != null) mMediaPlayer.release();
+   }
+}
+</pre>
+
+<p>You should always look for other opportunities to release your {@link android.media.MediaPlayer}
+as well, apart from releasing it when being shut down. For example, if you expect not
+to be able to play media for an extended period of time (after losing audio focus, for example),
+you should definitely release your existing {@link android.media.MediaPlayer} and create it again
+later. On the
+other hand, if you only expect to stop playback for a very short time, you should probably
+hold on to your {@link android.media.MediaPlayer} to avoid the overhead of creating and preparing it
+again.</p>
+
+
+
+<h2 id="noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</h2>
+
+<p>Many well-written applications that play audio automatically stop playback when an event
+occurs that causes the audio to become noisy (output through external speakers). For instance,
+this might happen when a user is listening to music through headphones and accidentally
+disconnects the headphones from the device. However, this behavior does not happen automatically.
+If you don't implement this feature, audio plays out of the device's external speakers, which
+might not be what the user wants.</p>
+
+<p>You can ensure your app stops playing music in these situations by handling
+the {@link android.media.AudioManager#ACTION_AUDIO_BECOMING_NOISY} intent, for which you can
+register a receiver by
+adding the following to your manifest:</p>
+
+<pre>
+&lt;receiver android:name=".MusicIntentReceiver"&gt;
+   &lt;intent-filter&gt;
+      &lt;action android:name="android.media.AUDIO_BECOMING_NOISY" /&gt;
+   &lt;/intent-filter&gt;
+&lt;/receiver&gt;
+</pre>
+
+<p>This registers the <code>MusicIntentReceiver</code> class as a broadcast receiver for that
+intent. You should then implement this class:</p>
+
+<pre>
+public class MusicIntentReceiver extends android.content.BroadcastReceiver {
+   &#64;Override
+   public void onReceive(Context ctx, Intent intent) {
+      if (intent.getAction().equals(
+                    android.media.AudioManager.ACTION_AUDIO_BECOMING_NOISY)) {
+          // signal your service to stop playback
+          // (via an Intent, for instance)
+      }
+   }
+}
+</pre>
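+
+<p>One way to do that signaling, sketched here with a hypothetical {@code MusicService} class and
+action string, is to send an explicit intent to your service from {@code onReceive()}:</p>
+
+<pre>
+Intent stopIntent = new Intent(ctx, MusicService.class); // MusicService is your playback service
+stopIntent.setAction("com.example.action.STOP_PLAYBACK"); // hypothetical action your service checks
+ctx.startService(stopIntent);
+</pre>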
+
+
+
+
+<h2 id="viacontentresolver">Retrieving Media from a Content Resolver</h2>
+
+<p>Another feature that may be useful in a media player application is the ability to
+retrieve music that the user has on the device. You can do that by querying the {@link
+android.content.ContentResolver} for external media:</p>
+
+<pre>
+ContentResolver contentResolver = getContentResolver();
+Uri uri = android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
+Cursor cursor = contentResolver.query(uri, null, null, null, null);
+if (cursor == null) {
+    // query failed, handle error.
+} else if (!cursor.moveToFirst()) {
+    // no media on the device
+} else {
+    int titleColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media.TITLE);
+    int idColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media._ID);
+    do {
+       long thisId = cursor.getLong(idColumn);
+       String thisTitle = cursor.getString(titleColumn);
+       // ...process entry...
+    } while (cursor.moveToNext());
+}
+</pre>
+
+<p>To use this with the {@link android.media.MediaPlayer}, you can do this:</p>
+
+<pre>
+long id = /* retrieve it from somewhere */;
+Uri contentUri = ContentUris.withAppendedId(
+        android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, id);
+
+mMediaPlayer = new MediaPlayer();
+mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mMediaPlayer.setDataSource(getApplicationContext(), contentUri);
+
+// ...prepare and start...
+</pre>
\ No newline at end of file
diff --git a/docs/html/sdk/oem-usb.jd b/docs/html/sdk/oem-usb.jd
index ad3be4a..88bf008 100644
--- a/docs/html/sdk/oem-usb.jd
+++ b/docs/html/sdk/oem-usb.jd
@@ -55,6 +55,14 @@
 </tr>
   <tr>
     <td>
+      Fujitsu
+    </td>
+    <td><a
+href="http://www.fmworld.net/product/phone/sp/android/develop/">http://www.fmworld.net/product/phone/sp/android/develop/</a>
+    </td>
+  </tr>
+  <tr>
+    <td>
       Fujitsu Toshiba
     </td>
     <td><a
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index a520a6a..cd2c0a3 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -504,6 +504,25 @@
     // Example value: "true" or "false". Read only.
     static const char KEY_VIDEO_SNAPSHOT_SUPPORTED[];
 
+    // The state of the video stabilization. If set to true, both the
+    // preview stream and the recorded video stream are stabilized by
+    // the camera. Only valid to set if KEY_VIDEO_STABILIZATION_SUPPORTED is
+    // set to true.
+    //
+    // The value of this key can be changed any time the camera is
+    // open. If preview or recording is active, it is acceptable for
+    // there to be a slight video glitch when video stabilization is
+    // toggled on and off.
+    //
+    // This only stabilizes video streams (between-frames stabilization), and
+    // has no effect on still image capture.
+    static const char KEY_VIDEO_STABILIZATION[];
+
+    // Returns true if video stabilization is supported. That is, applications
+    // can set KEY_VIDEO_STABILIZATION to true and have a stabilized preview
+    // stream and record stabilized videos.
+    static const char KEY_VIDEO_STABILIZATION_SUPPORTED[];
+
     // Value for KEY_ZOOM_SUPPORTED or KEY_SMOOTH_ZOOM_SUPPORTED.
     static const char TRUE[];
     static const char FALSE[];
diff --git a/include/surfaceflinger/ISurfaceComposer.h b/include/surfaceflinger/ISurfaceComposer.h
index e0f4cf9..ea022a6 100644
--- a/include/surfaceflinger/ISurfaceComposer.h
+++ b/include/surfaceflinger/ISurfaceComposer.h
@@ -80,6 +80,7 @@
         eOrientation90          = 1,
         eOrientation180         = 2,
         eOrientation270         = 3,
+        eOrientationUnchanged   = 4,
         eOrientationSwapMask    = 0x01
     };
     
@@ -101,15 +102,8 @@
     virtual sp<IMemoryHeap> getCblk() const = 0;
 
     /* open/close transactions. requires ACCESS_SURFACE_FLINGER permission */
-    virtual void setTransactionState(const Vector<ComposerState>& state) = 0;
-
-    /* [un]freeze display. requires ACCESS_SURFACE_FLINGER permission */
-    virtual status_t freezeDisplay(DisplayID dpy, uint32_t flags) = 0;
-    virtual status_t unfreezeDisplay(DisplayID dpy, uint32_t flags) = 0;
-
-    /* Set display orientation. requires ACCESS_SURFACE_FLINGER permission
-     * No flags are currently defined.  Set flags to 0. */
-    virtual int setOrientation(DisplayID dpy, int orientation, uint32_t flags) = 0;
+    virtual void setTransactionState(const Vector<ComposerState>& state,
+            int orientation) = 0;
 
     /* signal that we're done booting.
      * Requires ACCESS_SURFACE_FLINGER permission
diff --git a/include/surfaceflinger/SurfaceComposerClient.h b/include/surfaceflinger/SurfaceComposerClient.h
index ace0735..14e5b23 100644
--- a/include/surfaceflinger/SurfaceComposerClient.h
+++ b/include/surfaceflinger/SurfaceComposerClient.h
@@ -195,4 +195,3 @@
 }; // namespace android
 
 #endif // ANDROID_SF_SURFACE_COMPOSER_CLIENT_H
-
diff --git a/include/utils/Singleton.h b/include/utils/Singleton.h
index e1ee8eb..a42ce21 100644
--- a/include/utils/Singleton.h
+++ b/include/utils/Singleton.h
@@ -20,12 +20,13 @@
 #include <stdint.h>
 #include <sys/types.h>
 #include <utils/threads.h>
+#include <cutils/compiler.h>
 
 namespace android {
 // ---------------------------------------------------------------------------
 
 template <typename TYPE>
-class Singleton
+class ANDROID_API Singleton
 {
 public:
     static TYPE& getInstance() {
diff --git a/libs/camera/CameraParameters.cpp b/libs/camera/CameraParameters.cpp
index 0dcab6b..c6087b4 100644
--- a/libs/camera/CameraParameters.cpp
+++ b/libs/camera/CameraParameters.cpp
@@ -88,6 +88,8 @@
 const char CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW[] = "max-num-detected-faces-sw";
 const char CameraParameters::KEY_RECORDING_HINT[] = "recording-hint";
 const char CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED[] = "video-snapshot-supported";
+const char CameraParameters::KEY_VIDEO_STABILIZATION[] = "video-stabilization";
+const char CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED[] = "video-stabilization-supported";
 
 const char CameraParameters::TRUE[] = "true";
 const char CameraParameters::FALSE[] = "false";
diff --git a/libs/gui/ISurfaceComposer.cpp b/libs/gui/ISurfaceComposer.cpp
index 030a83e..eb90147 100644
--- a/libs/gui/ISurfaceComposer.cpp
+++ b/libs/gui/ISurfaceComposer.cpp
@@ -78,7 +78,8 @@
         return interface_cast<IMemoryHeap>(reply.readStrongBinder());
     }
 
-    virtual void setTransactionState(const Vector<ComposerState>& state)
+    virtual void setTransactionState(const Vector<ComposerState>& state,
+            int orientation)
     {
         Parcel data, reply;
         data.writeInterfaceToken(ISurfaceComposer::getInterfaceDescriptor());
@@ -88,38 +89,8 @@
         for ( ; b != e ; ++b ) {
             b->write(data);
         }
-        remote()->transact(BnSurfaceComposer::SET_TRANSACTION_STATE, data, &reply);
-    }
-
-    virtual status_t freezeDisplay(DisplayID dpy, uint32_t flags)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(ISurfaceComposer::getInterfaceDescriptor());
-        data.writeInt32(dpy);
-        data.writeInt32(flags);
-        remote()->transact(BnSurfaceComposer::FREEZE_DISPLAY, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual status_t unfreezeDisplay(DisplayID dpy, uint32_t flags)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(ISurfaceComposer::getInterfaceDescriptor());
-        data.writeInt32(dpy);
-        data.writeInt32(flags);
-        remote()->transact(BnSurfaceComposer::UNFREEZE_DISPLAY, data, &reply);
-        return reply.readInt32();
-    }
-
-    virtual int setOrientation(DisplayID dpy, int orientation, uint32_t flags)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(ISurfaceComposer::getInterfaceDescriptor());
-        data.writeInt32(dpy);
         data.writeInt32(orientation);
-        data.writeInt32(flags);
-        remote()->transact(BnSurfaceComposer::SET_ORIENTATION, data, &reply);
-        return reply.readInt32();
+        remote()->transact(BnSurfaceComposer::SET_TRANSACTION_STATE, data, &reply);
     }
 
     virtual void bootFinished()
@@ -232,26 +203,8 @@
                 s.read(data);
                 state.add(s);
             }
-            setTransactionState(state);
-        } break;
-        case SET_ORIENTATION: {
-            CHECK_INTERFACE(ISurfaceComposer, data, reply);
-            DisplayID dpy = data.readInt32();
             int orientation = data.readInt32();
-            uint32_t flags = data.readInt32();
-            reply->writeInt32( setOrientation(dpy, orientation, flags) );
-        } break;
-        case FREEZE_DISPLAY: {
-            CHECK_INTERFACE(ISurfaceComposer, data, reply);
-            DisplayID dpy = data.readInt32();
-            uint32_t flags = data.readInt32();
-            reply->writeInt32( freezeDisplay(dpy, flags) );
-        } break;
-        case UNFREEZE_DISPLAY: {
-            CHECK_INTERFACE(ISurfaceComposer, data, reply);
-            DisplayID dpy = data.readInt32();
-            uint32_t flags = data.readInt32();
-            reply->writeInt32( unfreezeDisplay(dpy, flags) );
+            setTransactionState(state, orientation);
         } break;
         case BOOT_FINISHED: {
             CHECK_INTERFACE(ISurfaceComposer, data, reply);
diff --git a/libs/gui/SurfaceComposerClient.cpp b/libs/gui/SurfaceComposerClient.cpp
index 00a4bf6..5f3d608 100644
--- a/libs/gui/SurfaceComposerClient.cpp
+++ b/libs/gui/SurfaceComposerClient.cpp
@@ -91,8 +91,10 @@
 
     mutable Mutex               mLock;
     SortedVector<ComposerState> mStates;
+    int                         mOrientation;
 
-    Composer() : Singleton<Composer>() { }
+    Composer() : Singleton<Composer>(),
+        mOrientation(ISurfaceComposer::eOrientationUnchanged) { }
 
     void closeGlobalTransactionImpl();
 
@@ -119,6 +121,7 @@
     status_t setFreezeTint(
             const sp<SurfaceComposerClient>& client, SurfaceID id,
             uint32_t tint);
+    status_t setOrientation(int orientation);
 
     static void closeGlobalTransaction() {
         Composer::getInstance().closeGlobalTransactionImpl();
@@ -133,14 +136,18 @@
     sp<ISurfaceComposer> sm(getComposerService());
 
     Vector<ComposerState> transaction;
+    int orientation;
 
     { // scope for the lock
         Mutex::Autolock _l(mLock);
         transaction = mStates;
         mStates.clear();
+
+        orientation = mOrientation;
+        mOrientation = ISurfaceComposer::eOrientationUnchanged;
     }
 
-   sm->setTransactionState(transaction);
+   sm->setTransactionState(transaction, orientation);
 }
 
 layer_state_t* Composer::getLayerStateLocked(
@@ -260,6 +267,12 @@
     return NO_ERROR;
 }
 
+status_t Composer::setOrientation(int orientation) {
+    Mutex::Autolock _l(mLock);
+    mOrientation = orientation;
+    return NO_ERROR;
+}
+
 // ---------------------------------------------------------------------------
 
 SurfaceComposerClient::SurfaceComposerClient()
@@ -427,6 +440,12 @@
     return getComposer().setMatrix(this, id, dsdx, dtdx, dsdy, dtdy);
 }
 
+status_t SurfaceComposerClient::setOrientation(DisplayID dpy,
+        int orientation, uint32_t flags)
+{
+    return Composer::getInstance().setOrientation(orientation);
+}
+
 // ----------------------------------------------------------------------------
 
 status_t SurfaceComposerClient::getDisplayInfo(
@@ -491,21 +510,14 @@
 
 status_t SurfaceComposerClient::freezeDisplay(DisplayID dpy, uint32_t flags)
 {
-    sp<ISurfaceComposer> sm(getComposerService());
-    return sm->freezeDisplay(dpy, flags);
+    // This has been made a no-op because it can cause Gralloc buffer deadlocks.
+    return NO_ERROR;
 }
 
 status_t SurfaceComposerClient::unfreezeDisplay(DisplayID dpy, uint32_t flags)
 {
-    sp<ISurfaceComposer> sm(getComposerService());
-    return sm->unfreezeDisplay(dpy, flags);
-}
-
-int SurfaceComposerClient::setOrientation(DisplayID dpy,
-        int orientation, uint32_t flags)
-{
-    sp<ISurfaceComposer> sm(getComposerService());
-    return sm->setOrientation(dpy, orientation, flags);
+    // This has been made a no-op because it can cause Gralloc buffer deadlocks.
+    return NO_ERROR;
 }
 
 // ----------------------------------------------------------------------------
@@ -572,4 +584,3 @@
 
 // ----------------------------------------------------------------------------
 }; // namespace android
-
diff --git a/libs/hwui/Android.mk b/libs/hwui/Android.mk
index a98e4cd..9bfc94c 100644
--- a/libs/hwui/Android.mk
+++ b/libs/hwui/Android.mk
@@ -39,6 +39,7 @@
 		external/skia/include/utils
 
 	LOCAL_CFLAGS += -DUSE_OPENGL_RENDERER
+	LOCAL_CFLAGS += -fvisibility=hidden
 	LOCAL_MODULE_CLASS := SHARED_LIBRARIES
 	LOCAL_SHARED_LIBRARIES := libcutils libutils libGLESv2 libskia libui
 	LOCAL_MODULE := libhwui
diff --git a/libs/hwui/Caches.h b/libs/hwui/Caches.h
index cdcbf21..9b0d7c6 100644
--- a/libs/hwui/Caches.h
+++ b/libs/hwui/Caches.h
@@ -23,6 +23,8 @@
 
 #include <utils/Singleton.h>
 
+#include <cutils/compiler.h>
+
 #include "Extensions.h"
 #include "FontRenderer.h"
 #include "GammaFontRenderer.h"
@@ -82,7 +84,7 @@
 // Caches
 ///////////////////////////////////////////////////////////////////////////////
 
-class Caches: public Singleton<Caches> {
+class ANDROID_API Caches: public Singleton<Caches> {
     Caches();
     ~Caches();
 
diff --git a/libs/hwui/DisplayListLogBuffer.h b/libs/hwui/DisplayListLogBuffer.h
index bf16f29..5d689bb 100644
--- a/libs/hwui/DisplayListLogBuffer.h
+++ b/libs/hwui/DisplayListLogBuffer.h
@@ -18,6 +18,7 @@
 #define ANDROID_HWUI_DISPLAY_LIST_LOG_BUFFER_H
 
 #include <utils/Singleton.h>
+
 #include <stdio.h>
 
 namespace android {
diff --git a/libs/hwui/DisplayListRenderer.h b/libs/hwui/DisplayListRenderer.h
index 8cd7fea..ab475bf 100644
--- a/libs/hwui/DisplayListRenderer.h
+++ b/libs/hwui/DisplayListRenderer.h
@@ -26,6 +26,8 @@
 #include <SkTDArray.h>
 #include <SkTSearch.h>
 
+#include <cutils/compiler.h>
+
 #include "DisplayListLogBuffer.h"
 #include "OpenGLRenderer.h"
 #include "utils/Functor.h"
@@ -58,7 +60,7 @@
 class DisplayList {
 public:
     DisplayList(const DisplayListRenderer& recorder);
-    ~DisplayList();
+    ANDROID_API ~DisplayList();
 
     // IMPORTANT: Update the initialization of OP_NAMES in the .cpp file
     //            when modifying this file
@@ -107,13 +109,13 @@
 
     void initFromDisplayListRenderer(const DisplayListRenderer& recorder, bool reusing = false);
 
-    size_t getSize();
+    ANDROID_API size_t getSize();
 
     bool replay(OpenGLRenderer& renderer, Rect& dirty, uint32_t level = 0);
 
     void output(OpenGLRenderer& renderer, uint32_t level = 0);
 
-    static void outputLogBuffer(int fd);
+    ANDROID_API static void outputLogBuffer(int fd);
 
     void setRenderable(bool renderable) {
         mIsRenderable = renderable;
@@ -230,75 +232,76 @@
  */
 class DisplayListRenderer: public OpenGLRenderer {
 public:
-    DisplayListRenderer();
-    ~DisplayListRenderer();
+    ANDROID_API DisplayListRenderer();
+    virtual ~DisplayListRenderer();
 
-    DisplayList* getDisplayList(DisplayList* displayList);
+    ANDROID_API DisplayList* getDisplayList(DisplayList* displayList);
 
-    void setViewport(int width, int height);
-    void prepareDirty(float left, float top, float right, float bottom, bool opaque);
-    void finish();
+    virtual void setViewport(int width, int height);
+    virtual void prepareDirty(float left, float top, float right, float bottom, bool opaque);
+    virtual void finish();
 
-    bool callDrawGLFunction(Functor *functor, Rect& dirty);
+    virtual bool callDrawGLFunction(Functor *functor, Rect& dirty);
 
-    void interrupt();
-    void resume();
+    virtual void interrupt();
+    virtual void resume();
 
-    int save(int flags);
-    void restore();
-    void restoreToCount(int saveCount);
+    virtual int save(int flags);
+    virtual void restore();
+    virtual void restoreToCount(int saveCount);
 
-    int saveLayer(float left, float top, float right, float bottom,
+    virtual int saveLayer(float left, float top, float right, float bottom,
             SkPaint* p, int flags);
-    int saveLayerAlpha(float left, float top, float right, float bottom,
+    virtual int saveLayerAlpha(float left, float top, float right, float bottom,
                 int alpha, int flags);
 
-    void translate(float dx, float dy);
-    void rotate(float degrees);
-    void scale(float sx, float sy);
-    void skew(float sx, float sy);
+    virtual void translate(float dx, float dy);
+    virtual void rotate(float degrees);
+    virtual void scale(float sx, float sy);
+    virtual void skew(float sx, float sy);
 
-    void setMatrix(SkMatrix* matrix);
-    void concatMatrix(SkMatrix* matrix);
+    virtual void setMatrix(SkMatrix* matrix);
+    virtual void concatMatrix(SkMatrix* matrix);
 
-    bool clipRect(float left, float top, float right, float bottom, SkRegion::Op op);
+    virtual bool clipRect(float left, float top, float right, float bottom, SkRegion::Op op);
 
-    bool drawDisplayList(DisplayList* displayList, uint32_t width, uint32_t height,
+    virtual bool drawDisplayList(DisplayList* displayList, uint32_t width, uint32_t height,
             Rect& dirty, uint32_t level = 0);
-    void drawLayer(Layer* layer, float x, float y, SkPaint* paint);
-    void drawBitmap(SkBitmap* bitmap, float left, float top, SkPaint* paint);
-    void drawBitmap(SkBitmap* bitmap, SkMatrix* matrix, SkPaint* paint);
-    void drawBitmap(SkBitmap* bitmap, float srcLeft, float srcTop,
+    virtual void drawLayer(Layer* layer, float x, float y, SkPaint* paint);
+    virtual void drawBitmap(SkBitmap* bitmap, float left, float top, SkPaint* paint);
+    virtual void drawBitmap(SkBitmap* bitmap, SkMatrix* matrix, SkPaint* paint);
+    virtual void drawBitmap(SkBitmap* bitmap, float srcLeft, float srcTop,
             float srcRight, float srcBottom, float dstLeft, float dstTop,
             float dstRight, float dstBottom, SkPaint* paint);
-    void drawBitmapMesh(SkBitmap* bitmap, int meshWidth, int meshHeight,
+    virtual void drawBitmapMesh(SkBitmap* bitmap, int meshWidth, int meshHeight,
             float* vertices, int* colors, SkPaint* paint);
-    void drawPatch(SkBitmap* bitmap, const int32_t* xDivs, const int32_t* yDivs,
+    virtual void drawPatch(SkBitmap* bitmap, const int32_t* xDivs, const int32_t* yDivs,
             const uint32_t* colors, uint32_t width, uint32_t height, int8_t numColors,
             float left, float top, float right, float bottom, SkPaint* paint);
-    void drawColor(int color, SkXfermode::Mode mode);
-    void drawRect(float left, float top, float right, float bottom, SkPaint* paint);
-    void drawRoundRect(float left, float top, float right, float bottom,
+    virtual void drawColor(int color, SkXfermode::Mode mode);
+    virtual void drawRect(float left, float top, float right, float bottom, SkPaint* paint);
+    virtual void drawRoundRect(float left, float top, float right, float bottom,
             float rx, float ry, SkPaint* paint);
-    void drawCircle(float x, float y, float radius, SkPaint* paint);
-    void drawOval(float left, float top, float right, float bottom, SkPaint* paint);
-    void drawArc(float left, float top, float right, float bottom,
+    virtual void drawCircle(float x, float y, float radius, SkPaint* paint);
+    virtual void drawOval(float left, float top, float right, float bottom, SkPaint* paint);
+    virtual void drawArc(float left, float top, float right, float bottom,
             float startAngle, float sweepAngle, bool useCenter, SkPaint* paint);
-    void drawPath(SkPath* path, SkPaint* paint);
-    void drawLines(float* points, int count, SkPaint* paint);
-    void drawPoints(float* points, int count, SkPaint* paint);
-    void drawText(const char* text, int bytesCount, int count, float x, float y, SkPaint* paint);
+    virtual void drawPath(SkPath* path, SkPaint* paint);
+    virtual void drawLines(float* points, int count, SkPaint* paint);
+    virtual void drawPoints(float* points, int count, SkPaint* paint);
+    virtual void drawText(const char* text, int bytesCount, int count, float x, float y,
+            SkPaint* paint);
 
-    void resetShader();
-    void setupShader(SkiaShader* shader);
+    virtual void resetShader();
+    virtual void setupShader(SkiaShader* shader);
 
-    void resetColorFilter();
-    void setupColorFilter(SkiaColorFilter* filter);
+    virtual void resetColorFilter();
+    virtual void setupColorFilter(SkiaColorFilter* filter);
 
-    void resetShadow();
-    void setupShadow(float radius, float dx, float dy, int color);
+    virtual void resetShadow();
+    virtual void setupShadow(float radius, float dx, float dy, int color);
 
-    void reset();
+    ANDROID_API void reset();
 
     const SkWriter32& writeStream() const {
         return mWriter;
diff --git a/libs/hwui/LayerRenderer.cpp b/libs/hwui/LayerRenderer.cpp
index 07995085..dfcc5ea 100644
--- a/libs/hwui/LayerRenderer.cpp
+++ b/libs/hwui/LayerRenderer.cpp
@@ -31,6 +31,12 @@
 // Rendering
 ///////////////////////////////////////////////////////////////////////////////
 
+LayerRenderer::LayerRenderer(Layer* layer): mLayer(layer) {
+}
+
+LayerRenderer::~LayerRenderer() {
+}
+
 void LayerRenderer::prepareDirty(float left, float top, float right, float bottom, bool opaque) {
     LAYER_RENDERER_LOGD("Rendering into layer, fbo = %d", mLayer->getFbo());
 
diff --git a/libs/hwui/LayerRenderer.h b/libs/hwui/LayerRenderer.h
index 2246573..6104301 100644
--- a/libs/hwui/LayerRenderer.h
+++ b/libs/hwui/LayerRenderer.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HWUI_LAYER_RENDERER_H
 #define ANDROID_HWUI_LAYER_RENDERER_H
 
+#include <cutils/compiler.h>
+
 #include "OpenGLRenderer.h"
 #include "Layer.h"
 
@@ -42,27 +44,24 @@
 
 class LayerRenderer: public OpenGLRenderer {
 public:
-    LayerRenderer(Layer* layer): mLayer(layer) {
-    }
+    ANDROID_API LayerRenderer(Layer* layer);
+    virtual ~LayerRenderer();
 
-    ~LayerRenderer() {
-    }
+    virtual void prepareDirty(float left, float top, float right, float bottom, bool opaque);
+    virtual void finish();
 
-    void prepareDirty(float left, float top, float right, float bottom, bool opaque);
-    void finish();
+    virtual bool hasLayer();
+    virtual Region* getRegion();
+    virtual GLint getTargetFbo();
 
-    bool hasLayer();
-    Region* getRegion();
-    GLint getTargetFbo();
-
-    static Layer* createTextureLayer(bool isOpaque);
-    static Layer* createLayer(uint32_t width, uint32_t height, bool isOpaque = false);
-    static bool resizeLayer(Layer* layer, uint32_t width, uint32_t height);
-    static void updateTextureLayer(Layer* layer, uint32_t width, uint32_t height,
+    ANDROID_API static Layer* createTextureLayer(bool isOpaque);
+    ANDROID_API static Layer* createLayer(uint32_t width, uint32_t height, bool isOpaque = false);
+    ANDROID_API static bool resizeLayer(Layer* layer, uint32_t width, uint32_t height);
+    ANDROID_API static void updateTextureLayer(Layer* layer, uint32_t width, uint32_t height,
             bool isOpaque, GLenum renderTarget, float* transform);
-    static void destroyLayer(Layer* layer);
-    static void destroyLayerDeferred(Layer* layer);
-    static bool copyLayer(Layer* layer, SkBitmap* bitmap);
+    ANDROID_API static void destroyLayer(Layer* layer);
+    ANDROID_API static void destroyLayerDeferred(Layer* layer);
+    ANDROID_API static bool copyLayer(Layer* layer, SkBitmap* bitmap);
 
 private:
     void generateMesh();
diff --git a/libs/hwui/Matrix.h b/libs/hwui/Matrix.h
index 56fd37d..22220a9 100644
--- a/libs/hwui/Matrix.h
+++ b/libs/hwui/Matrix.h
@@ -19,6 +19,8 @@
 
 #include <SkMatrix.h>
 
+#include <cutils/compiler.h>
+
 #include "Rect.h"
 
 namespace android {
@@ -28,7 +30,7 @@
 // Classes
 ///////////////////////////////////////////////////////////////////////////////
 
-class Matrix4 {
+class ANDROID_API Matrix4 {
 public:
     float data[16];
 
diff --git a/libs/hwui/OpenGLRenderer.h b/libs/hwui/OpenGLRenderer.h
index 14b22b3..2fc88e1 100644
--- a/libs/hwui/OpenGLRenderer.h
+++ b/libs/hwui/OpenGLRenderer.h
@@ -31,6 +31,8 @@
 #include <utils/RefBase.h>
 #include <utils/Vector.h>
 
+#include <cutils/compiler.h>
+
 #include "Debug.h"
 #include "Extensions.h"
 #include "Matrix.h"
@@ -57,12 +59,12 @@
  */
 class OpenGLRenderer {
 public:
-    OpenGLRenderer();
+    ANDROID_API OpenGLRenderer();
     virtual ~OpenGLRenderer();
 
     virtual void setViewport(int width, int height);
 
-    void prepare(bool opaque);
+    ANDROID_API void prepare(bool opaque);
     virtual void prepareDirty(float left, float top, float right, float bottom, bool opaque);
     virtual void finish();
 
@@ -72,7 +74,7 @@
 
     virtual bool callDrawGLFunction(Functor *functor, Rect& dirty);
 
-    int getSaveCount() const;
+    ANDROID_API int getSaveCount() const;
     virtual int save(int flags);
     virtual void restore();
     virtual void restoreToCount(int saveCount);
@@ -87,12 +89,12 @@
     virtual void scale(float sx, float sy);
     virtual void skew(float sx, float sy);
 
-    void getMatrix(SkMatrix* matrix);
+    ANDROID_API void getMatrix(SkMatrix* matrix);
     virtual void setMatrix(SkMatrix* matrix);
     virtual void concatMatrix(SkMatrix* matrix);
 
-    const Rect& getClipBounds();
-    bool quickReject(float left, float top, float right, float bottom);
+    ANDROID_API const Rect& getClipBounds();
+    ANDROID_API bool quickReject(float left, float top, float right, float bottom);
     virtual bool clipRect(float left, float top, float right, float bottom, SkRegion::Op op);
 
     virtual bool drawDisplayList(DisplayList* displayList, uint32_t width, uint32_t height,
diff --git a/libs/hwui/ResourceCache.h b/libs/hwui/ResourceCache.h
index 2a38910..8cf466b 100644
--- a/libs/hwui/ResourceCache.h
+++ b/libs/hwui/ResourceCache.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HWUI_RESOURCE_CACHE_H
 #define ANDROID_HWUI_RESOURCE_CACHE_H
 
+#include <cutils/compiler.h>
+
 #include <SkBitmap.h>
 #include <SkiaColorFilter.h>
 #include <SkiaShader.h>
@@ -49,7 +51,7 @@
     ResourceType resourceType;
 };
 
-class ResourceCache {
+class ANDROID_API ResourceCache {
     KeyedVector<void *, ResourceReference *>* mCache;
 public:
     ResourceCache();
diff --git a/libs/hwui/SkiaColorFilter.h b/libs/hwui/SkiaColorFilter.h
index 1bf475c..2feb834 100644
--- a/libs/hwui/SkiaColorFilter.h
+++ b/libs/hwui/SkiaColorFilter.h
@@ -20,6 +20,8 @@
 #include <GLES2/gl2.h>
 #include <SkColorFilter.h>
 
+#include <cutils/compiler.h>
+
 #include "ProgramCache.h"
 #include "Extensions.h"
 
@@ -45,7 +47,7 @@
         kBlend,
     };
 
-    SkiaColorFilter(SkColorFilter *skFilter, Type type, bool blend);
+    ANDROID_API SkiaColorFilter(SkColorFilter *skFilter, Type type, bool blend);
     virtual ~SkiaColorFilter();
 
     virtual void describe(ProgramDescription& description, const Extensions& extensions) = 0;
@@ -79,7 +81,7 @@
  * A color filter that multiplies the source color with a matrix and adds a vector.
  */
 struct SkiaColorMatrixFilter: public SkiaColorFilter {
-    SkiaColorMatrixFilter(SkColorFilter *skFilter, float* matrix, float* vector);
+    ANDROID_API SkiaColorMatrixFilter(SkColorFilter *skFilter, float* matrix, float* vector);
     ~SkiaColorMatrixFilter();
 
     void describe(ProgramDescription& description, const Extensions& extensions);
@@ -95,7 +97,7 @@
  * another fixed value. Ignores the alpha channel of both arguments.
  */
 struct SkiaLightingFilter: public SkiaColorFilter {
-    SkiaLightingFilter(SkColorFilter *skFilter, int multiply, int add);
+    ANDROID_API SkiaLightingFilter(SkColorFilter *skFilter, int multiply, int add);
 
     void describe(ProgramDescription& description, const Extensions& extensions);
     void setupProgram(Program* program);
@@ -110,7 +112,7 @@
  * and PorterDuff blending mode.
  */
 struct SkiaBlendFilter: public SkiaColorFilter {
-    SkiaBlendFilter(SkColorFilter *skFilter, int color, SkXfermode::Mode mode);
+    ANDROID_API SkiaBlendFilter(SkColorFilter *skFilter, int color, SkXfermode::Mode mode);
 
     void describe(ProgramDescription& description, const Extensions& extensions);
     void setupProgram(Program* program);
diff --git a/libs/hwui/SkiaShader.h b/libs/hwui/SkiaShader.h
index 89dd131..2de9a93 100644
--- a/libs/hwui/SkiaShader.h
+++ b/libs/hwui/SkiaShader.h
@@ -22,6 +22,8 @@
 
 #include <GLES2/gl2.h>
 
+#include <cutils/compiler.h>
+
 #include "Extensions.h"
 #include "ProgramCache.h"
 #include "TextureCache.h"
@@ -52,8 +54,8 @@
         kCompose
     };
 
-    SkiaShader(Type type, SkShader* key, SkShader::TileMode tileX, SkShader::TileMode tileY,
-            SkMatrix* matrix, bool blend);
+    ANDROID_API SkiaShader(Type type, SkShader* key, SkShader::TileMode tileX,
+            SkShader::TileMode tileY, SkMatrix* matrix, bool blend);
     virtual ~SkiaShader();
 
     virtual SkiaShader* copy() = 0;
@@ -139,7 +141,7 @@
  * A shader that draws a bitmap.
  */
 struct SkiaBitmapShader: public SkiaShader {
-    SkiaBitmapShader(SkBitmap* bitmap, SkShader* key, SkShader::TileMode tileX,
+    ANDROID_API SkiaBitmapShader(SkBitmap* bitmap, SkShader* key, SkShader::TileMode tileX,
             SkShader::TileMode tileY, SkMatrix* matrix, bool blend);
     SkiaShader* copy();
 
@@ -169,8 +171,8 @@
  * A shader that draws a linear gradient.
  */
 struct SkiaLinearGradientShader: public SkiaShader {
-    SkiaLinearGradientShader(float* bounds, uint32_t* colors, float* positions, int count,
-            SkShader* key, SkShader::TileMode tileMode, SkMatrix* matrix, bool blend);
+    ANDROID_API SkiaLinearGradientShader(float* bounds, uint32_t* colors, float* positions,
+            int count, SkShader* key, SkShader::TileMode tileMode, SkMatrix* matrix, bool blend);
     ~SkiaLinearGradientShader();
     SkiaShader* copy();
 
@@ -193,8 +195,8 @@
  * A shader that draws a sweep gradient.
  */
 struct SkiaSweepGradientShader: public SkiaShader {
-    SkiaSweepGradientShader(float x, float y, uint32_t* colors, float* positions, int count,
-            SkShader* key, SkMatrix* matrix, bool blend);
+    ANDROID_API SkiaSweepGradientShader(float x, float y, uint32_t* colors, float* positions,
+            int count, SkShader* key, SkMatrix* matrix, bool blend);
     ~SkiaSweepGradientShader();
     SkiaShader* copy();
 
@@ -218,8 +220,9 @@
  * A shader that draws a circular gradient.
  */
 struct SkiaCircularGradientShader: public SkiaSweepGradientShader {
-    SkiaCircularGradientShader(float x, float y, float radius, uint32_t* colors, float* positions,
-            int count, SkShader* key,SkShader::TileMode tileMode, SkMatrix* matrix, bool blend);
+    ANDROID_API SkiaCircularGradientShader(float x, float y, float radius, uint32_t* colors,
+            float* positions, int count, SkShader* key, SkShader::TileMode tileMode,
+            SkMatrix* matrix, bool blend);
     SkiaShader* copy();
 
     void describe(ProgramDescription& description, const Extensions& extensions);
@@ -233,7 +236,8 @@
  * A shader that draws two shaders, composited with an xfermode.
  */
 struct SkiaComposeShader: public SkiaShader {
-    SkiaComposeShader(SkiaShader* first, SkiaShader* second, SkXfermode::Mode mode, SkShader* key);
+    ANDROID_API SkiaComposeShader(SkiaShader* first, SkiaShader* second, SkXfermode::Mode mode,
+            SkShader* key);
     ~SkiaComposeShader();
     SkiaShader* copy();
 
diff --git a/libs/rs/driver/rsdBcc.cpp b/libs/rs/driver/rsdBcc.cpp
index 5fd5c35..269703c 100644
--- a/libs/rs/driver/rsdBcc.cpp
+++ b/libs/rs/driver/rsdBcc.cpp
@@ -226,6 +226,7 @@
     RsdHal * dc = (RsdHal *)mtls->rsc->mHal.drv;
     uint32_t sig = mtls->sig;
 
+    outer_foreach_t fn = dc->mForEachLaunch[sig];
     while (1) {
         uint32_t slice = (uint32_t)android_atomic_inc(&mtls->mSliceNum);
         uint32_t yStart = mtls->yStart + slice * mtls->mSliceSize;
@@ -239,16 +240,10 @@
         //LOGE("usr ptr in %p,  out %p", mtls->ptrIn, mtls->ptrOut);
         for (p.y = yStart; p.y < yEnd; p.y++) {
             uint32_t offset = mtls->dimX * p.y;
-            uint8_t *xPtrOut = mtls->ptrOut + (mtls->eStrideOut * offset);
-            const uint8_t *xPtrIn = mtls->ptrIn + (mtls->eStrideIn * offset);
-
-            for (p.x = mtls->xStart; p.x < mtls->xEnd; p.x++) {
-                p.in = xPtrIn;
-                p.out = xPtrOut;
-                dc->mForEachLaunch[sig](&mtls->script->mHal.info.root, &p);
-                xPtrIn += mtls->eStrideIn;
-                xPtrOut += mtls->eStrideOut;
-            }
+            p.out = mtls->ptrOut + (mtls->eStrideOut * offset);
+            p.in = mtls->ptrIn + (mtls->eStrideIn * offset);
+            fn(&mtls->script->mHal.info.root, &p, mtls->xStart, mtls->xEnd,
+               mtls->eStrideIn, mtls->eStrideOut);
         }
     }
 }
@@ -262,6 +257,7 @@
     RsdHal * dc = (RsdHal *)mtls->rsc->mHal.drv;
     uint32_t sig = mtls->sig;
 
+    outer_foreach_t fn = dc->mForEachLaunch[sig];
     while (1) {
         uint32_t slice = (uint32_t)android_atomic_inc(&mtls->mSliceNum);
         uint32_t xStart = mtls->xStart + slice * mtls->mSliceSize;
@@ -273,15 +269,10 @@
 
         //LOGE("usr idx %i, x %i,%i  y %i,%i", idx, mtls->xStart, mtls->xEnd, yStart, yEnd);
         //LOGE("usr ptr in %p,  out %p", mtls->ptrIn, mtls->ptrOut);
-        uint8_t *xPtrOut = mtls->ptrOut + (mtls->eStrideOut * xStart);
-        const uint8_t *xPtrIn = mtls->ptrIn + (mtls->eStrideIn * xStart);
-        for (p.x = xStart; p.x < xEnd; p.x++) {
-            p.in = xPtrIn;
-            p.out = xPtrOut;
-            dc->mForEachLaunch[sig](&mtls->script->mHal.info.root, &p);
-            xPtrIn += mtls->eStrideIn;
-            xPtrOut += mtls->eStrideOut;
-        }
+        p.out = mtls->ptrOut + (mtls->eStrideOut * xStart);
+        p.in = mtls->ptrIn + (mtls->eStrideIn * xStart);
+        fn(&mtls->script->mHal.info.root, &p, mtls->xStart, mtls->xEnd,
+           mtls->eStrideIn, mtls->eStrideOut);
     }
 }
 
@@ -392,22 +383,17 @@
         uint32_t sig = mtls.sig;
 
         //LOGE("launch 3");
+        outer_foreach_t fn = dc->mForEachLaunch[sig];
         for (p.ar[0] = mtls.arrayStart; p.ar[0] < mtls.arrayEnd; p.ar[0]++) {
             for (p.z = mtls.zStart; p.z < mtls.zEnd; p.z++) {
                 for (p.y = mtls.yStart; p.y < mtls.yEnd; p.y++) {
                     uint32_t offset = mtls.dimX * mtls.dimY * mtls.dimZ * p.ar[0] +
                                       mtls.dimX * mtls.dimY * p.z +
                                       mtls.dimX * p.y;
-                    uint8_t *xPtrOut = mtls.ptrOut + (mtls.eStrideOut * offset);
-                    const uint8_t *xPtrIn = mtls.ptrIn + (mtls.eStrideIn * offset);
-
-                    for (p.x = mtls.xStart; p.x < mtls.xEnd; p.x++) {
-                        p.in = xPtrIn;
-                        p.out = xPtrOut;
-                        dc->mForEachLaunch[sig](&s->mHal.info.root, &p);
-                        xPtrIn += mtls.eStrideIn;
-                        xPtrOut += mtls.eStrideOut;
-                    }
+                    p.out = mtls.ptrOut + (mtls.eStrideOut * offset);
+                    p.in = mtls.ptrIn + (mtls.eStrideIn * offset);
+                    fn(&mtls.script->mHal.info.root, &p, mtls.xStart, mtls.xEnd,
+                       mtls.eStrideIn, mtls.eStrideOut);
                 }
             }
         }
diff --git a/libs/rs/driver/rsdCore.cpp b/libs/rs/driver/rsdCore.cpp
index f8107d9..247f4dc 100644
--- a/libs/rs/driver/rsdCore.cpp
+++ b/libs/rs/driver/rsdCore.cpp
@@ -292,75 +292,136 @@
 }
 
 static void rsdForEach17(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, uint32_t);
     (*(fe*)vRoot)(p->in, p->y);
 }
 
 static void rsdForEach18(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(void *, uint32_t);
     (*(fe*)vRoot)(p->out, p->y);
 }
 
 static void rsdForEach19(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, void *, uint32_t);
     (*(fe*)vRoot)(p->in, p->out, p->y);
 }
 
 static void rsdForEach21(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, const void *, uint32_t);
     (*(fe*)vRoot)(p->in, p->usr, p->y);
 }
 
 static void rsdForEach22(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(void *, const void *, uint32_t);
     (*(fe*)vRoot)(p->out, p->usr, p->y);
 }
 
 static void rsdForEach23(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, void *, const void *, uint32_t);
     (*(fe*)vRoot)(p->in, p->out, p->usr, p->y);
 }
 
 static void rsdForEach25(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->in, p->x, p->y);
+    const uint8_t *pin = (const uint8_t *)p->in;
+    uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pin, x, y);
+        pin += instep;
+    }
 }
 
 static void rsdForEach26(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->out, p->x, p->y);
+    uint8_t *pout = (uint8_t *)p->out;
+    uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pout, x, y);
+        pout += outstep;
+    }
 }
 
 static void rsdForEach27(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->in, p->out, p->x, p->y);
+    uint8_t *pout = (uint8_t *)p->out;
+    const uint8_t *pin = (const uint8_t *)p->in;
+    uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pin, pout, x, y);
+        pin += instep;
+        pout += outstep;
+    }
 }
 
 static void rsdForEach29(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, const void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->in, p->usr, p->x, p->y);
+    const uint8_t *pin = (const uint8_t *)p->in;
+    const void *usr = p->usr;
+    const uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pin, usr, x, y);
+        pin += instep;
+    }
 }
 
 static void rsdForEach30(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(void *, const void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->out, p->usr, p->x, p->y);
+    uint8_t *pout = (uint8_t *)p->out;
+    const void *usr = p->usr;
+    const uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pout, usr, x, y);
+        pout += outstep;
+    }
 }
 
 static void rsdForEach31(const void *vRoot,
-        const android::renderscript::RsForEachStubParamStruct *p) {
+        const android::renderscript::RsForEachStubParamStruct *p,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep) {
     typedef void (*fe)(const void *, void *, const void *, uint32_t, uint32_t);
-    (*(fe*)vRoot)(p->in, p->out, p->usr, p->x, p->y);
+    uint8_t *pout = (uint8_t *)p->out;
+    const uint8_t *pin = (const uint8_t *)p->in;
+    const void *usr = p->usr;
+    const uint32_t y = p->y;
+    for (uint32_t x = x1; x < x2; x++) {
+        (*(fe*)vRoot)(pin, pout, usr, x, y);
+        pin += instep;
+        pout += outstep;
+    }
 }
 
 
diff --git a/libs/rs/driver/rsdCore.h b/libs/rs/driver/rsdCore.h
index 159b72a..ce86d11 100644
--- a/libs/rs/driver/rsdCore.h
+++ b/libs/rs/driver/rsdCore.h
@@ -28,7 +28,9 @@
 typedef void (*WorkerCallback_t)(void *usr, uint32_t idx);
 
 typedef void (*outer_foreach_t)(const void *,
-    const android::renderscript::RsForEachStubParamStruct *);
+    const android::renderscript::RsForEachStubParamStruct *,
+                                uint32_t x1, uint32_t x2,
+                                uint32_t instep, uint32_t outstep);
 
 typedef struct RsdSymbolTableRec {
     const char * mName;
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index e13464e..72f1282 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -724,8 +724,10 @@
         const uint8_t *data, size_t size) {
     LOGV("onPayloadData mStreamType=0x%02x", mStreamType);
 
-    CHECK(PTS_DTS_flags == 2 || PTS_DTS_flags == 3);
-    int64_t timeUs = mProgram->convertPTSToTimestamp(PTS);
+    int64_t timeUs = 0ll;  // no presentation timestamp available.
+    if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
+        timeUs = mProgram->convertPTSToTimestamp(PTS);
+    }
 
     status_t err = mQueue->appendData(data, size, timeUs);
 
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index f8a1d84..a56da36 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -444,6 +444,10 @@
         }
     }
 
+    if (timeUs == 0ll) {
+        LOGV("Returning 0 timestamp");
+    }
+
     return timeUs;
 }
 
diff --git a/packages/SystemUI/AndroidManifest.xml b/packages/SystemUI/AndroidManifest.xml
index a2452c4..64c54d9 100644
--- a/packages/SystemUI/AndroidManifest.xml
+++ b/packages/SystemUI/AndroidManifest.xml
@@ -101,5 +101,21 @@
             android:taskAffinity="com.android.systemui.net"
             android:excludeFromRecents="true" />
 
+        <!-- started from ... somewhere -->
+        <activity
+            android:name=".Nyandroid"
+            android:exported="true"
+            android:label="Nyandroid"
+            android:icon="@drawable/nyandroid04"
+            android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen"
+            android:hardwareAccelerated="true"
+            android:launchMode="singleInstance"
+            android:excludeFromRecents="true">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.DEFAULT" />
+                <category android:name="android.intent.category.DREAM" />
+            </intent-filter>
+        </activity>
     </application>
 </manifest>
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back.png
index 3adcbec..4a1d37e 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_highlight.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_highlight.png
index d7a591c..9378fac 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_highlight.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_highlight.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png
index 277dcb8..6e84546 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_inout.png
index edc1760..c56905e 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png
index fbc6b99..11ffbde 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png
index fb938e8..2bb923e 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png
index 2d35517..783ad175 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png
index fe68c3c..e499f9d 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back.png
index 49411bd..39e3df0 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_highlight.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_highlight.png
index 77924f0..b4920c3 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_highlight.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_highlight.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png
index 000e98b..31c0936 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png
index 62b940a..7e9b752 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png
index 5beb543..3209234d 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png
index f70d315..95c56ed 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png
index be9953f..11b9a93 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png
index de20bdd..0f85ca0 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid00.png b/packages/SystemUI/res/drawable-nodpi/nyandroid00.png
new file mode 100644
index 0000000..6cea873
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid00.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid01.png b/packages/SystemUI/res/drawable-nodpi/nyandroid01.png
new file mode 100644
index 0000000..82b8a21
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid01.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid02.png b/packages/SystemUI/res/drawable-nodpi/nyandroid02.png
new file mode 100644
index 0000000..fde0033
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid02.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid03.png b/packages/SystemUI/res/drawable-nodpi/nyandroid03.png
new file mode 100644
index 0000000..54c5f46
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid03.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid04.png b/packages/SystemUI/res/drawable-nodpi/nyandroid04.png
new file mode 100644
index 0000000..35e5ab5
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid04.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid05.png b/packages/SystemUI/res/drawable-nodpi/nyandroid05.png
new file mode 100644
index 0000000..d3eaace
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid05.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid06.png b/packages/SystemUI/res/drawable-nodpi/nyandroid06.png
new file mode 100644
index 0000000..0e0d3b1
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid06.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid07.png b/packages/SystemUI/res/drawable-nodpi/nyandroid07.png
new file mode 100644
index 0000000..edb0b17
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid07.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid08.png b/packages/SystemUI/res/drawable-nodpi/nyandroid08.png
new file mode 100644
index 0000000..10fc4f6
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid08.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid09.png b/packages/SystemUI/res/drawable-nodpi/nyandroid09.png
new file mode 100644
index 0000000..57ade54
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid09.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid10.png b/packages/SystemUI/res/drawable-nodpi/nyandroid10.png
new file mode 100644
index 0000000..36feb2f
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid10.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/nyandroid11.png b/packages/SystemUI/res/drawable-nodpi/nyandroid11.png
new file mode 100644
index 0000000..125935b
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/nyandroid11.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star0.png b/packages/SystemUI/res/drawable-nodpi/star0.png
new file mode 100644
index 0000000..f2ca960
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star0.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star1.png b/packages/SystemUI/res/drawable-nodpi/star1.png
new file mode 100644
index 0000000..69ef4da
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star1.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star2.png b/packages/SystemUI/res/drawable-nodpi/star2.png
new file mode 100644
index 0000000..b95968a
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star2.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star3.png b/packages/SystemUI/res/drawable-nodpi/star3.png
new file mode 100644
index 0000000..ad0f589
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star3.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star4.png b/packages/SystemUI/res/drawable-nodpi/star4.png
new file mode 100644
index 0000000..934c45b
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star4.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-nodpi/star5.png b/packages/SystemUI/res/drawable-nodpi/star5.png
new file mode 100644
index 0000000..46a4435
--- /dev/null
+++ b/packages/SystemUI/res/drawable-nodpi/star5.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_in.png
index 8a3d90c..3d67766 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_inout.png
index 45dda51c..b74e070 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_out.png
index 18e019c..24485e1 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_in.png
index cb8ed3a..390d500 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_inout.png
index ab4ad05..78998f9 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_out.png
index 956b6c1..c539615 100644
--- a/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-hdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_in.png
index 9d95f17..5c38d45 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_inout.png
index e68d57d..6a79695 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_out.png
index 4ac361d9..99dbe1b 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_in.png
index 5e7ecdc..6a73a89 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_inout.png
index 462fad4..7042f2b 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_out.png
index d284c02..3da781e 100644
--- a/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-mdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_in.png
index 4a5e701..cf63e24 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_inout.png
index 9a08949..8f68e1f 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_out.png
index 314f422..894c63b 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_in.png
index 4e0a48a..1ec5b49 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_inout.png
index 4eeae1d..9ca3ca8 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_out.png
index 1a6f1ef..74241e0 100644
--- a/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-sw600dp-xhdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_back.png b/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_back.png
index d853993..faeee29 100644
--- a/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_back.png
+++ b/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_back.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_highlight.png b/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_highlight.png
index 2e6e3ac..f7e7102 100644
--- a/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_highlight.png
+++ b/packages/SystemUI/res/drawable-xhdpi/ic_sysbar_highlight.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_in.png
index 2864ec3..cc9c49f 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_in.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_inout.png
index 0bb0c72..5a313c5 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_inout.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_out.png
index f23dd60..373a4a4 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_out.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_in.png
index b1c3168..d299daf 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_in.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_inout.png
index 5e41470..dcfdb7b 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_inout.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_out.png
index 639842b..fb8125a 100644
--- a/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_out.png
+++ b/packages/SystemUI/res/drawable-xhdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable/notification_row_bg.xml b/packages/SystemUI/res/drawable/notification_row_bg.xml
index dc626d1..1bb2172 100644
--- a/packages/SystemUI/res/drawable/notification_row_bg.xml
+++ b/packages/SystemUI/res/drawable/notification_row_bg.xml
@@ -17,6 +17,6 @@
 <selector xmlns:android="http://schemas.android.com/apk/res/android"
         android:exitFadeDuration="@android:integer/config_mediumAnimTime">
 
-    <item android:state_pressed="true"  android:drawable="@android:color/holo_blue_light" />
+    <item android:state_pressed="true"  android:drawable="@drawable/notification_item_background_color_pressed" />
     <item android:state_pressed="false" android:drawable="@drawable/notification_item_background_color" />
 </selector>
diff --git a/packages/SystemUI/res/drawable/nyandroid_anim.xml b/packages/SystemUI/res/drawable/nyandroid_anim.xml
new file mode 100644
index 0000000..855a0c2
--- /dev/null
+++ b/packages/SystemUI/res/drawable/nyandroid_anim.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<animation-list
+        xmlns:android="http://schemas.android.com/apk/res/android"
+        android:oneshot="false">
+    <item android:drawable="@drawable/nyandroid00" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid01" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid02" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid03" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid04" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid05" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid06" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid07" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid08" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid09" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid10" android:duration="80" />
+    <item android:drawable="@drawable/nyandroid11" android:duration="80" />
+</animation-list>
+
diff --git a/packages/SystemUI/res/drawable/star_anim.xml b/packages/SystemUI/res/drawable/star_anim.xml
new file mode 100644
index 0000000..d7f2d8f
--- /dev/null
+++ b/packages/SystemUI/res/drawable/star_anim.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<animation-list
+        xmlns:android="http://schemas.android.com/apk/res/android"
+        android:oneshot="false">
+    <item android:drawable="@drawable/star0" android:duration="200" />
+    <item android:drawable="@drawable/star1" android:duration="200" />
+    <item android:drawable="@drawable/star2" android:duration="200" />
+    <item android:drawable="@drawable/star3" android:duration="200" />
+    <item android:drawable="@drawable/star4" android:duration="200" />
+    <item android:drawable="@drawable/star5" android:duration="200" />
+</animation-list>
+
diff --git a/packages/SystemUI/res/layout/status_bar_notification_row.xml b/packages/SystemUI/res/layout/status_bar_notification_row.xml
index 3220e62..abbc89a 100644
--- a/packages/SystemUI/res/layout/status_bar_notification_row.xml
+++ b/packages/SystemUI/res/layout/status_bar_notification_row.xml
@@ -41,7 +41,7 @@
         android:layout_width="match_parent"
         android:layout_height="@dimen/notification_divider_height"
         android:layout_alignParentBottom="true"
-        android:background="@drawable/notification_item_background_color"
+        android:background="@drawable/status_bar_notification_row_background_color"
         />
 
 </RelativeLayout>
diff --git a/packages/SystemUI/res/values/colors.xml b/packages/SystemUI/res/values/colors.xml
index 5ba1908..c88d651 100644
--- a/packages/SystemUI/res/values/colors.xml
+++ b/packages/SystemUI/res/values/colors.xml
@@ -19,6 +19,7 @@
 <resources>
     <drawable name="notification_number_text_color">#ffffffff</drawable>
     <drawable name="notification_item_background_color">#ff111111</drawable>
+    <drawable name="notification_item_background_color_pressed">#ff257390</drawable>
     <drawable name="ticker_background_color">#ff1d1d1d</drawable>
     <drawable name="status_bar_background">#ff000000</drawable>
     <drawable name="status_bar_recents_background">#b3000000</drawable>
diff --git a/packages/SystemUI/src/com/android/systemui/Nyandroid.java b/packages/SystemUI/src/com/android/systemui/Nyandroid.java
new file mode 100644
index 0000000..6f168ba
--- /dev/null
+++ b/packages/SystemUI/src/com/android/systemui/Nyandroid.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.systemui;
+
+import android.animation.AnimatorSet;
+import android.animation.PropertyValuesHolder;
+import android.animation.ObjectAnimator;
+import android.animation.TimeAnimator;
+import android.app.Activity;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.drawable.AnimationDrawable;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.os.Handler;
+import android.util.AttributeSet;
+import android.util.DisplayMetrics;
+import android.util.Pair;
+import android.view.Gravity;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.WindowManager;
+import android.widget.FrameLayout;
+import android.widget.ImageView;
+import java.util.HashMap;
+import java.util.Random;
+
+public class Nyandroid extends Activity {
+    final static boolean DEBUG = false;
+
+    public static class Board extends FrameLayout
+    {
+        public static final boolean FIXED_STARS = true;
+        public static final int NUM_CATS = 20;
+
+        static Random sRNG = new Random();
+
+        static float lerp(float a, float b, float f) {
+            return (b-a)*f + a;
+        }
+
+        static float randfrange(float a, float b) {
+            return lerp(a, b, sRNG.nextFloat());
+        }
+
+        static int randsign() {
+            return sRNG.nextBoolean() ? 1 : -1;
+        }
+
+        static <E> E pick(E[] array) {
+            if (array.length == 0) return null;
+            return array[sRNG.nextInt(array.length)];
+        }
+
+        public class FlyingCat extends ImageView {
+            public static final float VMAX = 1000.0f;
+            public static final float VMIN = 100.0f;
+
+            public float v, vr;
+
+            public float dist;
+            public float z;
+
+            public ComponentName component;
+
+            public FlyingCat(Context context, AttributeSet as) {
+                super(context, as);
+                setImageResource(R.drawable.nyandroid_anim); // @@@
+
+                if (DEBUG) setBackgroundColor(0x80FF0000);
+            }
+
+            public String toString() {
+                return String.format("<cat (%.1f, %.1f) (%d x %d)>",
+                    getX(), getY(), getWidth(), getHeight());
+            }
+
+            public void reset() {
+                final float scale = lerp(0.1f,2f,z);
+                setScaleX(scale); setScaleY(scale);
+
+                setX(-scale*getWidth()+1);
+                setY(randfrange(0, Board.this.getHeight()-scale*getHeight()));
+                v = lerp(VMIN, VMAX, z);
+
+                dist = 0;
+
+//                android.util.Log.d("Nyandroid", "reset cat: " + this);
+            }
+
+            public void update(float dt) {
+                dist += v * dt;
+                setX(getX() + v * dt);
+            }
+        }
+
+        TimeAnimator mAnim;
+
+        public Board(Context context, AttributeSet as) {
+            super(context, as);
+
+            setLayerType(View.LAYER_TYPE_HARDWARE, null);
+            setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
+            setBackgroundColor(0xFF003366);
+        }
+
+        private void reset() {
+//            android.util.Log.d("Nyandroid", "board reset");
+            removeAllViews();
+
+            final ViewGroup.LayoutParams wrap = new ViewGroup.LayoutParams(
+                        ViewGroup.LayoutParams.WRAP_CONTENT,
+                        ViewGroup.LayoutParams.WRAP_CONTENT);
+
+            if (FIXED_STARS) {
+                for(int i=0; i<20; i++) {
+                    ImageView fixedStar = new ImageView(getContext(), null);
+                    if (DEBUG) fixedStar.setBackgroundColor(0x8000FF80);
+                    fixedStar.setImageResource(R.drawable.star_anim); // @@@
+                    addView(fixedStar, wrap);
+                    final float scale = randfrange(0.1f, 1f);
+                    fixedStar.setScaleX(scale); fixedStar.setScaleY(scale);
+                    fixedStar.setX(randfrange(0, getWidth()));
+                    fixedStar.setY(randfrange(0, getHeight()));
+                    final AnimationDrawable anim = (AnimationDrawable) fixedStar.getDrawable();
+                    postDelayed(new Runnable() { 
+                        public void run() {
+                            anim.start();
+                        }}, (int) randfrange(0, 1000));
+                }
+            }
+
+            for(int i=0; i<NUM_CATS; i++) {
+                FlyingCat nv = new FlyingCat(getContext(), null);
+                addView(nv, wrap);
+                nv.z = ((float)i/NUM_CATS);
+                nv.z *= nv.z;
+                nv.reset();
+                nv.setX(randfrange(0,Board.this.getWidth()));
+                final AnimationDrawable anim = (AnimationDrawable) nv.getDrawable();
+                postDelayed(new Runnable() { 
+                    public void run() {
+                        anim.start();
+                    }}, (int) randfrange(0, 1000));
+            }
+
+            if (mAnim != null) {
+                mAnim.cancel();
+            }
+            mAnim = new TimeAnimator();
+            mAnim.setTimeListener(new TimeAnimator.TimeListener() {
+                public void onTimeUpdate(TimeAnimator animation, long totalTime, long deltaTime) {
+                    // setRotation(totalTime * 0.01f); // not as cool as you would think
+//                    android.util.Log.d("Nyandroid", "t=" + totalTime);
+
+                    for (int i=0; i<getChildCount(); i++) {
+                        View v = getChildAt(i);
+                        if (!(v instanceof FlyingCat)) continue;
+                        FlyingCat nv = (FlyingCat) v;
+                        nv.update(deltaTime / 1000f);
+                        final float catWidth = nv.getWidth() * nv.getScaleX();
+                        final float catHeight = nv.getHeight() * nv.getScaleY();
+                        if (   nv.getX() + catWidth < -2
+                            || nv.getX() > getWidth() + 2
+                            || nv.getY() + catHeight < -2
+                            || nv.getY() > getHeight() + 2)
+                        {
+                            nv.reset();
+                        }
+                    }
+                }
+            });
+        }
+
+        @Override
+        protected void onSizeChanged (int w, int h, int oldw, int oldh) {
+            super.onSizeChanged(w,h,oldw,oldh);
+//            android.util.Log.d("Nyandroid", "resized: " + w + "x" + h);
+            post(new Runnable() { public void run() { 
+                reset();
+                mAnim.start(); 
+            } });
+        }
+
+
+        @Override
+        protected void onDetachedFromWindow() {
+            super.onDetachedFromWindow();
+            mAnim.cancel();
+        }
+
+        @Override
+        public boolean isOpaque() {
+            return true;
+        }
+    }
+
+    private Board mBoard;
+
+    @Override
+    public void onStart() {
+        super.onStart();
+
+        getWindow().addFlags(
+                  WindowManager.LayoutParams.FLAG_ALLOW_LOCK_WHILE_SCREEN_ON
+                | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
+                );
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        mBoard = new Board(this, null);
+        setContentView(mBoard);
+
+        mBoard.setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
+            @Override
+            public void onSystemUiVisibilityChange(int vis) {
+                if (0 == (vis & View.SYSTEM_UI_FLAG_HIDE_NAVIGATION)) {
+                    Nyandroid.this.finish();
+                }
+            }
+        });
+    }
+
+    @Override
+    public void onUserInteraction() {
+//        android.util.Log.d("Nyandroid", "finishing on user interaction");
+        finish();
+    }
+}
diff --git a/services/java/com/android/server/accessibility/TouchExplorer.java b/services/java/com/android/server/accessibility/TouchExplorer.java
index 5875ee3..3c6b416 100644
--- a/services/java/com/android/server/accessibility/TouchExplorer.java
+++ b/services/java/com/android/server/accessibility/TouchExplorer.java
@@ -457,6 +457,9 @@
             case MotionEvent.ACTION_MOVE: {
                 final int activePointerCount = mPointerTracker.getActivePointerCount();
                 switch (activePointerCount) {
+                    case 1: {
+                        // do nothing
+                    } break;
                     case 2: {
                         if (isDraggingGesture(event)) {
                             // If still dragging send a drag event.
@@ -484,10 +487,12 @@
                 }
             } break;
             case MotionEvent.ACTION_POINTER_UP: {
-                mCurrentState = STATE_TOUCH_EXPLORING;
                 // Send an event to the end of the drag gesture.
                 sendMotionEvent(event, MotionEvent.ACTION_UP, pointerIdBits, policyFlags);
              } break;
+            case MotionEvent.ACTION_UP: {
+                mCurrentState = STATE_TOUCH_EXPLORING;
+            } break;
             case MotionEvent.ACTION_CANCEL: {
                 clear();
             } break;
@@ -500,7 +505,7 @@
      * @param event The event to be handled.
      * @param policyFlags The policy flags associated with the event.
      */
-    public void handleMotionEventStateDelegating(MotionEvent event, int policyFlags) {
+    private void handleMotionEventStateDelegating(MotionEvent event, int policyFlags) {
         switch (event.getActionMasked()) {
             case MotionEvent.ACTION_DOWN: {
                 throw new IllegalStateException("Delegating state can only be reached if "
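The TouchExplorer hunks above change when the dragging state is exited: the transition back to STATE_TOUCH_EXPLORING now happens on ACTION_UP (the last finger lifting) instead of ACTION_POINTER_UP (one of two fingers lifting), and a MOVE with a single remaining active pointer is explicitly ignored. A minimal sketch of that state handling, reusing the names visible in the hunks but otherwise simplified and assumed:

    // Sketch only: the enclosing dragging-state handler and its fields are assumed.
    private void handleMotionEventStateDragging(MotionEvent event, int pointerIdBits, int policyFlags) {
        switch (event.getActionMasked()) {
            case MotionEvent.ACTION_MOVE:
                if (mPointerTracker.getActivePointerCount() == 1) {
                    // One finger already lifted mid-drag: wait for the gesture to end.
                }
                break;
            case MotionEvent.ACTION_POINTER_UP:
                // Close out the injected drag, but stay in the dragging state.
                sendMotionEvent(event, MotionEvent.ACTION_UP, pointerIdBits, policyFlags);
                break;
            case MotionEvent.ACTION_UP:
                // Only the last finger lifting returns us to touch exploration.
                mCurrentState = STATE_TOUCH_EXPLORING;
                break;
        }
    }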
diff --git a/services/java/com/android/server/wm/WindowManagerService.java b/services/java/com/android/server/wm/WindowManagerService.java
index 06a6e98..595c256 100644
--- a/services/java/com/android/server/wm/WindowManagerService.java
+++ b/services/java/com/android/server/wm/WindowManagerService.java
@@ -5230,32 +5230,27 @@
         startFreezingDisplayLocked(inTransaction);
         mInputManager.setDisplayOrientation(0, rotation);
 
-        // NOTE: We disable the rotation in the emulator because
-        //       it doesn't support hardware OpenGL emulation yet.
-        if (CUSTOM_SCREEN_ROTATION && mScreenRotationAnimation != null
-                && mScreenRotationAnimation.hasScreenshot()) {
-            Surface.freezeDisplay(0);
-            if (!inTransaction) {
-                if (SHOW_TRANSACTIONS) Slog.i(TAG,
-                        ">>> OPEN TRANSACTION setRotationUnchecked");
-                Surface.openTransaction();
-            }
-            try {
-                if (mScreenRotationAnimation != null) {
-                    mScreenRotationAnimation.setRotation(rotation);
-                }
-            } finally {
-                if (!inTransaction) {
-                    Surface.closeTransaction();
-                    if (SHOW_TRANSACTIONS) Slog.i(TAG,
-                            "<<< CLOSE TRANSACTION setRotationUnchecked");
-                }
-            }
-            Surface.setOrientation(0, rotation);
-            Surface.unfreezeDisplay(0);
-        } else {
-            Surface.setOrientation(0, rotation);
+        if (!inTransaction) {
+            if (SHOW_TRANSACTIONS) Slog.i(TAG,
+                    ">>> OPEN TRANSACTION setRotationUnchecked");
+            Surface.openTransaction();
         }
+        try {
+            // NOTE: We disable the rotation in the emulator because
+            //       it doesn't support hardware OpenGL emulation yet.
+            if (CUSTOM_SCREEN_ROTATION && mScreenRotationAnimation != null
+                    && mScreenRotationAnimation.hasScreenshot()) {
+                mScreenRotationAnimation.setRotation(rotation);
+            }
+            Surface.setOrientation(0, rotation);
+        } finally {
+            if (!inTransaction) {
+                Surface.closeTransaction();
+                if (SHOW_TRANSACTIONS) Slog.i(TAG,
+                        "<<< CLOSE TRANSACTION setRotationUnchecked");
+            }
+        }
+
         rebuildBlackFrame(inTransaction);
 
         for (int i=mWindows.size()-1; i>=0; i--) {
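In the WindowManagerService hunk above, the rotation-animation path and the plain path are folded together: Surface.setOrientation() now runs in both cases, the freezeDisplay()/unfreezeDisplay() calls are dropped, and everything is bracketed by a single transaction that is closed in a finally block. A stripped-down sketch of that conditionally-owned-transaction idiom (applyRotationChanges() is a hypothetical stand-in for the body shown above):

    // Sketch: only the caller that opened the transaction closes it, and the
    // close sits in finally so it happens even if the body throws.
    boolean ownTransaction = !inTransaction;
    if (ownTransaction) {
        Surface.openTransaction();
    }
    try {
        applyRotationChanges(rotation);   // hypothetical: setRotation()/setOrientation() as above
    } finally {
        if (ownTransaction) {
            Surface.closeTransaction();
        }
    }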
diff --git a/services/surfaceflinger/SurfaceFlinger.cpp b/services/surfaceflinger/SurfaceFlinger.cpp
index 195ad2e..1441a54 100644
--- a/services/surfaceflinger/SurfaceFlinger.cpp
+++ b/services/surfaceflinger/SurfaceFlinger.cpp
@@ -1234,10 +1234,22 @@
 }
 
 
-void SurfaceFlinger::setTransactionState(const Vector<ComposerState>& state) {
+void SurfaceFlinger::setTransactionState(const Vector<ComposerState>& state,
+        int orientation) {
     Mutex::Autolock _l(mStateLock);
 
     uint32_t flags = 0;
+    if (mCurrentState.orientation != orientation) {
+        if (uint32_t(orientation)<=eOrientation270 || orientation==42) {
+            mCurrentState.orientation = orientation;
+            flags |= eTransactionNeeded;
+            mResizeTransationPending = true;
+        } else if (orientation != eOrientationUnchanged) {
+            LOGW("setTransactionState: ignoring unrecognized orientation: %d",
+                    orientation);
+        }
+    }
+
     const size_t count = state.size();
     for (size_t i=0 ; i<count ; i++) {
         const ComposerState& s(state[i]);
diff --git a/services/surfaceflinger/SurfaceFlinger.h b/services/surfaceflinger/SurfaceFlinger.h
index 1cb9be2..0e642c1 100644
--- a/services/surfaceflinger/SurfaceFlinger.h
+++ b/services/surfaceflinger/SurfaceFlinger.h
@@ -167,7 +167,8 @@
     virtual sp<IGraphicBufferAlloc>     createGraphicBufferAlloc();
     virtual sp<IMemoryHeap>             getCblk() const;
     virtual void                        bootFinished();
-    virtual void                        setTransactionState(const Vector<ComposerState>& state);
+    virtual void                        setTransactionState(const Vector<ComposerState>& state,
+                                                            int orientation);
     virtual status_t                    freezeDisplay(DisplayID dpy, uint32_t flags);
     virtual status_t                    unfreezeDisplay(DisplayID dpy, uint32_t flags);
     virtual int                         setOrientation(DisplayID dpy, int orientation, uint32_t flags);
diff --git a/telephony/java/com/android/internal/telephony/SMSDispatcher.java b/telephony/java/com/android/internal/telephony/SMSDispatcher.java
index e4c6028..a42a267 100644
--- a/telephony/java/com/android/internal/telephony/SMSDispatcher.java
+++ b/telephony/java/com/android/internal/telephony/SMSDispatcher.java
@@ -518,6 +518,7 @@
      * @param address the originating address
      * @param referenceNumber distinguishes concatenated messages from the same sender
      * @param sequenceNumber the order of this segment in the message
+     *          (starting at 0 for CDMA WDP datagrams and 1 for concatenated messages).
      * @param messageCount the number of segments in the message
      * @param timestamp the service center timestamp in millis
      * @param destPort the destination port for the message, or -1 for no destination port
@@ -583,7 +584,11 @@
             for (int i = 0; i < cursorCount; i++) {
                 cursor.moveToNext();
                 int cursorSequence = cursor.getInt(SEQUENCE_COLUMN);
-                pdus[cursorSequence - 1] = HexDump.hexStringToByteArray(
+                // GSM sequence numbers start at 1; CDMA WDP datagram sequence numbers start at 0
+                if (!isCdmaWapPush) {
+                    cursorSequence--;
+                }
+                pdus[cursorSequence] = HexDump.hexStringToByteArray(
                         cursor.getString(PDU_COLUMN));
 
                 // Read the destination port from the first segment (needed for CDMA WAP PDU).
@@ -593,7 +598,12 @@
                 }
             }
             // This one isn't in the DB, so add it
-            pdus[sequenceNumber - 1] = pdu;
+            // GSM sequence numbers start at 1; CDMA WDP datagram sequence numbers start at 0
+            if (isCdmaWapPush) {
+                pdus[sequenceNumber] = pdu;
+            } else {
+                pdus[sequenceNumber - 1] = pdu;
+            }
 
             // Remove the parts from the database
             mResolver.delete(mRawUri, where, whereArgs);
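The two SMSDispatcher hunks above fix an off-by-one in segment reassembly: GSM concatenated-SMS parts are numbered starting at 1 (as the updated javadoc notes), while CDMA WDP datagram segments are numbered starting at 0, so only the former needs the -1 shift before being used as an index into pdus[]. A small sketch of that normalization (the helper name is hypothetical):

    // Sketch: map a received sequence number onto a 0-based index into pdus[].
    // GSM concatenated messages number their parts 1..messageCount;
    // CDMA WDP datagrams number theirs 0..messageCount-1.
    static int toPduIndex(int sequenceNumber, boolean isCdmaWapPush) {
        return isCdmaWapPush ? sequenceNumber : sequenceNumber - 1;
    }

    // e.g. pdus[toPduIndex(cursorSequence, isCdmaWapPush)] = HexDump.hexStringToByteArray(...);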
diff --git a/telephony/java/com/android/internal/telephony/cdma/CdmaSMSDispatcher.java b/telephony/java/com/android/internal/telephony/cdma/CdmaSMSDispatcher.java
index fe41e7e..ca8d9ae 100755
--- a/telephony/java/com/android/internal/telephony/cdma/CdmaSMSDispatcher.java
+++ b/telephony/java/com/android/internal/telephony/cdma/CdmaSMSDispatcher.java
@@ -198,13 +198,18 @@
     protected int processCdmaWapPdu(byte[] pdu, int referenceNumber, String address) {
         int index = 0;
 
-        int msgType = pdu[index++];
+        int msgType = (0xFF & pdu[index++]);
         if (msgType != 0) {
             Log.w(TAG, "Received a WAP SMS which is not WDP. Discard.");
             return Intents.RESULT_SMS_HANDLED;
         }
-        int totalSegments = pdu[index++];   // >= 1
-        int segment = pdu[index++];         // >= 0
+        int totalSegments = (0xFF & pdu[index++]);   // >= 1
+        int segment = (0xFF & pdu[index++]);         // >= 0
+
+        if (segment >= totalSegments) {
+            Log.e(TAG, "WDP bad segment #" + segment + " expecting 0-" + (totalSegments - 1));
+            return Intents.RESULT_SMS_HANDLED;
+        }
 
         // Only the first segment contains sourcePort and destination Port
         int sourcePort = 0;
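Two fixes appear in processCdmaWapPdu() above: the WDP header octets are read with & 0xFF, because Java bytes are signed and a bare pdu[index++] would turn any octet of 0x80 or above into a negative int, and the segment number is range-checked against the segment count before the datagram is accepted. A standalone illustration of the masking (values chosen for the example):

    // Sketch: why the 0xFF mask matters when reading unsigned octets from a byte[].
    byte raw = (byte) 0xC8;      // octet value 200 on the wire
    int wrong = raw;             // -56: sign-extended, breaks counts, comparisons, and indexing
    int right = raw & 0xFF;      // 200: the intended unsigned value

    // The new guard then rejects malformed datagrams up front:
    // if (segment >= totalSegments) { ... drop and return RESULT_SMS_HANDLED ... }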
diff --git a/telephony/java/com/android/internal/telephony/gsm/SmsMessage.java b/telephony/java/com/android/internal/telephony/gsm/SmsMessage.java
index ea030e6..2da9642 100644
--- a/telephony/java/com/android/internal/telephony/gsm/SmsMessage.java
+++ b/telephony/java/com/android/internal/telephony/gsm/SmsMessage.java
@@ -300,8 +300,8 @@
                 return null;
             }
             // TP-Data-Coding-Scheme
-            // Class 3, UCS-2 encoding, uncompressed
-            bo.write(0x0b);
+            // UCS-2 encoding, uncompressed
+            bo.write(0x08);
         }
 
         // (no TP-Validity-Period)
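The TP-DCS octet for the UCS-2 path changes from 0x0b to 0x08. Assuming the GSM 03.38 general data coding group, bits 3-2 select the alphabet (10 = UCS-2) and bit 4 says whether bits 1-0 carry a message class; 0x0b set class bits without setting bit 4, so the old "Class 3" comment was never actually conveyed, while 0x08 is plain uncompressed UCS-2 with no class. A rough sketch of the layout under that assumption:

    // Sketch (GSM 03.38, coding group 00xx):
    //   bit 5    : compressed
    //   bit 4    : message class present
    //   bits 3-2 : alphabet (00 GSM 7-bit, 01 8-bit data, 10 UCS-2)
    //   bits 1-0 : message class (meaningful only when bit 4 is set)
    static final int DCS_UCS2_NO_CLASS = 0x08;               // what the code now writes
    static final int DCS_UCS2_CLASS_3  = 0x08 | 0x10 | 0x03; // 0x1b: what "class 3" would have required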
diff --git a/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbosync.rs b/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbosync.rs
index b77ccb4..42b1cf1 100644
--- a/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbosync.rs
+++ b/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbosync.rs
@@ -105,8 +105,8 @@
         rsgMeshComputeBoundingBox(info->mMesh,
                                   &minX, &minY, &minZ,
                                   &maxX, &maxY, &maxZ);
-        info->bBoxMin = (minX, minY, minZ);
-        info->bBoxMax = (maxX, maxY, maxZ);
+        info->bBoxMin = (float3){minX, minY, minZ};
+        info->bBoxMax = (float3){maxX, maxY, maxZ};
         gLookAt += (info->bBoxMin + info->bBoxMax)*0.5f;
     }
     gLookAt = gLookAt / (float)size;
diff --git a/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbotest.rs b/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbotest.rs
index d44fd2b..05ef3ac 100644
--- a/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbotest.rs
+++ b/tests/RenderScriptTests/FBOTest/src/com/android/fbotest/fbotest.rs
@@ -104,8 +104,8 @@
         rsgMeshComputeBoundingBox(info->mMesh,
                                   &minX, &minY, &minZ,
                                   &maxX, &maxY, &maxZ);
-        info->bBoxMin = (minX, minY, minZ);
-        info->bBoxMax = (maxX, maxY, maxZ);
+        info->bBoxMin = (float3){minX, minY, minZ};
+        info->bBoxMax = (float3){maxX, maxY, maxZ};
         gLookAt += (info->bBoxMin + info->bBoxMax)*0.5f;
     }
     gLookAt = gLookAt / (float)size;