Merge "Docs: Fixed misspelled word in build.png: "Reources" > "Resources"" into mnc-docs
diff --git a/core/java/android/content/Intent.java b/core/java/android/content/Intent.java
index 77a7c69..234ac50 100644
--- a/core/java/android/content/Intent.java
+++ b/core/java/android/content/Intent.java
@@ -1287,14 +1287,14 @@
             "android.intent.extra.ASSIST_INPUT_DEVICE_ID";
 
     /**
-     * Activity Action: List all available applications
+     * Activity Action: List all available applications.
      * <p>Input: Nothing.
      * <p>Output: nothing.
      */
     @SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
     public static final String ACTION_ALL_APPS = "android.intent.action.ALL_APPS";
     /**
-     * Activity Action: Show settings for choosing wallpaper
+     * Activity Action: Show settings for choosing wallpaper.
      * <p>Input: Nothing.
      * <p>Output: Nothing.
      */
@@ -1856,7 +1856,7 @@
     /**
      * Broadcast Action: An existing application package has been removed from
      * the device.  The data contains the name of the package.  The package
-     * that is being installed does <em>not</em> receive this Intent.
+     * that is being removed does <em>not</em> receive this Intent.
      * <ul>
      * <li> {@link #EXTRA_UID} containing the integer uid previously assigned
      * to the package.
@@ -1890,9 +1890,9 @@
     public static final String ACTION_PACKAGE_FULLY_REMOVED
             = "android.intent.action.PACKAGE_FULLY_REMOVED";
     /**
-     * Broadcast Action: An existing application package has been changed (e.g.
-     * a component has been enabled or disabled).  The data contains the name of
-     * the package.
+     * Broadcast Action: An existing application package has been changed (for
+     * example, a component has been enabled or disabled).  The data contains
+     * the name of the package.
      * <ul>
      * <li> {@link #EXTRA_UID} containing the integer uid assigned to the package.
      * <li> {@link #EXTRA_CHANGED_COMPONENT_NAME_LIST} containing the class name
@@ -1964,9 +1964,9 @@
     public static final String ACTION_UID_REMOVED = "android.intent.action.UID_REMOVED";
 
     /**
-     * Broadcast Action: Sent to the installer package of an application
-     * when that application is first launched (that is the first time it
-     * is moved out of the stopped state).  The data contains the name of the package.
+     * Broadcast Action: Sent to the installer package of an application when
+     * that application is first launched (that is the first time it is moved
+     * out of the stopped state).  The data contains the name of the package.
      *
      * <p class="note">This is a protected intent that can only be sent
      * by the system.
@@ -1994,8 +1994,9 @@
     public static final String ACTION_PACKAGE_VERIFIED = "android.intent.action.PACKAGE_VERIFIED";
 
     /**
-     * Broadcast Action: Sent to the system intent filter verifier when an intent filter
-     * needs to be verified. The data contains the filter data hosts to be verified against.
+     * Broadcast Action: Sent to the system intent filter verifier when an
+     * intent filter needs to be verified. The data contains the filter data
+     * hosts to be verified against.
      * <p class="note">
      * This is a protected intent that can only be sent by the system.
      * </p>
diff --git a/core/java/android/nfc/tech/NfcBarcode.java b/core/java/android/nfc/tech/NfcBarcode.java
index 8901f28..421ba78 100644
--- a/core/java/android/nfc/tech/NfcBarcode.java
+++ b/core/java/android/nfc/tech/NfcBarcode.java
@@ -113,10 +113,10 @@
      * <p>Does not cause any RF activity and does not block.
      *
      * @return a byte array containing the barcode
-     * @see <a href="http://www.kovio.com/docs/kovionfcbarcode.pdf">
-     *      Kovio 128-bit NFC barcode datasheet</a>
-     * @see <a href="http://kovio.com/docs/kovio-128-nfc-barcode-data-format.pdf">
-     *      Kovio 128-bit NFC barcode data format</a>
+     * @see <a href="http://www.thinfilm.no/docs/thinfilm-nfc-barcode-datasheet.pdf">
+     *      Thinfilm NFC Barcode tag specification (previously Kovio NFC Barcode)</a>
+     * @see <a href="http://www.thinfilm.no/docs/thinfilm-nfc-barcode-data-format.pdf">
+     *      Thinfilm NFC Barcode data format (previously Kovio NFC Barcode)</a>
      */
     public byte[] getBarcode() {
         switch (mType) {
diff --git a/docs/html/about/dashboards/index.jd b/docs/html/about/dashboards/index.jd
index 471dc07..2eecd45 100644
--- a/docs/html/about/dashboards/index.jd
+++ b/docs/html/about/dashboards/index.jd
@@ -59,7 +59,7 @@
 </div>
 
 
-<p style="clear:both"><em>Data collected during a 7-day period ending on January 4, 2016.
+<p style="clear:both"><em>Data collected during a 7-day period ending on February 1, 2016.
 <br/>Any versions with less than 0.1% distribution are not shown.</em>
 </p>
 
@@ -90,7 +90,7 @@
 </div>
 
 
-<p style="clear:both"><em>Data collected during a 7-day period ending on January 4, 2016.
+<p style="clear:both"><em>Data collected during a 7-day period ending on February 1, 2016.
 
 <br/>Any screen configurations with less than 0.1% distribution are not shown.</em></p>
 
@@ -110,7 +110,7 @@
 
 
 <img alt="" style="float:right"
-src="//chart.googleapis.com/chart?chl=GL%202.0%7CGL%203.0%7CGL%203.1&chf=bg%2Cs%2C00000000&chd=t%3A54.3%2C38.9%2C6.8&chco=c4df9b%2C6fad0c&cht=p&chs=400x250">
+src="//chart.googleapis.com/chart?chl=GL%202.0%7CGL%203.0%7CGL%203.1&chf=bg%2Cs%2C00000000&chd=t%3A53.2%2C39.7%2C7.1&chco=c4df9b%2C6fad0c&cht=p&chs=400x250">
 
 <p>To declare which version of OpenGL ES your application requires, you should use the {@code
 android:glEsVersion} attribute of the <a
@@ -128,21 +128,21 @@
 </tr>
 <tr>
 <td>2.0</td>
-<td>54.3%</td>
+<td>53.2%</td>
 </tr>
 <tr>
 <td>3.0</td>
-<td>38.9%</td>
+<td>39.7%</td>
 </tr>
 <tr>
 <td>3.1</td>
-<td>6.8%</td>
+<td>7.1%</td>
 </tr>
 </table>
 
 
 
-<p style="clear:both"><em>Data collected during a 7-day period ending on January 4, 2016</em></p>
+<p style="clear:both"><em>Data collected during a 7-day period ending on February 1, 2016</em></p>
 
 
 
@@ -156,19 +156,19 @@
       "Large": {
         "hdpi": "0.6",
         "ldpi": "0.3",
-        "mdpi": "5.4",
-        "tvdpi": "2.5",
-        "xhdpi": "0.6"
+        "mdpi": "5.0",
+        "tvdpi": "2.3",
+        "xhdpi": "0.5"
       },
       "Normal": {
-        "hdpi": "41.1",
-        "mdpi": "5.5",
+        "hdpi": "41.5",
+        "mdpi": "5.1",
         "tvdpi": "0.1",
-        "xhdpi": "22.1",
-        "xxhdpi": "14.7"
+        "xhdpi": "22.9",
+        "xxhdpi": "14.8"
       },
       "Small": {
-        "ldpi": "2.6"
+        "ldpi": "2.4"
       },
       "Xlarge": {
         "hdpi": "0.3",
@@ -176,8 +176,8 @@
         "xhdpi": "0.7"
       }
     },
-    "densitychart": "//chart.googleapis.com/chart?chd=t%3A2.9%2C14.4%2C2.6%2C42.0%2C23.4%2C14.7&chf=bg%2Cs%2C00000000&chco=c4df9b%2C6fad0c&chl=ldpi%7Cmdpi%7Ctvdpi%7Chdpi%7Cxhdpi%7Cxxhdpi&cht=p&chs=400x250",
-    "layoutchart": "//chart.googleapis.com/chart?chd=t%3A4.5%2C9.4%2C83.5%2C2.6&chf=bg%2Cs%2C00000000&chco=c4df9b%2C6fad0c&chl=Xlarge%7CLarge%7CNormal%7CSmall&cht=p&chs=400x250"
+    "densitychart": "//chart.googleapis.com/chart?cht=p&chs=400x250&chl=ldpi%7Cmdpi%7Ctvdpi%7Chdpi%7Cxhdpi%7Cxxhdpi&chco=c4df9b%2C6fad0c&chd=t%3A2.7%2C13.6%2C2.4%2C42.4%2C24.1%2C14.8&chf=bg%2Cs%2C00000000",
+    "layoutchart": "//chart.googleapis.com/chart?cht=p&chs=400x250&chl=Xlarge%7CLarge%7CNormal%7CSmall&chco=c4df9b%2C6fad0c&chd=t%3A4.5%2C8.7%2C84.4%2C2.4&chf=bg%2Cs%2C00000000"
   }
 ];
 
@@ -185,57 +185,57 @@
 var VERSION_DATA =
 [
   {
-    "chart": "//chart.googleapis.com/chart?chd=t%3A0.2%2C3.0%2C2.7%2C24.7%2C36.1%2C32.6%2C0.7&chf=bg%2Cs%2C00000000&chco=c4df9b%2C6fad0c&chl=Froyo%7CGingerbread%7CIce%20Cream%20Sandwich%7CJelly%20Bean%7CKitKat%7CLollipop%7CMarshmallow&cht=p&chs=500x250",
+    "chart": "//chart.googleapis.com/chart?cht=p&chs=500x250&chl=Froyo%7CGingerbread%7CIce%20Cream%20Sandwich%7CJelly%20Bean%7CKitKat%7CLollipop%7CMarshmallow&chco=c4df9b%2C6fad0c&chd=t%3A0.1%2C2.7%2C2.5%2C23.9%2C35.5%2C34.1%2C1.2&chf=bg%2Cs%2C00000000",
     "data": [
       {
         "api": 8,
         "name": "Froyo",
-        "perc": "0.2"
+        "perc": "0.1"
       },
       {
         "api": 10,
         "name": "Gingerbread",
-        "perc": "3.0"
+        "perc": "2.7"
       },
       {
         "api": 15,
         "name": "Ice Cream Sandwich",
-        "perc": "2.7"
+        "perc": "2.5"
       },
       {
         "api": 16,
         "name": "Jelly Bean",
-        "perc": "9.0"
+        "perc": "8.8"
       },
       {
         "api": 17,
         "name": "Jelly Bean",
-        "perc": "12.2"
+        "perc": "11.7"
       },
       {
         "api": 18,
         "name": "Jelly Bean",
-        "perc": "3.5"
+        "perc": "3.4"
       },
       {
         "api": 19,
         "name": "KitKat",
-        "perc": "36.1"
+        "perc": "35.5"
       },
       {
         "api": 21,
         "name": "Lollipop",
-        "perc": "16.9"
+        "perc": "17.0"
       },
       {
         "api": 22,
         "name": "Lollipop",
-        "perc": "15.7"
+        "perc": "17.1"
       },
       {
         "api": 23,
         "name": "Marshmallow",
-        "perc": "0.7"
+        "perc": "1.2"
       }
     ]
   }
diff --git a/docs/html/auto/images/assets/landing/01.gif b/docs/html/auto/images/assets/landing/01.gif
index 7a75f87..1433463 100644
--- a/docs/html/auto/images/assets/landing/01.gif
+++ b/docs/html/auto/images/assets/landing/01.gif
Binary files differ
diff --git a/docs/html/auto/images/assets/landing/02.gif b/docs/html/auto/images/assets/landing/02.gif
index 655c5bc..c0f9e52 100644
--- a/docs/html/auto/images/assets/landing/02.gif
+++ b/docs/html/auto/images/assets/landing/02.gif
Binary files differ
diff --git a/docs/html/auto/images/assets/landing/03.gif b/docs/html/auto/images/assets/landing/03.gif
index 195315a..7dacf59 100644
--- a/docs/html/auto/images/assets/landing/03.gif
+++ b/docs/html/auto/images/assets/landing/03.gif
Binary files differ
diff --git a/docs/html/auto/images/assets/landing/04.png b/docs/html/auto/images/assets/landing/04.png
index aecdb58..2e0df02 100644
--- a/docs/html/auto/images/assets/landing/04.png
+++ b/docs/html/auto/images/assets/landing/04.png
Binary files differ
diff --git a/docs/html/auto/images/assets/landing/05.png b/docs/html/auto/images/assets/landing/05.png
index a3baba4..564a6fd 100644
--- a/docs/html/auto/images/assets/landing/05.png
+++ b/docs/html/auto/images/assets/landing/05.png
Binary files differ
diff --git a/docs/html/auto/images/assets/landing/06.png b/docs/html/auto/images/assets/landing/06.png
index fe74a49..dcbba9a 100644
--- a/docs/html/auto/images/assets/landing/06.png
+++ b/docs/html/auto/images/assets/landing/06.png
Binary files differ
diff --git a/docs/html/google/play/billing/billing_best_practices.jd b/docs/html/google/play/billing/billing_best_practices.jd
index 9476ffb..70084b8 100644
--- a/docs/html/google/play/billing/billing_best_practices.jd
+++ b/docs/html/google/play/billing/billing_best_practices.jd
@@ -100,6 +100,12 @@
 made the purchase, so that you can later verify that this is a legitimate purchase by
 that user. For consumable items, you can use a randomly generated string, but for non-
 consumable items you should use a string that uniquely identifies the user.</p>
+
+<p class="note">
+  <strong>Note:</strong> Do not use the user's
+  email address in the payload string, since that address may change.
+</p>
+
 <p>When you get back the response from Google Play, make sure to verify that the
 developer payload string matches the token that you sent previously with the purchase
 request. As a further security precaution, you should perform the verification on your
diff --git a/docs/html/images/training/tv/playback/guided-step-screen-2x.png b/docs/html/images/training/tv/playback/guided-step-screen-2x.png
new file mode 100644
index 0000000..e13d97a
--- /dev/null
+++ b/docs/html/images/training/tv/playback/guided-step-screen-2x.png
Binary files differ
diff --git a/docs/html/images/training/tv/playback/guided-step-screen.png b/docs/html/images/training/tv/playback/guided-step-screen.png
new file mode 100644
index 0000000..3025fe1
--- /dev/null
+++ b/docs/html/images/training/tv/playback/guided-step-screen.png
Binary files differ
diff --git a/docs/html/ndk/downloads/index.jd b/docs/html/ndk/downloads/index.jd
index f674993..03eef03 100644
--- a/docs/html/ndk/downloads/index.jd
+++ b/docs/html/ndk/downloads/index.jd
@@ -314,9 +314,6 @@
   NDK Revision History.</a></p>
 
 
-<h2 id="Downloads">Downloading</h2>
-
-
 <script>
 $('#Downloads').after($('#download-table'));
 </script>
diff --git a/docs/html/ndk/guides/audio/basics.jd b/docs/html/ndk/guides/audio/basics.jd
new file mode 100644
index 0000000..a5f0ff5
--- /dev/null
+++ b/docs/html/ndk/guides/audio/basics.jd
@@ -0,0 +1,125 @@
+page.title=OpenSL ES™ Basics
+@jd:body
+
+<div id="qv-wrapper">
+    <div id="qv">
+      <h2>On this page</h2>
+
+      <ol>
+        <li><a href="#adding">Adding OpenSL ES to Your App</a></li>
+        <li><a href="#building">Building and Debugging</a></li>
+        <li><a href="#samples">Samples</a></li>
+      </ol>
+    </div>
+  </div>
+
+
+<p>
+The Khronos Group's OpenSL ES standard exposes audio features
+similar to those in the {@link android.media.MediaPlayer} and {@link android.media.MediaRecorder}
+APIs in the Android Java framework. OpenSL ES provides a C language interface as well as
+C++ bindings, allowing you to call it from code written in either language.
+</p>
+
+<p>
+This page describes how to add these audio APIs into your app's source code, and how to incorporate
+them into the build process.
+</p>
+
+<h2 id="adding">Adding OpenSL ES to your App</h2>
+
+<p>
+You can call OpenSL ES from both C and C++ code. To add the core OpenSL ES
+feature set to your app, include the {@code OpenSLES.h} header file:
+
+</p>
+<pre>
+#include &lt;SLES/OpenSLES.h&gt;
+</pre>
+
+<p>
+To add the OpenSL ES <a href="{@docRoot}ndk/guides/audio/opensl-for-android.html#ae">
+Android extensions</a> as well, include the {@code OpenSLES_Android.h} header file:
+</p>
+<pre>
+#include &lt;SLES/OpenSLES_Android.h&gt;
+</pre>
+
+
+<h2 id="building">Building and Debugging</h2>
+
+<p>
+You can incorporate OpenSL ES into your build by specifying it in the
+<a href="{@docRoot}ndk/guides/android_mk.html">{@code Android.mk}</a> file that serves as one of the
+NDK build system's makefiles. Add the following line to
+<a href="{@docRoot}ndk/guides/android_mk.html">{@code Android.mk}</a>:
+</p>
+
+<pre>
+LOCAL_LDLIBS += -lOpenSLES
+</pre>
+
+<p>
+For robust debugging, we recommend that you examine the {@code SLresult} value that most of
+the OpenSL ES APIs return. You can use
+<a class="external-link" href="http://en.wikipedia.org/wiki/Assertion_(computing)">asserts</a>
+or more advanced error-handling logic for debugging; neither offers
+an inherent advantage for working with OpenSL ES, although one or the other might be more suitable
+for a given use case.
+</p>
+
+<p>
+We use asserts in our <a href="https://github.com/googlesamples/android-ndk">examples</a>, because
+they help catch unrealistic conditions that would indicate a coding error. We have used explicit
+error handling for other conditions more likely to occur in production.
+</p>
+
+<p>
+Many API errors result in a log entry, in addition to a non-zero result code. Such log entries
+can provide additional detail that proves especially useful for relatively complex APIs such as
+<a class="external-link" href="https://www.khronos.org/registry/sles/specs/OpenSL_ES_Specification_1.1.pdf">
+{@code Engine::CreateAudioPlayer}</a>.
+</p>
+
+<p>
+You can view the log either from the command line or from Android Studio. To examine the log from
+the command line, type the following:
+</p>
+
+<pre class="no-pretty-print">
+$ adb logcat
+</pre>
+
+<p>
+To examine the log from Android Studio, either click the <em>Logcat</em> tab in the
+<a href="{@docRoot}tools/debugging/debugging-studio.html#runDebug"><em>Debug</em></a>
+window, or click the <em>Devices | logcat</em> tab in the
+<a href="{@docRoot}tools/debugging/debugging-studio.html#systemLogView"><em>Android DDMS</em></a>
+window.
+</p>
+
+<h2 id="samples">Samples</h2>
+
+<p>
+Supported and tested example code that you can use as a model for your own code resides both locally
+and on GitHub. The local examples are located in
+{@code platforms/android-9/samples/native-audio/}, under your NDK root installation directory.
+On GitHub, they are available from the
+<a class="external-link" href="https://github.com/googlesamples/android-ndk">{@code android-ndk}</a>
+repository, in the
+<a class="external-link" href="https://github.com/googlesamples/android-ndk/tree/master/audio-echo">
+{@code audio-echo}</a> and
+<a class="external-link" href="https://github.com/googlesamples/android-ndk/tree/master/native-audio">
+{@code native-audio}</a> directories.
+</p>
+<p>The Android NDK implementation of OpenSL ES differs
+from the reference specification for OpenSL ES 1.0.1 in a number of respects.
+These differences are an important reason as to why sample code that
+you copy directly from the OpenSL ES reference specification may not work in your
+Android app.
+</p>
+<p>
+For more information on differences between the reference specification and the
+Android implementation, see
+<a href="{@docRoot}ndk/guides/audio/opensl-for-android.html">
+OpenSL ES™ for Android</a>.
diff --git a/docs/html/ndk/guides/audio/index.jd b/docs/html/ndk/guides/audio/index.jd
new file mode 100644
index 0000000..ac6e539
--- /dev/null
+++ b/docs/html/ndk/guides/audio/index.jd
@@ -0,0 +1,15 @@
+page.title=NDK Audio: OpenSL ES&#8482;
+@jd:body
+
+<p>The NDK package includes an Android-specific implementation of the
+<a href="https://www.khronos.org/opensles/">OpenSL ES</a> API
+specification from the <a href="https://www.khronos.org">Khronos Group</a>. This library
+allows you to use C or C++ to implement high-performance, low-latency audio in your game or other
+demanding app.</p>
+
+<p>This section begins by providing some
+<a href="{@docRoot}ndk/guides/audio/basics.html">basic information</a> about the API, including how
+to incorporate it into your app. It then explains what you need to know about the
+<a href="{@docRoot}ndk/guides/audio/opensl-for-android.html">Android-specific implementation</a>
+of OpenSL ES, focusing on differences between this implementation and the reference specification.
+</p>
diff --git a/docs/html/ndk/guides/audio/opensl-for-android.jd b/docs/html/ndk/guides/audio/opensl-for-android.jd
new file mode 100644
index 0000000..763da5a
--- /dev/null
+++ b/docs/html/ndk/guides/audio/opensl-for-android.jd
@@ -0,0 +1,881 @@
+page.title=Native Audio: OpenSL ES&#8482; for Android
+@jd:body
+
+<div id="qv-wrapper">
+    <div id="qv">
+      <h2>On this page</h2>
+
+      <ol>
+        <li><a href="#inherited">Features Inherited from the Reference Specification</a></li>
+        <li><a href="#ae">Android Extensions</a></li>
+      </ol>
+    </div>
+  </div>
+
+<p>
+This page provides details about how the NDK implementation of OpenSL ES™ differs
+from the reference specification for OpenSL ES 1.0.1. When using sample code from the
+specification, you may need to modify it to work on Android.
+</p>
+
+<h2 id="inherited">Features Inherited from the Reference Specification</h2>
+
+<p>
+The Android NDK implementation of OpenSL ES inherits much of the feature set from
+the reference specification, although with certain limitations.
+</p>
+
+<h3>Global entry points</h3>
+
+<p>
+OpenSL ES for Android supports all of the global entry points in the Android specification.
+These entry points include:
+</p>
+
+<ul>
+<li>{@code slCreateEngine}
+</li>
+<li>{@code slQueryNumSupportedEngineInterfaces}
+</li>
+<li>{@code slQuerySupportedEngineInterfaces}
+</li>
+</ul>
+
+<h3>Objects and interfaces</h3>
+
+<p>
+Table 1 shows which objects and interfaces the Android NDK implementation of
+OpenSL ES supports. Green cells indicate features available in this implementation.
+</p>
+
+<p class="table-caption" id="Objects-and-interfaces">
+  <strong>Table 1.</strong> Android NDK support for objects and interfaces.</p>
+<table>
+  <tr>
+    <th scope="col">Feature</th>
+    <th scope="col">Audio player</th>
+    <th scope="col">Audio recorder</th>
+    <th scope="col">Engine</th>
+    <th scope="col">Output mix</th>
+  </tr>
+  <tr>
+    <td>Bass boost</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Buffer queue</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Dynamic interface management</td>
+    <td>Yes</td>
+    <td>Yes</td>
+    <td>Yes</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Effect send</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Engine</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Environmental reverb</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Equalizer</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Metadata extraction</td>
+    <td>Yes: Decode to PCM</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Mute solo</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Object</td>
+    <td>Yes</td>
+    <td>Yes</td>
+    <td>Yes</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Play</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Playback rate</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Prefetch status</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Preset reverb</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Record</td>
+    <td>No</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Seek</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Virtualizer</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Volume</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Buffer queue data locator</td>
+    <td>Yes: Source</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>I/O device data locator</td>
+    <td>No</td>
+    <td>Yes: Source</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Output mix locator</td>
+    <td>Yes: Sink</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>URI data locator</td>
+    <td>Yes: Source</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  </table>
+
+The next section explains limitations of some of these features.
+
+<h3>Limitations</h3>
+
+<p>
+Certain limitations apply to the features in Table 1. These limitations
+represent differences from the reference specification. The rest of this section provides
+information about these differences.</p>
+
+<h4>Dynamic interface management</h4>
+
+<p>
+OpenSL ES for Android does not support {@code RemoveInterface} or
+{@code ResumeInterface}.
+</p>
+
+<h4>Effect combinations: environment reverb and preset reverb</h4>
+
+<p>
+You cannot have both environmental reverb and preset reverb on the same output mix.
+</p>
+<p>
+The platform might ignore effect requests if it estimates that the
+CPU load would be too high.
+</p>
+
+<h4>Effect send</h4>
+
+<p>
+<code>SetSendLevel()</code> supports a single send level per audio player.
+</p>
+
+<h4>Environmental reverb</h4>
+
+<p>
+Environmental reverb does not support the <code>reflectionsDelay</code>,
+<code>reflectionsLevel</code>, or <code>reverbDelay</code> fields of
+the <code>SLEnvironmentalReverbSettings</code> struct.
+</p>
+
+<h4>MIME data format</h4>
+
+<p>
+You can use the MIME data format only with the URI data locator, and only for an audio
+player. You cannot use this data format for an audio recorder.
+</p>
+<p>
+The Android implementation of OpenSL ES requires you to initialize <code>mimeType</code>
+to either <code>NULL</code> or a valid UTF-8 string. You must also initialize
+<code>containerType</code> to a valid value.
+In the absence of other considerations, such as portability to other
+implementations, or content format that an app cannot identify by header,
+we recommend that you
+set <code>mimeType</code> to <code>NULL</code> and <code>containerType</code>
+to <code>SL_CONTAINERTYPE_UNSPECIFIED</code>.
+</p>
+<p>
+OpenSL ES for Android supports the following audio formats, so long as the
+Android platform supports them as well:</p>
+
+<ul>
+<li>WAV PCM</li>
+<li>WAV alaw</li>
+<li>WAV ulaw</li>
+<li>MP3</li> <li>Ogg Vorbis</li>
+<li>AAC LC</li>
+<li>HE-AACv1 (AAC+)</li>
+<li>HE-AACv2 (enhanced AAC+)</li>
+<li>AMR</li>
+<li>FLAC</li>
+</ul>
+
+<p>
+For a list of audio formats that Android supports, see
+<a href="{@docRoot}guide/appendix/media-formats.html">Supported Media Formats</a>.
+</p>
+
+<p>
+The following limitations apply to handling of these and other formats in this
+implementation of OpenSL ES:
+</p>
+
+<ul>
+<li>AAC formats must reside within an MP4 or ADTS container.</li>
+<li>OpenSL ES for Android does not support MIDI.</li>
+<li>WMA is not part of <a class="external-link" href="https://source.android.com/">AOSP</a>, and we
+have not verified its compatibility with OpenSL ES for Android.</li>
+<li>The Android NDK implementation of OpenSL ES does not support direct
+playback of DRM or encrypted content. To play back protected audio content, you must
+decrypt it in your application before playing, with your app enforcing any DRM
+restrictions.</li>
+</ul>
+
+<h4>Object-related methods</h4>
+
+<p>
+OpenSL ES for Android does not support the following methods for manipulating objects:
+</p>
+
+<ul>
+<li>{@code Resume()}</li>
+<li>{@code RegisterCallback()}</li>
+<li>{@code AbortAsyncOperation()}</li>
+<li>{@code SetPriority()}</li>
+<li>{@code GetPriority()}</li>
+<li>{@code SetLossOfControlInterfaces()}</li>
+</ul>
+
+<h4>PCM data format</h4>
+
+<p>
+PCM is the only data format you can use with buffer queues. Supported PCM
+playback configurations have the following characteristics:
+</p>
+
+<ul>
+<li>8-bit unsigned or 16-bit signed.</li>
+<li>Mono or stereo.</li>
+<li>Little-endian byte ordering.</li>
+<li>Sample rates of: 8,000, 11,025, 12,000, 16,000, 22,050, 24,000, 32,000, 44,100, or
+48,000 Hz.</li>
+</ul>
+
+<p>
+The configurations that OpenSL ES for Android supports for recording are
+device-dependent; usually, 16,000 Hz mono 16-bit signed is available regardless of device.
+</p>
+<p>
+The value of the <code>samplesPerSec</code> field is in units of milliHz, despite the misleading
+name. To avoid accidentally using the wrong value, we recommend that you initialize this field using
+one of the symbolic constants defined for this purpose, such as {@code SL_SAMPLINGRATE_44_1}.
+</p>
+<p>
+Android 5.0 (API level 21) and above support <a href="#fp">floating-point data</a>.
+</p>
+
+<h4>Playback rate</h4>
+
+<p>
+An OpenSL ES <i>playback rate</i> indicates the speed at which an
+object presents data, expressed in thousandths of normal speed, or <i>per mille</i>. For example,
+a playback rate of 1,000 per mille is 1,000/1,000, or normal speed.
+A <i>rate range</i> is a closed interval that expresses possible rate ranges.
+</p>
+
+<p>
+Support for playback-rate ranges and other capabilities may vary depending
+on the platform version and implementation. Your app can determine these capabilities at runtime by
+using <code>PlaybackRate::GetRateRange()</code> or
+<code>PlaybackRate::GetCapabilitiesOfRate()</code> to query the device.
+</p>
+
+<p>
+A device typically supports the same rate range for a data source in PCM format, and a unity rate
+range of 1000 per mille to 1000 per mille for other formats: that is, the unity rate range is
+effectively a single value.
+</p>
+
+<h4>Record</h4>
+
+<p>
+OpenSL ES for Android does not support the <code>SL_RECORDEVENT_HEADATLIMIT</code>
+or <code>SL_RECORDEVENT_HEADMOVING</code> events.
+</p>
+
+<h4>Seek</h4>
+
+<p>
+The <code>SetLoop()</code> method enables whole-file looping. To enable looping,
+set the <code>startPos</code> parameter to 0, and the value of the <code>endPos</code> parameter
+to <code>SL_TIME_UNKNOWN</code>.
+</p>
+
+<h4>Buffer queue data locator</h4>
+
+<p>
+An audio player or recorder with a data locator for a buffer queue supports PCM data format only.
+</p>
+
+<h4>I/O Device data locator</h4>
+
+<p>
+OpenSL ES for Android only supports use of an I/O device data locator when you have
+specified the locator as the data source for <code>Engine::CreateAudioRecorder()</code>.
+Initialize the device data locator using the values contained in the following code snippet.
+</p>
+
+<pre>
+SLDataLocator_IODevice loc_dev =
+  {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
+  SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
+</pre>
+
+<h4>URI data locator</h4>
+
+<p>
+OpenSL ES for Android can only use the URI data locator with MIME data format,
+and only for an audio player. You cannot use this data format for an audio recorder. It supports
+{@code http:} and {@code file:} schemes. It does not support other schemes, such as {@code https:},
+{@code ftp:}, or
+{@code content:}.
+</p>
+
+<p>
+We have not verified support for {@code rtsp:} with audio on the Android platform.
+</p>
+
+<h2 id="ae">Android Extensions</h2>
+
+<p>
+OpenSL ES for Android extends the reference OpenSL ES specification to make it compatible with
+Android, and to take advantage of the power and flexibility of the Android platform.
+</p>
+
+<p>
+The definition of the API for the Android extensions resides in <code>OpenSLES_Android.h</code>
+and the header files that it includes. Consult {@code OpenSLES_Android.h}
+for details about these extensions. This file is located under your installation root, in the
+{@code platforms/android-&lt;version&gt;/&lt;abi&gt;/include/SLES} directory. Unless otherwise
+noted, all interfaces are explicit.
+</p>
+
+<p>
+These extensions limit your application's portability to
+other OpenSL ES implementations, because they are Android-specific. You can mitigate this issue by
+avoiding use of the extensions or by using {@code #ifdef} to exclude them at compile time.
+</p>
+
+<p>
+Table 2 shows the Android-specific interfaces and data locators that Android OpenSL ES supports
+for each object type. Green cells indicate interfaces and data locators available for each
+object type.
+</p>
+
+<p class="table-caption" id="Android-extensions">
+  <strong>Table 2.</strong> Interfaces and data locators, by object type.</p>
+<table>
+  <tr>
+    <th scope="col">Feature</th>
+    <th scope="col">Audio player</th>
+    <th scope="col">Audio recorder</th>
+    <th scope="col">Engine</th>
+    <th scope="col">Output mix</th>
+  </tr>
+  <tr>
+    <td>Android buffer queue</td>
+    <td>Yes: Source (decode)</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android configuration</td>
+    <td>Yes</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android effect</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+  </tr>
+  <tr>
+    <td>Android effect capabilities</td>
+    <td>No</td>
+    <td>No</td>
+    <td>Yes</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android effect send</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android simple buffer queue</td>
+    <td>Yes: Source (playback) or sink (decode)</td>
+    <td>Yes</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android buffer queue data locator</td>
+    <td>Yes: Source (decode)</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android file descriptor data locator</td>
+    <td>Yes: Source</td>
+    <td>No</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+  <tr>
+    <td>Android simple buffer queue data locator</td>
+    <td>Yes: Source (playback) or sink (decode)</td>
+    <td>Yes: Sink</td>
+    <td>No</td>
+    <td>No</td>
+  </tr>
+</table>
+
+<h3>Android configuration interface</h3>
+
+<p>
+The Android configuration interface provides a means to set
+platform-specific parameters for objects. This interface is different from other OpenSL ES
+1.0.1 interfaces in that your app can use it before instantiating the corresponding object; thus,
+you can configure the object before instantiating it. The
+{@code OpenSLES_AndroidConfiguration.h} header file, which resides at
+{@code platforms/android-&lt;version&gt;/&lt;abi&gt;/include/SLES},
+documents the following available configuration keys and values:
+</p>
+
+<ul>
+<li>Stream type for audio players (default <code>SL_ANDROID_STREAM_MEDIA</code>).</li>
+<li>Record profile for audio recorders (default <code>SL_ANDROID_RECORDING_PRESET_GENERIC</code>).
+</li>
+</ul>
+
+<p>
+The following code snippet shows an example of how to set the Android audio stream type on an audio
+player:
+</p>
+
+<pre>
+// CreateAudioPlayer and specify SL_IID_ANDROIDCONFIGURATION
+// in the required interface ID array. Do not realize player yet.
+// ...
+SLAndroidConfigurationItf playerConfig;
+result = (*playerObject)-&gt;GetInterface(playerObject,
+    SL_IID_ANDROIDCONFIGURATION, &amp;playerConfig);
+assert(SL_RESULT_SUCCESS == result);
+SLint32 streamType = SL_ANDROID_STREAM_ALARM;
+result = (*playerConfig)-&gt;SetConfiguration(playerConfig,
+    SL_ANDROID_KEY_STREAM_TYPE, &amp;streamType, sizeof(SLint32));
+assert(SL_RESULT_SUCCESS == result);
+// ...
+// Now realize the player here.
+</pre>
+
+<p>
+You can use similar code to configure the preset for an audio recorder:
+</p>
+<pre>
+// ... obtain the configuration interface as the first four lines above, then:
+SLuint32 presetValue = SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
+result = (*playerConfig)-&gt;SetConfiguration(playerConfig,
+    SL_ANDROID_KEY_RECORDING_PRESET, &amp;presetValue, sizeof(SLuint32));
+</pre>
+
+<h3>Android effects interfaces</h3>
+
+<p>
+Android's effect, effect send, and effect capabilities interfaces provide
+a generic mechanism for an application to query and use device-specific
+audio effects. Device manufacturers should document any available device-specific audio effects
+that they provide.
+</p>
+
+<h3>Android file descriptor data locator</h3>
+
+<p>
+The Android file descriptor data locator permits you to specify the source for an
+audio player as an open file descriptor with read access. The data format must be MIME.
+</p>
+<p>
+This extension is especially useful in conjunction with the native asset manager, because
+the app reads assets from the APK via a file descriptor.
+</p>
+
+<h3 id="simple">Android simple buffer queue data locator and interface</h3>
+
+<p>
+The Android simple buffer queue data locator and interface are
+identical to those in the OpenSL ES 1.0.1 reference specification, with two exceptions: You
+can also use Android simple buffer queues with both audio players and audio recorders.  Also, PCM
+is the only data format you can use with these queues.
+In the reference specification, buffer queues are for audio players only, but
+compatible with data formats beyond PCM.
+</p>
+<p>
+For recording, your app should enqueue empty buffers. When a registered callback sends
+notification that the system has finished writing data to the buffer, the app can
+read the buffer.
+</p>
+<p>
+Playback works in the same way. For future source code
+compatibility, however, we suggest that applications use Android simple
+buffer queues instead of OpenSL ES 1.0.1 buffer queues.
+</p>
+
+<h3>Dynamic interfaces at object creation</h3>
+
+<p>
+For convenience, the Android implementation of OpenSL ES 1.0.1
+permits your app to specify dynamic interfaces when it instantiates an object.
+This is an alternative to using <code>DynamicInterfaceManagement::AddInterface()</code>
+to add these interfaces after instantiation.
+</p>
+
+<h3>Buffer queue behavior</h3>
+
+<p>
+The Android implementation does not include the
+reference specification's requirement that the play cursor return to the beginning
+of the currently playing buffer when playback enters the {@code SL_PLAYSTATE_STOPPED}
+state. This implementation can conform to that behavior, or it can leave the location of the play
+cursor unchanged.
+</p>
+
+<p>
+As a result, your app cannot assume that either behavior occurs. Therefore,
+you should explicitly call the <code>BufferQueue::Clear()</code> method after a transition to
+<code>SL_PLAYSTATE_STOPPED</code>. Doing so sets the buffer queue to a known state.
+</p>
+
+<p>
+Similarly, there is no specification governing whether the trigger for a buffer queue callback must
+be a transition to <code>SL_PLAYSTATE_STOPPED</code> or execution of
+<code>BufferQueue::Clear()</code>. Therefore, we recommend against creating a dependency on
+one or the other; instead, your app should be able to handle both.
+</p>
+
+<h3>Reporting of extensions</h3>
+<p>
+There are three methods for querying whether the platform supports the Android extensions. These
+methods are:
+</p>
+
+<ul>
+<li><code>Engine::QueryNumSupportedExtensions()</code></li>
+<li><code>Engine::QuerySupportedExtension()</code></li>
+<li><code>Engine::IsExtensionSupported()</code></li>
+</ul>
+
+<p>
+Any of these methods returns <code>ANDROID_SDK_LEVEL_&lt;API-level&gt;</code>,
+where {@code API-level} is the platform API level; for example, {@code ANDROID_SDK_LEVEL_23}.
+A platform API level of 9 or higher means that the platform supports the extensions.
+</p>
+
+
+<h3 id="da">Decode audio to PCM</h3>
+
+<p>
+This section describes a deprecated Android-specific extension to OpenSL ES 1.0.1
+for decoding an encoded stream to PCM without immediate playback.
+The table below gives recommendations for use of this extension and alternatives.
+</p>
+
+<table>
+<tr>
+  <th>API level</th>
+  <th>Alternatives</th>
+</tr>
+<tr>
+  <td>13 and below</td>
+  <td>An open-source codec with a suitable license.</td>
+</tr>
+<tr>
+  <td>14 to 15</td>
+  <td>An open-source codec with a suitable license.</td>
+</tr>
+<tr>
+  <td>16 to 20</td>
+  <td>
+    The {@link android.media.MediaCodec} class or an open-source codec with a suitable license.
+  </td>
+</tr>
+<tr>
+  <td>21 and above</td>
+  <td>
+    NDK MediaCodec in the {@code &lt;media/NdkMedia*.h&gt;} header files, the
+    {@link android.media.MediaCodec} class, or an open-source codec with a suitable license.
+  </td>
+</tr>
+</table>
+
+<p>
+A standard audio player plays back to an audio device, specifying the output mix as the data sink.
+The Android extension differs in that an audio player instead
+acts as a decoder if the app has specified the data source either as a URI or as an Android
+file descriptor data locator described in MIME data format. In such a case, the data sink is
+an Android simple buffer queue data locator with PCM data format.
+</p>
+
+<p>
+This feature is primarily intended for games to pre-load their audio assets when changing to a
+new game level, similar to the functionality that the {@link android.media.SoundPool} class
+provides.
+</p>
+
+<p>
+The application should initially enqueue a set of empty buffers in the Android simple
+buffer queue. After that, the app fills the buffers with PCM data. The Android simple
+buffer queue callback fires after each buffer is filled. The callback handler processes
+the PCM data, re-enqueues the now-empty buffer, and then returns. The application is responsible for
+keeping track of decoded buffers; the callback parameter list does not include
+sufficient information to indicate which buffer contains data or which buffer to enqueue next.
+</p>
+
+<p>
+The data source implicitly reports the end of stream (EOS) by delivering a
+<code>SL_PLAYEVENT_HEADATEND</code> event at the end of the stream. After the app has decoded
+all of the data it received, it makes no further calls to the Android simple buffer queue callback.
+</p>
+<p>
+The sink's PCM data format typically matches that of the encoded data source
+with respect to sample rate, channel count, and bit depth. However, you can decode to a different
+sample rate, channel count, or bit depth.
+For information about a provision to detect the actual PCM format, see <a href="#meta">
+Determining the format of decoded PCM data via metadata</a>.
+</p>
+<p>
+OpenSL ES for Android's PCM decoding feature supports pause and initial seek; it does not support
+volume control, effects, looping, or playback rate.
+</p>
+<p>
+Depending on the platform implementation, decoding may require resources
+that cannot be left idle.  Therefore, we recommend that you make sure to provide
+sufficient numbers of empty PCM buffers; otherwise, the decoder starves. This may happen,
+for example, if your app returns from the Android simple buffer queue callback without
+enqueueing another empty buffer.  The result of decoder starvation is
+unspecified, but may include: dropping the decoded
+PCM data, pausing the decoding process, or terminating the decoder outright.
+</p>
+
+<p class="note"><strong>Note: </strong>
+To decode an encoded stream to PCM but not play back immediately, for apps running on
+Android 4.x (API levels 16&ndash;20), we recommend using the {@link android.media.MediaCodec} class.
+For new applications running on Android 5.0 (API level 21) or higher, we recommend using the NDK
+equivalent, {@code &lt;NdkMedia*.h&gt;}. These header files reside under
+the {@code media/} directory, under your installation root.
+</p>
+
+<h3>Decode streaming ADTS AAC to PCM</h3>
+
+<p>
+An audio player acts as a streaming decoder if the data source is an
+Android buffer queue data locator with MIME data format, and the data
+sink is an Android simple buffer queue data locator with PCM data format.
+Configure the MIME data format as follows:
+</p>
+
+<ul>
+<li>Container: {@code SL_CONTAINERTYPE_RAW}</li>
+<li>MIME type string: {@code SL_ANDROID_MIME_AACADTS}</li>
+</ul>
+
+<p>
+This feature is primarily intended for streaming media applications that
+deal with AAC audio but need to perform custom audio processing
+prior to playback.  Most applications that need to decode audio to PCM
+should use the method that <a href="#da">Decode audio to PCM</a> describes,
+as that method is simpler and handles more audio formats.  The technique described
+here is a more specialized approach, to be used only if both of these
+conditions apply:
+</p>
+
+<ul>
+<li>The compressed audio source is a stream of AAC frames contained in ADTS headers.
+</li>
+<li>The application manages this stream. The data is <em>not</em> located within
+a network resource whose identifier is a URI or within a local file whose identifier is
+a file descriptor.
+</li>
+</ul>
+
+<p>
+The application should initially enqueue a set of filled buffers in the Android buffer queue.
+Each buffer contains one or more complete ADTS AAC frames.
+The Android buffer queue callback fires after each buffer is emptied.
+The callback handler should refill and re-enqueue the buffer, and then return.
+The application need not keep track of encoded buffers; the callback parameter
+list includes sufficient information to indicate which buffer to enqueue next.
+The end of stream is explicitly marked by enqueuing an EOS item.
+After EOS, no more enqueues are permitted.
+</p>
+
+<p>
+We recommend that you make sure to provide full
+ADTS AAC buffers, to avoid starving the decoder. This may happen, for example, if your app
+returns from the Android buffer queue callback without enqueueing another full buffer.
+The result of decoder starvation is unspecified.
+</p>
+
+<p>
+In all respects except for the data source, the streaming decode method is the same as
+the one that <a href="#da">Decode audio to PCM</a> describes.
+</p>
+<p>
+Despite the similarity in names, an Android buffer queue is <em>not</em>
+the same as an <a href="#simple">Android simple buffer queue</a>. The streaming decoder
+uses both kinds of buffer queues: an Android buffer queue for the ADTS
+AAC data source, and an Android simple buffer queue for the PCM data
+sink.  For more information about the Android simple buffer queue API, see <a href="#simple">Android
+simple buffer queue data locator and interface</a>.
+For more information about the Android buffer queue API, see the {@code index.html} file in
+the {@code docs/Additional_library_docs/openmaxal/} directory under the installation root.
+</p>
+
+<h3 id="meta">Determining the format of decoded PCM data via metadata</h3>
+
+<p>
+The <code>SLMetadataExtractionItf</code> interface is part of the reference specification.
+However, the metadata keys that indicate the actual format of decoded PCM data are specific to
+Android. The <code>OpenSLES_AndroidMetadata.h</code> header file defines these metadata keys.
+This header file resides under your installation root, in the
+{@code platforms/android-&lt;version&gt;/&lt;abi&gt;/include/SLES} directory.
+</p>
+
+<p>
+The metadata key indices are available immediately after
+the <code>Object::Realize()</code> method finishes executing. However, the associated values are not
+available until after the app decodes the first encoded data.  A good
+practice is to query for the key indices in the main thread after calling the {@code
+Object::Realize} method, and to read the PCM format metadata values in the Android simple
+buffer queue callback handler when calling it for the first time. Consult the
+<a href="https://github.com/googlesamples/android-ndk">example code in the NDK package</a>
+for examples of working with this interface.
+</p>
+
+<p>
+Metadata key names are stable, but the key indices are not documented,
+and are subject to change.  An application should not assume that indices
+are persistent across different execution runs, and should not assume that
+multiple object instances share indices within the same run.
+</p>
+
+<h3 id="fp">Floating-point data</h3>
+
+<p>
+An app running on Android 5.0 (API level 21) and higher can supply data to an AudioPlayer in
+single-precision, floating-point format.
+</p>
+<p>
+In the following example code, the {@code Engine::CreateAudioPlayer} method creates an audio player
+that uses floating-point data:
+</p>
+
+<pre>
+#include &lt;SLES/OpenSLES_Android.h&gt;
+...
+SLAndroidDataFormat_PCM_EX pcm;
+pcm.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
+pcm.numChannels = 2;
+pcm.sampleRate = SL_SAMPLINGRATE_44_1;
+pcm.bitsPerSample = 32;
+pcm.containerSize = 32;
+pcm.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
+pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+pcm.representation = SL_ANDROID_PCM_REPRESENTATION_FLOAT;
+...
+SLDataSource audiosrc;
+audiosrc.pLocator = ...
+audiosrc.pFormat = &amp;pcm;
+</pre>
diff --git a/docs/html/ndk/guides/guides_toc.cs b/docs/html/ndk/guides/guides_toc.cs
index 981eb51..4c4c64e 100644
--- a/docs/html/ndk/guides/guides_toc.cs
+++ b/docs/html/ndk/guides/guides_toc.cs
@@ -63,6 +63,16 @@
       </ul>
    </li>
 
+      <li class="nav-section">
+      <div class="nav-section-header"><a href="<?cs var:toroot ?>ndk/guides/audio/index.html">
+      <span class="en">Audio</span></a></div>
+      <ul>
+      <li><a href="<?cs var:toroot ?>ndk/guides/audio/basics.html">Basics</a></li>
+      <li><a href="<?cs var:toroot ?>ndk/guides/audio/opensl-for-android.html">OpenSL ES for
+      Android</a></li>
+      </ul>
+   </li>
+
 </ul>
 
 
diff --git a/docs/html/sdk/index.jd b/docs/html/sdk/index.jd
index 7d733a6..96169b8 100644
--- a/docs/html/sdk/index.jd
+++ b/docs/html/sdk/index.jd
@@ -256,6 +256,7 @@
   <li><a href="#Requirements">System Requirements</a></li>
   <li><a href="#Other">Other Download Options</a></li>
   <li><a href="{@docRoot}sdk/installing/migrate.html">Migrating to Android Studio</a></li>
+  <li><a href="https://www.google.com/intl/en/usability/index.html?l=9&reserved=0&pType=devel&productTag=0&campaignType=ghc&campaignDate=0&labelTag=0&referral_code=ASDWNLD">Participate in usability research</a></li>
   <li><a href="https://docs.google.com/a/google.com/forms/d/1mjsyfzv3HAnDY-_Kfj-3QJKdpuksyMFs9e73CRwmT6Q/viewform"
 target="_blank">Take a Survey</a></li>
 </ul>
@@ -389,7 +390,7 @@
 <h3>Windows</h3>
 
 <ul>
-<li>Microsoft&reg;  Windows&reg;  8/7/Vista (32 or 64-bit)</li>
+<li>Microsoft&reg;  Windows&reg;  8/7/Vista (32- or 64-bit)</li>
 <li>2 GB RAM minimum, 4 GB RAM recommended</li>
 <li>400 MB hard disk space</li>
 <li>At least 1 GB for Android SDK, emulator system images, and caches</li>
@@ -429,6 +430,7 @@
 <li>At least 1 GB for Android SDK, emulator system images, and caches</li>
 <li>1280 x 800 minimum screen resolution</li>
 <li>Oracle&reg;  Java Development Kit (JDK) 7 </li>
+<li>64-bit distribution capable of running 32-bit applications</li>
 </ul>
 <p>Tested on Ubuntu&reg;  14.04, Trusty Tahr (64-bit distribution capable of running
 32-bit applications).</p>
diff --git a/docs/html/sdk/installing/index.jd b/docs/html/sdk/installing/index.jd
index 203d2ff..c47b9c3d 100644
--- a/docs/html/sdk/installing/index.jd
+++ b/docs/html/sdk/installing/index.jd
@@ -89,7 +89,7 @@
     </li>
   </ol>
 
-<p>If you need use the Android SDK tools from a command line,
+<p>If you need to use the Android SDK tools from a command line,
 you can access them at:</p>
 <p><code>/Users/&lt;user>/Library/Android/sdk/</code></p>
 
diff --git a/docs/html/tools/data-binding/guide.jd b/docs/html/tools/data-binding/guide.jd
index 6acc38c..b57fba7 100644
--- a/docs/html/tools/data-binding/guide.jd
+++ b/docs/html/tools/data-binding/guide.jd
@@ -153,42 +153,6 @@
 <p>To use data binding, Android Plugin for Gradle <strong>1.5.0-alpha1</strong>
 or higher is required.</p>
 
-<h4>Beta release</h4>
-
-<div class="caution">
-  <p>Please note that the Data Binding library is a <strong>beta release</strong>.
-  While Data Binding is in beta, developers should be aware of the following
-  caveats:</p>
-  <ul>
-    <li>
-    This is a beta release of the feature intended to generate developer
-    feedback. It might contain bugs, and it might not work for your use case,
-    so use it at your own risk. That said, we do want your feedback! Please
-    let us know what is or isn’t working for you using the <a
-    href="https://code.google.com/p/android-developer-preview/">issue
-    tracker</a>.
-    </li>
-    <li>
-    The Data Binding library beta release is subject to significant changes,
-    including those which are not source code compatible with your app. That is,
-    significant rework may be required to take updates to the library in the future.
-    </li>
-    <li>
-    Developers should feel free to publish apps built with the Data Binding
-    library beta release, with the caveats that the standard Android SDK and
-    Google Play terms of service apply, and it’s always a great idea to test your
-    app thoroughly when adopting new libraries or tools.
-    </li>
-    <li>
-    We’re just getting started with Android Studio support at this time.
-    Further Android Studio support will come in the future.
-    </li>
-    <li>
-    By using the Data Binding library beta release, you acknowledge these
-    caveats.</li>
-  </ul>
-</div>
-
 <h2 id="build_environment">
   Build Environment
 </h2>
@@ -408,6 +372,36 @@
    &lt;/LinearLayout&gt;
 &lt;/layout&gt;
 </pre>
+
+<p>
+  Some specialized click event handlers exist and they need an attribute other than
+  <code>android:onClick</code> to avoid a conflict. The following attributes have been created
+  to avoid such conflicts:
+</p>
+
+<table>
+  <tr>
+    <th>Class</th>
+    <th>Listener Setter</th>
+    <th>Attribute</th>
+  </tr>
+  <tr>
+    <td>{@link android.widget.SearchView}</td>
+    <td>{@link android.widget.SearchView#setOnSearchClickListener}</td>
+    <td><code>android:onSearchClick</code></td>
+  </tr>
+  <tr>
+    <td>{@link android.widget.ZoomControls}</td>
+    <td>{@link android.widget.ZoomControls#setOnZoomInClickListener}</td>
+    <td><code>android:onZoomIn</code></td>
+  </tr>
+  <tr>
+    <td>{@link android.widget.ZoomControls}</td>
+    <td>{@link android.widget.ZoomControls#setOnZoomOutClickListener}</td>
+    <td><code>android:onZoomOut</code></td>
+  </tr>
+</table>
+
 <h2 id="layout_details">
   Layout Details
 </h2>
@@ -540,6 +534,14 @@
   <code>boolean</code>, etc.
 </p>
 
+<p>
+  A special variable named <code>context</code> is generated for use in binding
+  expressions as needed. The value for <code>context</code> is the
+  <code>Context</code> from the root View's {@link android.view.View#getContext}.
+  The <code>context</code> variable will be overridden by an explicit variable
+  declaration with that name.
+</p>
+
 <h3 id="custom_binding_class_names">
   Custom Binding Class Names
 </h3>
diff --git a/docs/html/tools/sdk/tools-notes.jd b/docs/html/tools/sdk/tools-notes.jd
index 3859ecf..64e4f59 100644
--- a/docs/html/tools/sdk/tools-notes.jd
+++ b/docs/html/tools/sdk/tools-notes.jd
@@ -21,11 +21,31 @@
 <p>For a summary of all known issues in SDK Tools, see <a
 href="http://tools.android.com/knownissues">http://tools.android.com/knownissues</a>.</p>
 
-
-
 <div class="toggle-content opened">
   <p><a href="#" onclick="return toggleContent(this)">
     <img src="{@docRoot}assets/images/triangle-opened.png" class="toggle-content-img"
+      alt=""/>SDK Platform-tools, Revision 23.1.0</a> <em>(December 2015)</em>
+  </p>
+
+  <div class="toggle-content-toggleme">
+
+    <dl>
+        <dt>General Notes:</dt>
+    <dd>
+      <ul>
+        <li>Changed Linux requirements for Android SDK Platform-tools revision 23.1.0 and later:
+          it now requires 64-bit Linux.</li>
+      </ul>
+    </dd>
+
+
+
+  </div>
+</div>
+
+<div class="toggle-content closed">
+  <p><a href="#" onclick="return toggleContent(this)">
+    <img src="{@docRoot}assets/images/triangle-closed.png" class="toggle-content-img"
       alt=""/>SDK Tools, Revision 24.4.1</a> <em>(October 2015)</em>
   </p>
 
diff --git a/docs/html/training/auto/audio/index.jd b/docs/html/training/auto/audio/index.jd
index 75974e4..9144900 100644
--- a/docs/html/training/auto/audio/index.jd
+++ b/docs/html/training/auto/audio/index.jd
@@ -19,6 +19,7 @@
     <ol>
       <li><a href="#overview">Provide Audio Services</a></li>
       <li><a href="#config_manifest">Configure Your Manifest</a></li>
+      <li><a href="#isconnected">Determine if Your App is Connected</a></li>
       <li><a href="#implement_browser">Build a Browser Service</a></li>
       <li><a href="#implement_callback">Implement Play Controls</a></li>
       <li><a href="#support_voice">Support Voice Actions</a></li>
@@ -210,12 +211,12 @@
 <p class="note"><strong>Note:</strong> The icon you provide should have transparency enabled, so the
 icon's background gets filled in with the app's primary color.</p>
 
-<h2 id=isconnected">Determine if Your App is Connected</h2>
+<h2 id="isconnected">Determine if Your App is Connected</h2>
 <p>
 It is possible to determine if your app is selected as the current media app.</p>
 <p>
-Android Auto broadcasts an intent with <code>com.google.android.gms.car.media.
-STATUS</code> action when a user connects or disconnects from a media app. The broadcast intent is
+Android Auto broadcasts an intent with <code>com.google.android.gms.car.media.STATUS</code>
+action when a user connects or disconnects from a media app. The broadcast intent is
 scoped to the package name of the media app selected. You can register a broadcast receiver in your
 app, preferably in your <a href="{@docRoot}reference/android/service/media/MediaBrowserService.html">
 MediaBrowserService</a> implementation and listen for this intent
diff --git a/docs/html/training/efficient-downloads/redundant_redundant.jd b/docs/html/training/efficient-downloads/redundant_redundant.jd
index f77c8de..0825b5d 100644
--- a/docs/html/training/efficient-downloads/redundant_redundant.jd
+++ b/docs/html/training/efficient-downloads/redundant_redundant.jd
@@ -37,7 +37,7 @@
 
 <p>To ensure that your caching doesn't result in your app displaying stale data, be sure to extract the time at which the requested content was last updated, and when it expires, from within the HTTP response headers. This will allow you to determine when the associated content should be refreshed.</p>
 
-<pre>long currentTime = System.currentTimeMillis());
+<pre>long currentTime = System.currentTimeMillis();
 
 HttpURLConnection conn = (HttpURLConnection) url.openConnection();
 
@@ -84,4 +84,4 @@
 
 <p>With the cache installed, fully cached HTTP requests can be served directly from local storage, eliminating the need to open a network connection. Conditionally cached responses can validate their freshness from the server, eliminating the bandwidth cost associated with the download.</p>
 
-<p>Uncached responses get stored in the response cache for future requests.</p>
\ No newline at end of file
+<p>Uncached responses get stored in the response cache for future requests.</p>
diff --git a/docs/html/training/sync-adapters/creating-sync-adapter.jd b/docs/html/training/sync-adapters/creating-sync-adapter.jd
index 9bd17ba..a790b87 100644
--- a/docs/html/training/sync-adapters/creating-sync-adapter.jd
+++ b/docs/html/training/sync-adapters/creating-sync-adapter.jd
@@ -416,7 +416,7 @@
          * Add the account and account type, no password or user data
          * If successful, return the Account object, otherwise report an error.
          */
-        if (accountManager.addAccountExplicitly(newAccount, null, null))) {
+        if (accountManager.addAccountExplicitly(newAccount, null, null)) {
             /*
              * If you don't set android:syncable="true" in
              * in your &lt;provider&gt; element in the manifest,
diff --git a/docs/html/training/training_toc.cs b/docs/html/training/training_toc.cs
index 5b0d603..9dbbe24 100644
--- a/docs/html/training/training_toc.cs
+++ b/docs/html/training/training_toc.cs
@@ -1085,6 +1085,10 @@
               Displaying a Now Playing Card</a>
           </li>
           <li>
+            <a href="<?cs var:toroot ?>training/tv/playback/guided-step.html">
+              Adding a Guided Step</a>
+          </li>
+          <li>
             <a href="<?cs var:toroot ?>training/tv/playback/options.html">
               Enabling Background Playback</a>
           </li>
diff --git a/docs/html/training/tv/discovery/recommendations.jd b/docs/html/training/tv/discovery/recommendations.jd
index ffe33f2..0b0b270 100644
--- a/docs/html/training/tv/discovery/recommendations.jd
+++ b/docs/html/training/tv/discovery/recommendations.jd
@@ -63,10 +63,13 @@
 
 <ul>
 <li><strong>Continuation content</strong> recommendations for the next episode for users to resume
-watching a series.</li>
+watching a series. Or, use continuation recommendations for paused movies, TV shows, or podcasts
+so users can get back to watching paused content in just a few clicks.</li>
 <li><strong>New content</strong> recommendations, such as for a new first-run episode, if the user
-finished watching another series.
-<li><strong>Related content</strong> recommendations based on the users historic viewing behavior.
+finished watching another series. Also, if your app lets users subscribe to, follow, or track
+content, use new content recommendations for unwatched items in their list of tracked content.</li>
+<li><strong>Related content</strong> recommendations based on the users' historic viewing behavior.
+</li>
 </ul>
 
 <p>For more information on how to design recommendation cards for the best user experience, see
@@ -88,6 +91,25 @@
 <a href="https://www.google.com/design/spec-tv/system-overview/recommendation-row.html#recommendation-row-card-customization"
 class="external-link">Recommendation Row</a> in the Android TV Design Spec.</p>
 
+<h3 id="grouping">Grouping Recommendations</h3>
+
+<p>
+You can optionally group recommendations based on recommendation source. For example, your app
+might provide two groups of recommendations: recommendations for content the user is subscribed to,
+and recommendations for new trending content the user might not be aware of.
+</p>
+<p>
+The system ranks and orders recommendations for each group separately when creating or updating
+the recommendation row. By providing group information for your recommendations, you can ensure
+that your recommendations don’t get ordered below unrelated recommendations.
+</p>
+<p>
+Use
+{@link android.support.v4.app.NotificationCompat.Builder#setGroup
+NotificationCompat.Builder.setGroup()} to set the group key string of a recommendation. For
+example, to mark a recommendation as belonging to a group that contains new trending content,
+you might call <code>setGroup("trending")</code>.
+</p>
 
 <h2 id="service">Create a Recommendations Service</h2>
 
diff --git a/docs/html/training/tv/playback/guided-step.jd b/docs/html/training/tv/playback/guided-step.jd
new file mode 100644
index 0000000..121961f
--- /dev/null
+++ b/docs/html/training/tv/playback/guided-step.jd
@@ -0,0 +1,259 @@
+page.title=Adding a Guided Step
+page.tags=tv, guided step
+helpoutsWidget=true
+
+trainingnavtop=true
+
+@jd:body
+
+<div id="tb-wrapper">
+<div id="tb">
+  <h2>This lesson teaches you to</h2>
+  <ol>
+    <li><a href="#details">Provide Details for a Step</a></li>
+    <li><a href="#actions">Create and Handle User Actions</a></li>
+    <li><a href="#sequence">Group Guided Steps Into a Sequence</a></li>
+    <li><a href="#presentation">Customize Step Presentation</a></li>
+  </ol>
+  <h2>Try it out</h2>
+  <ul>
+    <li><a class="external-link" href="https://github.com/googlesamples/androidtv-Leanback">Android
+    Leanback sample app</a></li>
+  </ul>
+</div>
+</div>
+
+<p>
+Your application might have multi-step tasks for users. For example, your app might need to guide
+users through purchasing additional content, or setting up a complex configuration setting, or
+simply confirming a decision. All of these tasks require walking users through one or more ordered
+steps or decisions.
+</p>
+
+<p>
+The <a href=
+"{@docRoot}tools/support-library/features.html#v17-leanback">v17 Leanback support library</a>
+provides classes to implement multi-step user tasks. This lesson discusses how to use the
+{@link android.support.v17.leanback.app.GuidedStepFragment} class to guide a user through a series
+of decisions to accomplish a task. {@link android.support.v17.leanback.app.GuidedStepFragment} uses
+TV UI best practices to make multi-step tasks easy to understand and navigate on TV devices.
+</p>
+
+<h2 id="details">Provide Details for a Step</h2>
+
+<p>
+A {@link android.support.v17.leanback.app.GuidedStepFragment} represents a single step in a series
+of steps. Visually it provides a guidance view on the left with step information. On the right,
+{@link android.support.v17.leanback.app.GuidedStepFragment} provides a view containing a
+list of possible actions or decisions for this step.
+</p>
+
+<img src="{@docRoot}images/training/tv/playback/guided-step-screen.png"
+srcset="{@docRoot}images/training/tv/playback/guided-step-screen.png 1x,
+{@docRoot}images/training/tv/playback/guided-step-screen-2x.png 2x" />
+<p class="img-caption"><strong>Figure 1.</strong> An example guided step.</p>
+
+<p>
+For each step in your multi-step task, extend
+{@link android.support.v17.leanback.app.GuidedStepFragment} and provide context information about
+the step and actions the user can take. Override
+{@link android.support.v17.leanback.app.GuidedStepFragment#onCreateGuidance onCreateGuidance()}
+and return a new
+{@link android.support.v17.leanback.widget.GuidanceStylist.Guidance} that contains context
+information, such as the step title, description, and icon.
+</p>
+
+<pre>
+&#64;Override
+public GuidanceStylist.Guidance onCreateGuidance(Bundle savedInstanceState) {
+    String title = getString(R.string.guidedstep_first_title);
+    String breadcrumb = getString(R.string.guidedstep_first_breadcrumb);
+    String description = getString(R.string.guidedstep_first_description);
+    Drawable icon = getActivity().getDrawable(R.drawable.guidedstep_main_icon_1);
+    return new GuidanceStylist.Guidance(title, description, breadcrumb, icon);
+}
+</pre>
+
+<p>
+Add your {@link android.support.v17.leanback.app.GuidedStepFragment} subclass to your desired
+activity by calling
+{@link android.support.v17.leanback.app.GuidedStepFragment#add GuidedStepFragment.add()}
+in your activity’s {@link android.app.Activity#onCreate onCreate()} method.
+
+If your activity contains only {@link android.support.v17.leanback.app.GuidedStepFragment}
+objects, use {@link android.support.v17.leanback.app.GuidedStepFragment#addAsRoot
+GuidedStepFragment.addAsRoot()} instead of
+{@link android.support.v17.leanback.app.GuidedStepFragment#add add()} to add the first
+{@link android.support.v17.leanback.app.GuidedStepFragment}. Using
+{@link android.support.v17.leanback.app.GuidedStepFragment#addAsRoot
+addAsRoot()} ensures that if the user presses the Back button on the TV remote when viewing
+the first {@link android.support.v17.leanback.app.GuidedStepFragment}, both the
+{@link android.support.v17.leanback.app.GuidedStepFragment} and the parent activity will close.
+</p>
+
+<p class="note"><strong>Note:</strong> Add
+{@link android.support.v17.leanback.app.GuidedStepFragment} objects programmatically
+and not in your layout XML files.</p>
+
+<h2 id="actions">Create and Handle User Actions</h2>
+
+<p>
+Add user actions by overriding
+{@link android.support.v17.leanback.app.GuidedStepFragment#onCreateActions onCreateActions()}.
+In your override, add a new {@link android.support.v17.leanback.widget.GuidedAction} for each
+action item, and provide the action string, description, and ID. Use
+{@link android.support.v17.leanback.widget.GuidedAction.Builder} to add new actions.
+</p>
+
+<pre>
+&#64;Override
+public void onCreateActions(List<GuidedAction> actions, Bundle savedInstanceState) {
+    // Add "Continue" user action for this step
+    actions.add(new GuidedAction.Builder()
+           .id(CONTINUE)
+           .title(getString(R.string.guidedstep_continue))
+           .description(getString(R.string.guidedstep_letsdoit))
+           .hasNext(true)
+           .build());
+...
+</pre>
+
+<p>
+Actions aren’t limited to single-line selections. Use
+{@link android.support.v17.leanback.widget.GuidedAction} attributes
+to add the following additional types of actions:
+</p>
+
+<ul>
+<li>
+Add an information label action by setting
+{@link android.support.v17.leanback.widget.GuidedAction.Builder#infoOnly infoOnly(true)}.
+If <code>infoOnly</code> is set to true, the action can't be selected by the user. Use label
+actions to provide additional information about user choices.
+</li>
+<li>
+Add an editable text action by setting
+{@link android.support.v17.leanback.widget.GuidedAction.Builder#editable editable(true)}. If
+<code>editable</code> is true, when the action is selected the user can enter text using the
+remote or a connected keyboard.
+</li>
+<li>
+Add a set of actions that behave as checkable radio buttons by using
+{@link android.support.v17.leanback.widget.GuidedAction.Builder#checkSetId checkSetId()}
+with a common ID value to group actions into a set. All actions in the same list with the same
+check-set ID are considered linked. When one of the actions within that set is selected, that
+action becomes checked, while all other actions become unchecked.
+</li>
+</ul>
+
+<p>
+You can also add a visual indicator showing that selecting the action leads to a new step by
+setting
+{@link android.support.v17.leanback.widget.GuidedAction#hasNext hasNext(true)}.
+See {@link android.support.v17.leanback.widget.GuidedAction} for all the different attributes
+you can set.
+</p>
+
+<p>
+To respond to actions, override
+{@link android.support.v17.leanback.app.GuidedStepFragment#onGuidedActionClicked
+onGuidedActionClicked()} and process the passed-in
+{@link android.support.v17.leanback.widget.GuidedAction}. Identify the selected action by
+examining {@link android.support.v17.leanback.widget.GuidedAction#getId GuidedAction.getId()}.
+</p>
+
+<h2 id="sequence">Group Guided Steps Into a Sequence</h2>
+
+<p>
+A {@link android.support.v17.leanback.app.GuidedStepFragment} represents a single step; however,
+you might have several steps in an ordered sequence. Group multiple
+{@link android.support.v17.leanback.app.GuidedStepFragment} objects together by using
+{@link android.support.v17.leanback.app.GuidedStepFragment#add GuidedStepFragment.add()} to add
+the next step in the sequence to the fragment stack.
+</p>
+
+<pre>
+&#64;Override
+public void onGuidedActionClicked(GuidedAction action) {
+    FragmentManager fm = getFragmentManager();
+    if (action.getId() == CONTINUE) {
+       GuidedStepFragment.add(fm, new SecondStepFragment());
+    }
+...
+</pre>
+
+<p>
+If the user presses the Back button on the TV remote, the device shows the previous
+{@link android.support.v17.leanback.app.GuidedStepFragment} on the fragment stack. If you
+decide to provide your own {@link android.support.v17.leanback.widget.GuidedAction} that
+returns to the previous step, you can implement the Back behavior by calling
+{@link android.app.FragmentManager#popBackStack getFragmentManager().popBackStack()}.
+</p>
+
+<h2 id="presentation">Customize Step Presentation</h2>
+
+<p>
+The {@link android.support.v17.leanback.app.GuidedStepFragment} class can use custom
+themes that control presentation aspects such as title text formatting or step transition
+animations. Custom themes must inherit from
+{@link android.support.v17.leanback.R.style#Theme_Leanback_GuidedStep}, and can provide
+overriding values for attributes defined in
+{@link android.support.v17.leanback.widget.GuidanceStylist} and
+{@link android.support.v17.leanback.widget.GuidedActionsStylist}.
+</p>
+
+<p>
+To apply a custom theme to your GuidedStepFragment, do one of the following:
+</p>
+
+<ul>
+<li>
+Apply the theme to the parent activity by setting the <code>android:theme</code> attribute on the
+activity element in the Android manifest. Setting this attribute applies the theme to all child
+views and is the easiest way to apply a custom theme if the parent activity contains only
+{@link android.support.v17.leanback.app.GuidedStepFragment} objects.
+</li>
+<li>
+If your activity already uses a custom theme and you don’t want to apply
+{@link android.support.v17.leanback.app.GuidedStepFragment} styles to other views in the activity,
+add the
+{@link android.support.v17.leanback.R.styleable#LeanbackGuidedStepTheme_guidedStepTheme}
+attribute to your existing custom activity theme. This attribute points to the custom theme that
+only the {@link android.support.v17.leanback.app.GuidedStepFragment} objects in your
+activity will use.
+</li>
+<li>
+If you use {@link android.support.v17.leanback.app.GuidedStepFragment} objects in different
+activities that are part of the same overall multi-step task, and want to use a consistent
+visual theme across all steps, override
+{@link android.support.v17.leanback.app.GuidedStepFragment#onProvideTheme
+GuidedStepFragment.onProvideTheme()} and return your custom theme.
+</li>
+</ul>
+
+<p>
+For more information on how to add styles and themes, see
+<a href="{@docRoot}guide/topics/ui/themes.html">Styles and Themes</a>.
+</p>
+
+<p>
+The {@link android.support.v17.leanback.app.GuidedStepFragment} class uses special
+<em>stylist classes</em> to access and apply theme attributes.
+The {@link android.support.v17.leanback.widget.GuidanceStylist} class uses theme information
+to control presentation of the left guidance view, while the
+{@link android.support.v17.leanback.widget.GuidedActionsStylist} class uses theme information
+to control presentation of the right actions view.
+</p>
+
+<p>
+To customize the visual style of your steps beyond what theme customization can provide, subclass
+{@link android.support.v17.leanback.widget.GuidanceStylist} or
+{@link android.support.v17.leanback.widget.GuidedActionsStylist} and return your subclass in
+{@link android.support.v17.leanback.app.GuidedStepFragment#onCreateGuidanceStylist
+GuidedStepFragment.onCreateGuidanceStylist()} or
+{@link android.support.v17.leanback.app.GuidedStepFragment#onCreateActionsStylist
+GuidedStepFragment.onCreateActionsStylist()}.
+For details on what you can customize in these subclasses, see the documentation on
+{@link android.support.v17.leanback.widget.GuidanceStylist} and
+{@link android.support.v17.leanback.widget.GuidedActionsStylist}.
+</p>
\ No newline at end of file
diff --git a/docs/html/training/tv/playback/index.jd b/docs/html/training/tv/playback/index.jd
index 43c6d41..d5e4e67 100644
--- a/docs/html/training/tv/playback/index.jd
+++ b/docs/html/training/tv/playback/index.jd
@@ -65,6 +65,10 @@
   <dt><b><a href="now-playing.html">Displaying a Now Playing Card</a></b></dt>
     <dd>Learn how to use a MediaSession to display a Now Playing card on the home screen.</dd>
 
+  <dt><b><a href="guided-step.html">Adding a Guided Step</a></b></dt>
+    <dd>Learn how to use the Leanback support library to guide a user through a series of
+    decisions.</dd>
+
   <dt><b><a href="options.html">Enabling Background Playback</a></b></dt>
     <dd>Learn how to continue playback when the user clicks on <strong>Home</strong>.</dd>
 </dl>
diff --git a/docs/html/training/tv/publishing/checklist.jd b/docs/html/training/tv/publishing/checklist.jd
index 6259721..c044f0e 100644
--- a/docs/html/training/tv/publishing/checklist.jd
+++ b/docs/html/training/tv/publishing/checklist.jd
@@ -137,6 +137,11 @@
   <p>See <a href="{@docRoot}training/tv/start/layouts.html#advertising">Provide Effective Advertising</a>.</p>
 </li>
 
+<li>
+  Use the Leanback library for guiding the user through a series of decisions.
+  <p>See <a href="{@docRoot}training/tv/playback/guided-step.html">Adding a Guided Step</a>.</p>
+</li>
+
 </ol>
 
 
diff --git a/docs/image_sources/training/tv/playback/guided-step-screen-orig.png b/docs/image_sources/training/tv/playback/guided-step-screen-orig.png
new file mode 100644
index 0000000..c39099c
--- /dev/null
+++ b/docs/image_sources/training/tv/playback/guided-step-screen-orig.png
Binary files differ