Merge "Reinitialize KeyStore::mMasterKey after unlock (and add test-keystore)"
diff --git a/Android.mk b/Android.mk
index d4d9a33..f752f44 100644
--- a/Android.mk
+++ b/Android.mk
@@ -364,6 +364,7 @@
     -since ./frameworks/base/api/10.xml 10 \
     -since ./frameworks/base/api/11.xml 11 \
     -since ./frameworks/base/api/12.xml 12 \
+    -since ./frameworks/base/api/13.xml 13 \
 		-werror -hide 113 \
 		-overview $(LOCAL_PATH)/core/java/overview.html
 
@@ -414,6 +415,8 @@
 		            resources/samples/NotePad "Note Pad" \
 		-samplecode $(sample_dir)/SampleSyncAdapter \
 		            resources/samples/SampleSyncAdapter "Sample Sync Adapter" \
+		-samplecode $(sample_dir)/RandomMusicPlayer \
+		            resources/samples/RandomMusicPlayer "Random Music Player" \
 		-samplecode $(sample_dir)/RenderScript \
 		            resources/samples/RenderScript "RenderScript" \
 		-samplecode $(sample_dir)/SearchableDictionary \
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ca77185..d7b1e71 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -388,13 +388,15 @@
 
     sp<MediaSource> mSource;
     StreamType mStreamType;
+    bool mSawFirstIDRFrame;
 
     DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
 };
 
 DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
     : mSource(source),
-      mStreamType(OTHER) {
+      mStreamType(OTHER),
+      mSawFirstIDRFrame(false) {
     const char *mime;
     CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
 
@@ -410,6 +412,8 @@
 }
 
 status_t DetectSyncSource::start(MetaData *params) {
+    mSawFirstIDRFrame = false;
+
     return mSource->start(params);
 }
 
@@ -439,16 +443,30 @@
 
 status_t DetectSyncSource::read(
         MediaBuffer **buffer, const ReadOptions *options) {
-    status_t err = mSource->read(buffer, options);
+    for (;;) {
+        status_t err = mSource->read(buffer, options);
 
-    if (err != OK) {
-        return err;
-    }
+        if (err != OK) {
+            return err;
+        }
 
-    if (mStreamType == AVC && isIDRFrame(*buffer)) {
-        (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, true);
-    } else {
-        (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, true);
+        if (mStreamType == AVC) {
+            bool isIDR = isIDRFrame(*buffer);
+            (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, isIDR);
+            if (isIDR) {
+                mSawFirstIDRFrame = true;
+            }
+        } else {
+            (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, true);
+        }
+
+        if (mStreamType != AVC || mSawFirstIDRFrame) {
+            break;
+        }
+
+        // Ignore everything up to the first IDR frame.
+        (*buffer)->release();
+        *buffer = NULL;
     }
 
     return OK;
@@ -945,6 +963,17 @@
                     fprintf(stderr, "could not create extractor.\n");
                     return -1;
                 }
+
+                sp<MetaData> meta = extractor->getMetaData();
+
+                if (meta != NULL) {
+                    const char *mime;
+                    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+                    if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
+                        syncInfoPresent = false;
+                    }
+                }
             }
 
             size_t numTracks = extractor->countTracks();
diff --git a/core/java/android/net/DhcpStateMachine.java b/core/java/android/net/DhcpStateMachine.java
index c49c019..445b2f7 100644
--- a/core/java/android/net/DhcpStateMachine.java
+++ b/core/java/android/net/DhcpStateMachine.java
@@ -117,13 +117,14 @@
 
         PowerManager powerManager = (PowerManager)mContext.getSystemService(Context.POWER_SERVICE);
         mDhcpRenewWakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, WAKELOCK_TAG);
+        mDhcpRenewWakeLock.setReferenceCounted(false);
 
         mBroadcastReceiver = new BroadcastReceiver() {
             @Override
             public void onReceive(Context context, Intent intent) {
                 //DHCP renew
                 if (DBG) Log.d(TAG, "Sending a DHCP renewal " + this);
-                //acquire a 40s wakelock to finish DHCP renewal
+                //Lock released after 40s in worst case scenario
                 mDhcpRenewWakeLock.acquire(40000);
                 sendMessage(CMD_RENEW_DHCP);
             }
@@ -166,6 +167,7 @@
             switch (message.what) {
                 case CMD_RENEW_DHCP:
                     Log.e(TAG, "Error! Failed to handle a DHCP renewal on " + mInterfaceName);
+                    mDhcpRenewWakeLock.release();
                     break;
                 case SM_QUIT_CMD:
                     mContext.unregisterReceiver(mBroadcastReceiver);
@@ -268,10 +270,12 @@
                         /* Notify controller before starting DHCP */
                         mController.sendMessage(CMD_PRE_DHCP_ACTION);
                         transitionTo(mWaitBeforeRenewalState);
+                        //mDhcpRenewWakeLock is released in WaitBeforeRenewalState
                     } else {
                         if (!runDhcp(DhcpAction.RENEW)) {
                             transitionTo(mStoppedState);
                         }
+                        mDhcpRenewWakeLock.release();
                     }
                     break;
                 case CMD_START_DHCP:
@@ -318,6 +322,10 @@
             }
             return retValue;
         }
+        @Override
+        public void exit() {
+            mDhcpRenewWakeLock.release();
+        }
     }
 
     private boolean runDhcp(DhcpAction dhcpAction) {
diff --git a/docs/html/guide/guide_toc.cs b/docs/html/guide/guide_toc.cs
index 55d711f..75b8ead 100644
--- a/docs/html/guide/guide_toc.cs
+++ b/docs/html/guide/guide_toc.cs
@@ -270,8 +270,8 @@
   	  </li>
 
       <li><a href="<?cs var:toroot ?>guide/topics/media/index.html">
-            <span class="en">Audio and Video</span>
-          </a></li>
+            <span class="en">Media</span>
+          </a><span class="new">updated!</span></li>
       <li>
         <a href="<?cs var:toroot ?>guide/topics/clipboard/copy-paste.html">
             <span class="en">Copy and Paste</span>
diff --git a/docs/html/guide/topics/appwidgets/index.jd b/docs/html/guide/topics/appwidgets/index.jd
index e589292..78b5b51 100644
--- a/docs/html/guide/topics/appwidgets/index.jd
+++ b/docs/html/guide/topics/appwidgets/index.jd
@@ -213,7 +213,7 @@
 <p class="note"><strong>Note:</strong> To make your app widget portable across
 devices, your app widget's minimum size should never be larger than 4 x 4 cells.
 See the <a
-href="{@docRoot}guide/practices/ui_guidelines/widget_design.htmll#sizes">App
+href="{@docRoot}guide/practices/ui_guidelines/widget_design.html#sizes">App
 Widget Design Guidelines</a> for more discussion of Home screen cell sizes.</p>
   </li>
   <li>The <code>updatePeriodMillis</code> attribute defines how often the App
diff --git a/docs/html/guide/topics/fundamentals/services.jd b/docs/html/guide/topics/fundamentals/services.jd
index 468a417..d3ef70a 100644
--- a/docs/html/guide/topics/fundamentals/services.jd
+++ b/docs/html/guide/topics/fundamentals/services.jd
@@ -670,7 +670,7 @@
 either stopped or removed from the foreground.</p>
 
 <p>For example, a music player that plays music from a service should be set to run in the
-foreground, because the user it explicitly aware
+foreground, because the user is explicitly aware
 of its operation. The notification in the status bar might indicate the current song and allow
 the user to launch an activity to interact with the music player.</p>
 
diff --git a/docs/html/guide/topics/media/images/notification1.png b/docs/html/guide/topics/media/images/notification1.png
new file mode 100644
index 0000000..9b01971
--- /dev/null
+++ b/docs/html/guide/topics/media/images/notification1.png
Binary files differ
diff --git a/docs/html/guide/topics/media/images/notification2.png b/docs/html/guide/topics/media/images/notification2.png
new file mode 100644
index 0000000..488648e
--- /dev/null
+++ b/docs/html/guide/topics/media/images/notification2.png
Binary files differ
diff --git a/docs/html/guide/topics/media/index.jd b/docs/html/guide/topics/media/index.jd
index b6d1629..06e6208 100644
--- a/docs/html/guide/topics/media/index.jd
+++ b/docs/html/guide/topics/media/index.jd
@@ -1,4 +1,4 @@
-page.title=Audio and Video
+page.title=Media
 @jd:body
 
     <div id="qv-wrapper">
@@ -6,30 +6,44 @@
 
 <h2>Quickview</h2>
 <ul>
-<li>Audio playback and record</li>
-<li>Video playback</li>
-<li>Handles data from raw resources, files, streams</li>
-<li>Built-in codecs for a variety of media. See <a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
+<li>MediaPlayer APIs allow you to play and record media</li>
+<li>You can handle data from raw resources, files, and streams</li>
+<li>The platform supports a variety of media formats. See <a
+href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media Formats</a></li>
 </ul>
 
 <h2>In this document</h2>
 <ol>
-<li><a href="#playback">Audio and Video Playback</a>
-    <ol>
-      <li><a href="#playraw">Playing from a Raw Resource</li>
-      <li><a href="#playfile">Playing from a File or Stream</li>
-      <li><a href="#jet">Playing JET Content</li>
-    </ol>
+<li><a href="#mediaplayer">Using MediaPlayer</a>
+   <ol>
+      <li><a href='#preparingasync'>Asynchronous Preparation</a></li>
+      <li><a href='#managestate'>Managing State</a></li>
+      <li><a href='#releaseplayer'>Releasing the MediaPlayer</a></li>
+   </ol>
 </li>
-<li><a href="#capture">Audio Capture</a></li>
+<li><a href="#mpandservices">Using a Service with MediaPlayer</a>
+   <ol>
+      <li><a href="#asyncprepare">Running asynchronously</a></li>
+      <li><a href="#asyncerror">Handling asynchronous errors</a></li>
+      <li><a href="#wakelocks">Using wake locks</a></li>
+      <li><a href="#foregroundserv">Running as a foreground service</a></li>
+      <li><a href="#audiofocus">Handling audio focus</a></li>
+      <li><a href="#cleanup">Performing cleanup</a></li>
+   </ol>
+</li>
+<li><a href="#noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</a>
+<li><a href="#viacontentresolver">Retrieving Media from a Content Resolver</a>
+<li><a href="#jetcontent">Playing JET content</a>
+<li><a href="#audiocapture">Performing Audio Capture</a>
 </ol>
 
 <h2>Key classes</h2>
 <ol>
-<li>{@link android.media.MediaPlayer MediaPlayer}</li>
-<li>{@link android.media.MediaRecorder MediaRecorder}</li>
-<li>{@link android.media.JetPlayer JetPlayer}</li>
-<li>{@link android.media.SoundPool SoundPool}</li>
+<li>{@link android.media.MediaPlayer}</li>
+<li>{@link android.media.MediaRecorder}</li>
+<li>{@link android.media.AudioManager}</li>
+<li>{@link android.media.JetPlayer}</li>
+<li>{@link android.media.SoundPool}</li>
 </ol>
 
 <h2>See also</h2>
@@ -41,129 +55,729 @@
 </div>
 </div>
 
-<p>The Android platform offers built-in encoding/decoding for a variety of
-common media types, so that you can easily integrate audio, video, and images into your
-applications. Accessing the platform's media capabilities is fairly straightforward 
-&mdash; you do so using the same intents and activities mechanism that the rest of
-Android uses.</p>
+<p>The Android multimedia framework includes support for encoding and decoding a
+variety of common media types, so that you can easily integrate audio,
+video and images into your applications. You can play audio or video from media files stored in your 
+application's resources (raw resources), from standalone files in the filesystem, or from a data
+stream arriving over a network connection, all using {@link android.media.MediaPlayer} APIs.</p>
 
-<p>Android lets you play audio and video from several types of data sources. You
-can play audio or video from media files stored in the application's resources
-(raw resources), from standalone files in the filesystem, or from a data stream
-arriving over a network connection. To play audio or video from your
-application, use the {@link android.media.MediaPlayer} class.</p>
+<p>You can also record audio and video using the {@link android.media.MediaRecorder} APIs if
+supported by the device hardware. Note that the emulator doesn't have hardware to capture audio or
+video, but actual mobile devices are likely to provide these capabilities.</p>
 
-<p>The platform also lets you record audio and video, where supported by the
-mobile device hardware. To record audio or video, use the {@link
-android.media.MediaRecorder} class. Note that the emulator doesn't have hardware
-to capture audio or video, but actual mobile devices are likely to provide these
-capabilities, accessible through the MediaRecorder class. </p>
+<p>This document shows you how to write a media-playing application that interacts with the user and
+the system in order to obtain good performance and a pleasant user experience.</p>
 
-<p>For a list of media formats for which Android offers built-in support,
-see the <a href="{@docRoot}guide/appendix/media-formats.html">Android Media
-Formats</a> appendix. </p>
+<p class="note"><strong>Note:</strong> You can play back the audio data only to the standard output
+device. Currently, that is the mobile device speaker or a Bluetooth headset. You cannot play sound
+files in the conversation audio during a call.</p>
 
-<h2 id="playback">Audio and Video Playback</h2>
-<p>Media can be played from anywhere: from a raw resource, from a file from the system, 
-or from an available network (URL).</p>
-  
-<p>You can play back the audio data only to the standard
-output device; currently, that is the mobile device speaker or Bluetooth headset. You
-cannot play sound files in the conversation audio. </p>
 
-<h3 id="playraw">Playing from a Raw Resource</h3>
-<p>Perhaps the most common thing to want to do is play back media (notably sound)
-within your own applications. Doing this is easy:</p>
-<ol>
-  <li>Put the sound (or other media resource) file into the <code>res/raw</code>
-  folder of your project, where the Eclipse plugin (or aapt) will find it and
-  make it into a resource that can be referenced from your R class</li>
-  <li>Create an instance of <code>MediaPlayer</code>, referencing that resource using
-  {@link android.media.MediaPlayer#create MediaPlayer.create}, and then call
-  {@link android.media.MediaPlayer#start() start()} on the instance:</li>
-</ol>
-<pre>
-    MediaPlayer mp = MediaPlayer.create(context, R.raw.sound_file_1);
-    mp.start();
+<h2 id="mediaplayer">Using MediaPlayer</h2>
+
+<p>One of the most important components of the media framework is the
+{@link android.media.MediaPlayer MediaPlayer}
+class. An object of this class can fetch, decode, and play both audio and video
+with minimal setup. It supports several different media sources such as:
+<ul>
+   <li>Local resources</li>
+   <li>Internal URIs, such as one you might obtain from a Content Resolver</li>
+   <li>External URLs (streaming)</li>
+</ul>
+</p>
+
+<p>For a list of media formats that Android supports,
+see the <a href="{@docRoot}guide/appendix/media-formats.html">Android Supported Media
+Formats</a> document. </p>
+
+<p>Here is an example
+of how to play audio that's available as a local raw resource (saved in your application's
+{@code res/raw/} directory):</p>
+
+<pre>MediaPlayer mediaPlayer = MediaPlayer.create(context, R.raw.sound_file_1);
+mediaPlayer.start(); // no need to call prepare(); create() does that for you
 </pre>
-<p>To stop playback, call {@link android.media.MediaPlayer#stop() stop()}. If 
-you wish to later replay the media, then you must 
-{@link android.media.MediaPlayer#reset() reset()} and
-{@link android.media.MediaPlayer#prepare() prepare()} the MediaPlayer object
-before calling {@link android.media.MediaPlayer#start() start()} again. 
-(<code>create()</code> calls <code>prepare()</code> the first time.)</p>
-<p>To pause playback, call {@link android.media.MediaPlayer#pause() pause()}. 
-Resume playback from where you paused with 
-{@link android.media.MediaPlayer#start() start()}.</p>
 
-<h3 id="playfile">Playing from a File or Stream</h3>
-<p>You can play back media files from the filesystem or a web URL:</p>
-<ol>
-  <li>Create an instance of the <code>MediaPlayer</code> using <code>new</code></li>
-  <li>Call {@link android.media.MediaPlayer#setDataSource setDataSource()}
-    with a String containing the path (local filesystem or URL)
-    to the file you want to play</li>
-  <li>First {@link android.media.MediaPlayer#prepare prepare()} then
-  {@link android.media.MediaPlayer#start() start()} on the instance:</li>
-</ol>
-<pre>
-    MediaPlayer mp = new MediaPlayer();
-    mp.setDataSource(PATH_TO_FILE);
-    mp.prepare();
-    mp.start();
-</pre>
-<p>{@link android.media.MediaPlayer#stop() stop()} and 
-{@link android.media.MediaPlayer#pause() pause()} work the same as discussed
-above.</p>
-  <p class="note"><strong>Note:</strong>
-  <code>IllegalArgumentException</code> and <code>IOException</code> either
-  need to be caught or passed on when using <code>setDataSource()</code>, since
-  the file you are referencing may not exist.</p>
+<p>In this case, a "raw" resource is a file that the system does not
+try to parse in any particular way. However, the content of this resource should not
+be raw audio. It should be a properly encoded and formatted media file in one 
+of the supported formats.</p>
+
+<p>And here is how you might play from a URI available locally in the system
+(that you obtained through a Content Resolver, for instance):</p>
+
+<pre>Uri myUri = ....; // initialize Uri here
+MediaPlayer mediaPlayer = new MediaPlayer();
+mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mediaPlayer.setDataSource(getApplicationContext(), myUri);
+mediaPlayer.prepare();
+mediaPlayer.start();</pre>
+
+<p>Playing from a remote URL via HTTP streaming looks like this:</p>
+
+<pre>String url = "http://........"; // your URL here
+MediaPlayer mediaPlayer = new MediaPlayer();
+mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mediaPlayer.setDataSource(url);
+mediaPlayer.prepare(); // might take long! (for buffering, etc)
+mediaPlayer.start();</pre>
+
 <p class="note"><strong>Note:</strong>
-If you're passing a URL to an online media file, the file must be capable of 
+If you're passing a URL to stream an online media file, the file must be capable of
 progressive download.</p>
 
-<h3 id="jet">Playing JET content</h3>
-<p>The Android platform includes a JET engine that lets you add interactive playback of JET audio content in your applications. You can create JET content for interactive playback using the JetCreator authoring application that ships with the SDK. To play and manage JET content from your application, use the {@link android.media.JetPlayer JetPlayer} class.</p>
+<p class="caution"><strong>Caution:</strong> You must either catch or pass
+{@link java.lang.IllegalArgumentException} and {@link java.io.IOException} when using
+{@link android.media.MediaPlayer#setDataSource setDataSource()}, because
+the file you are referencing might not exist.</p>
 
-<p>For a description of JET concepts and instructions on how to use the JetCreator authoring tool, see the <a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User Manual</a>. The tool is available fully-featured on the OS X and Windows platforms and the Linux version supports all the content creation features, but not the auditioning of the imported assets. </p>
+<h3 id='preparingasync'>Asynchronous Preparation</h3>
 
-<p>Here's an example of how to set up JET playback from a .jet file stored on the SD card:</p>
+<p>Using {@link android.media.MediaPlayer MediaPlayer} can be straightforward in
+principle. However, it's important to keep in mind that a few more things are
+necessary to integrate it correctly with a typical Android application. For
+example, the call to {@link android.media.MediaPlayer#prepare prepare()} can
+take a long time to execute, because
+it might involve fetching and decoding media data. So, as is the case with any
+method that may take a long time to execute, you should <strong>never call it from your
+application's UI thread</strong>. Doing that will cause the UI to hang until the method returns,
+which is a very bad user experience and can cause an ANR (Application Not Responding) error. Even if
+you expect your resource to load quickly, remember that anything that takes more than a tenth
+of a second to respond in the UI will cause a noticeable pause and will give
+the user the impression that your application is slow.</p>
+
+<p>To avoid hanging your UI thread, spawn another thread to
+prepare the {@link android.media.MediaPlayer} and notify the main thread when done. However, while
+you could write the threading logic
+yourself, this pattern is so common when using {@link android.media.MediaPlayer} that the framework
+supplies a convenient way to accomplish this task by using the
+{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. This method
+starts preparing the media in the background and returns immediately. When the media
+is done preparing, the {@link android.media.MediaPlayer.OnPreparedListener#onPrepared onPrepared()}
+method of the {@link android.media.MediaPlayer.OnPreparedListener
+MediaPlayer.OnPreparedListener}, configured through
+{@link android.media.MediaPlayer#setOnPreparedListener setOnPreparedListener()} is called.</p>
+
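+<p>For instance, here is a minimal sketch of asynchronous preparation for a streamed URL (the URL
+is a placeholder, and the anonymous listener is just one way to wire up the callback):</p>
+
+<pre>
+MediaPlayer mediaPlayer = new MediaPlayer();
+mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mediaPlayer.setDataSource("http://........"); // your URL here
+mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+    public void onPrepared(MediaPlayer player) {
+        player.start(); // called once the media is ready to play
+    }
+});
+mediaPlayer.prepareAsync(); // returns immediately; preparation continues in the background
+</pre>
+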
+<h3 id='managestate'>Managing State</h3>
+
+<p>Another aspect of a {@link android.media.MediaPlayer} that you should keep in mind is
+that it's state-based. That is, the {@link android.media.MediaPlayer} has an internal state
+that you must always be aware of when writing your code, because certain operations
+are only valid when the player is in specific states. If you perform an operation while in the
+wrong state, the system may throw an exception or cause other undesirable behaviors.</p>
+
+<p>The documentation for the
+{@link android.media.MediaPlayer MediaPlayer} class shows a complete state diagram
+that clarifies which methods move the {@link android.media.MediaPlayer} from one state to another.
+For example, when you create a new {@link android.media.MediaPlayer}, it is in the <em>Idle</em>
+state. At that point, you should initialize it by calling
+{@link android.media.MediaPlayer#setDataSource setDataSource()}, bringing it
+to the <em>Initialized</em> state. After that, you have to prepare it using either the
+{@link android.media.MediaPlayer#prepare prepare()} or
+{@link android.media.MediaPlayer#prepareAsync prepareAsync()} method. When
+the {@link android.media.MediaPlayer} is done preparing, it will then enter the <em>Prepared</em>
+state, which means you can call {@link android.media.MediaPlayer#start start()}
+to make it play the media. At that point, as the diagram illustrates,
+you can move between the <em>Started</em>, <em>Paused</em> and <em>PlaybackCompleted</em> states by
+calling such methods as
+{@link android.media.MediaPlayer#start start()},
+{@link android.media.MediaPlayer#pause pause()}, and
+{@link android.media.MediaPlayer#seekTo seekTo()},
+amongst others. When you
+call {@link android.media.MediaPlayer#stop stop()}, however, notice that you
+cannot call {@link android.media.MediaPlayer#start start()} again until you
+prepare the {@link android.media.MediaPlayer} again.</p>
+
+<p>Always keep <a href='{@docRoot}images/mediaplayer_state_diagram.gif'>the state diagram</a> 
+in mind when writing code that interacts with a
+{@link android.media.MediaPlayer} object, because calling its methods from the wrong state is a
+common cause of bugs.</p>
+
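+<p>As a rough illustration of the sequence described above (placeholder path, exception handling
+omitted), the comments below name the state the player is in after each call:</p>
+
+<pre>
+MediaPlayer mediaPlayer = new MediaPlayer(); // Idle
+mediaPlayer.setDataSource(PATH_TO_FILE);     // Initialized
+mediaPlayer.prepare();                       // Prepared
+mediaPlayer.start();                         // Started
+mediaPlayer.pause();                         // Paused
+mediaPlayer.start();                         // Started again
+mediaPlayer.stop();                          // Stopped: start() is no longer valid
+mediaPlayer.prepare();                       // Prepared again
+mediaPlayer.start();                         // Started again
+</pre>
+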
+<h3 id='releaseplayer'>Releasing the MediaPlayer</h3>
+
+<p>A {@link android.media.MediaPlayer MediaPlayer} can consume valuable
+system resources.
+Therefore, you should always take extra precautions to make sure you are not
+hanging on to a {@link android.media.MediaPlayer} instance longer than necessary. When you
+are done with it, you should always call
+{@link android.media.MediaPlayer#release release()} to make sure any
+system resources allocated to it are properly released. For example, if you are
+using a {@link android.media.MediaPlayer} and your activity receives a call to {@link
+android.app.Activity#onStop onStop()}, you must release the {@link android.media.MediaPlayer},
+because it
+makes little sense to hold on to it while your activity is not interacting with
+the user (unless you are playing media in the background, which is discussed in the next section).
+When your activity is resumed or restarted, of course, you need to
+create a new {@link android.media.MediaPlayer} and prepare it again before resuming playback.</p>
+
+<p>Here's how you should release and then nullify your {@link android.media.MediaPlayer}:</p>
+<pre>
+mediaPlayer.release();
+mediaPlayer = null;
+</pre>
+
+<p>As an example, consider the problems that could happen if you
+forgot to release the {@link android.media.MediaPlayer} when your activity is stopped, but create a
+new one when the activity starts again. As you may know, when the user changes the
+screen orientation (or changes the device configuration in another way), 
+the system handles that by restarting the activity (by default), so you might quickly
+consume all of the system resources as the user
+rotates the device back and forth between portrait and landscape, because at each
+orientation change, you create a new {@link android.media.MediaPlayer} that you never
+release. (For more information about runtime restarts, see <a
+href="{@docRoot}guide/topics/resources/runtime-changes.html">Handling Runtime Changes</a>.)</p>
+
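+<p>A minimal sketch of avoiding that leak (assuming a {@code mediaPlayer} field and no need for
+background playback) is to release the player when the activity stops:</p>
+
+<pre>
+&#64;Override
+protected void onStop() {
+    super.onStop();
+    if (mediaPlayer != null) {
+        mediaPlayer.release(); // free the codec and other system resources
+        mediaPlayer = null;
+    }
+}
+</pre>
+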
+<p>You may be wondering what happens if you want to continue playing
+"background media" even when the user leaves your activity, much in the same
+way that the built-in Music application behaves. In this case, what you need is
+a {@link android.media.MediaPlayer MediaPlayer} controlled by a {@link android.app.Service}, as
+discussed in <a href="mpandservices">Using a Service with MediaPlayer</a>.</p>
+
+<h2 id="mpandservices">Using a Service with MediaPlayer</h2>
+
+<p>If you want your media to play in the background even when your application
+is not onscreen&mdash;that is, you want it to continue playing while the user is
+interacting with other applications&mdash;then you must start a
+{@link android.app.Service Service} and control the
+{@link android.media.MediaPlayer MediaPlayer} instance from there.
+You should be careful about this setup, because the user and the system have expectations
+about how an application running a background service should interact with the rest of the
+system. If your application does not fulfill those expectations, the user may
+have a bad experience. This section describes the main issues that you should be
+aware of and offers suggestions about how to approach them.</p>
+
+
+<h3 id="asyncprepare">Running asynchronously</h3>
+
+<p>First of all, like an {@link android.app.Activity Activity}, a
+{@link android.app.Service Service} does all of its work in a single thread by
+default&mdash;in fact, if you're running an activity and a service from the same application, they
+use the same thread (the "main thread") by default. Therefore, services need to
+process incoming intents quickly
+and never perform lengthy computations when responding to them. If any heavy
+work or blocking calls are expected, you must do those tasks asynchronously: either from
+another thread you implement yourself, or using the framework's many facilities
+for asynchronous processing.</p>
+
+<p>For instance, when using a {@link android.media.MediaPlayer} from your main thread,
+you should call {@link android.media.MediaPlayer#prepareAsync prepareAsync()} rather than
+{@link android.media.MediaPlayer#prepare prepare()}, and implement
+a {@link android.media.MediaPlayer.OnPreparedListener MediaPlayer.OnPreparedListener}
+in order to be notified when the preparation is complete and you can start playing.
+For example:</p>
 
 <pre>
-JetPlayer myJet = JetPlayer.getJetPlayer();
-myJet.loadJetFile("/sdcard/level1.jet");
+public class MyService extends Service implements MediaPlayer.OnPreparedListener {
+    private static final String ACTION_PLAY = "com.example.action.PLAY";
+    MediaPlayer mMediaPlayer = null;
+
+    public int onStartCommand(Intent intent, int flags, int startId) {
+        ...
+        if (intent.getAction().equals(ACTION_PLAY)) {
+            mMediaPlayer = ... // initialize it here
+            mMediaPlayer.setOnPreparedListener(this);
+            mMediaPlayer.prepareAsync(); // prepare async to not block main thread
+        }
+        return START_STICKY; // START_STICKY is a reasonable start mode for a playback service
+    }
+
+    /** Called when MediaPlayer is ready */
+    public void onPrepared(MediaPlayer player) {
+        player.start();
+    }
+}
+</pre>
+
+
+<h3 id="asyncerror">Handling asynchronous errors</h3>
+
+<p>With synchronous operations, errors would normally
+be signaled with an exception or an error code, but whenever you use asynchronous
+resources, you should make sure your application is notified
+of errors appropriately. In the case of a {@link android.media.MediaPlayer MediaPlayer},
+you can accomplish this by implementing a
+{@link android.media.MediaPlayer.OnErrorListener MediaPlayer.OnErrorListener} and
+setting it in your {@link android.media.MediaPlayer} instance:</p>
+
+<pre>
+public class MyService extends Service implements MediaPlayer.OnErrorListener {
+    MediaPlayer mMediaPlayer;
+
+    public void initMediaPlayer() {
+        // ...initialize the MediaPlayer here...
+
+        mMediaPlayer.setOnErrorListener(this);
+    }
+
+    &#64;Override
+    public boolean onError(MediaPlayer mp, int what, int extra) {
+        // ... react appropriately ...
+        // The MediaPlayer has moved to the Error state, must be reset!
+        return true; // returning true indicates the error was handled here
+    }
+}
+</pre>
+
+<p>It's important to remember that when an error occurs, the {@link android.media.MediaPlayer}
+moves to the <em>Error</em> state (see the documentation for the
+{@link android.media.MediaPlayer MediaPlayer} class for the full state diagram)
+and you must reset it before you can use it again.</p>
+
+
+<h3 id="wakelocks">Using wake locks</h3>
+
+<p>When you design an application that plays media
+in the background, keep in mind that the device may go to sleep
+while your service is running. Because the Android system tries to conserve
+battery while the device is sleeping, the system tries to shut off any 
+of the phone's features that are
+not necessary, including the CPU and the WiFi hardware.
+However, if your service is playing or streaming music, you want to prevent
+the system from interfering with your playback.</p>
+
+<p>In order to ensure that your service continues to run under
+those conditions, you have to use "wake locks." A wake lock is a way to signal to
+the system that your application is using some feature that should
+stay available even if the phone is idle.</p>
+
+<p class="caution"><strong>Notice:</strong> You should always use wake locks sparingly and hold them
+only for as long as truly necessary, because they significantly reduce the battery life of the
+device.</p>
+
+<p>To ensure that the CPU continues running while your {@link android.media.MediaPlayer} is
+playing, call the {@link android.media.MediaPlayer#setWakeMode
+setWakeMode()} method when initializing your {@link android.media.MediaPlayer}. Once you do,
+the {@link android.media.MediaPlayer} holds the specified lock while playing and releases the lock
+when paused or stopped:</p>
+
+<pre>
+mMediaPlayer = new MediaPlayer();
+// ... other initialization here ...
+mMediaPlayer.setWakeMode(getApplicationContext(), PowerManager.PARTIAL_WAKE_LOCK);
+</pre>
+
+<p>However, the wake lock acquired in this example guarantees only that the CPU remains awake. If
+you are streaming media over the
+network and you are using Wi-Fi, you probably want to hold a
+{@link android.net.wifi.WifiManager.WifiLock WifiLock} as
+well, which you must acquire and release manually. So, when you start preparing the
+{@link android.media.MediaPlayer} with the remote URL, you should create and acquire the Wi-Fi lock.
+For example:</p>
+
+<pre>
+WifiLock wifiLock = ((WifiManager) getSystemService(Context.WIFI_SERVICE))
+    .createWifiLock(WifiManager.WIFI_MODE_FULL, "mylock");
+
+wifiLock.acquire();
+</pre>
+
+<p>When you pause or stop your media, or when you no longer need the
+network, you should release the lock:</p>
+
+<pre>
+wifiLock.release();
+</pre>
+
+
+<h3 id="foregroundserv">Running as a foreground service</h3>
+
+<p>Services are often used for performing background tasks, such as fetching emails,
+synchronizing data, and downloading content. In these
+cases, the user is not actively aware of the service's execution, and probably
+wouldn't even notice if some of these services were interrupted and later restarted.</p>
+
+<p>But consider the case of a service that is playing music. Clearly this is a service that the user
+is actively aware of and the experience would be severely affected by any interruptions.
+Additionally, it's a service that the user will likely wish to interact with during its execution.
+In this case, the service should run as a "foreground service." A
+foreground service holds a higher level of importance within the system&mdash;the system will
+almost never kill the service, because it is of immediate importance to the user. When running
+in the foreground, the service also must provide a status bar notification to ensure that users are
+aware of the running service and allow them to open an activity that can interact with the
+service.</p>
+
+<p>In order to turn your service into a foreground service, you must create a
+{@link android.app.Notification Notification} for the status bar and call
+{@link android.app.Service#startForeground startForeground()} from the {@link
+android.app.Service}. For example:</p>
+
+<pre>String songName;
+// assign the song name to songName
+PendingIntent pi = PendingIntent.getActivity(getApplicationContext(), 0,
+                new Intent(getApplicationContext(), MainActivity.class),
+                PendingIntent.FLAG_UPDATE_CURRENT);
+Notification notification = new Notification();
+notification.tickerText = "Playing: " + songName;
+notification.icon = R.drawable.play0;
+notification.flags |= Notification.FLAG_ONGOING_EVENT;
+notification.setLatestEventInfo(getApplicationContext(), "MusicPlayerSample",
+                "Playing: " + songName, pi);
+startForeground(NOTIFICATION_ID, notification);
+</pre>
+
+<p>While your service is running in the foreground, the notification you
+configured is visible in the notification area of the device. If the user
+selects the notification, the system invokes the {@link android.app.PendingIntent} you supplied. In
+the example above, it opens an activity ({@code MainActivity}).</p>
+
+<p>Figure 1 shows how your notification appears to the user:</p>
+
+<img src='images/notification1.png' />
+&nbsp;&nbsp;
+<img src='images/notification2.png' />
+<p class="img-caption"><strong>Figure 1.</strong> Screenshots of a foreground service's notification, showing the notification icon in the status bar (left) and the expanded view (right).</p>
+
+<p>You should only hold on to the "foreground service" status while your
+service is actually performing something the user is actively aware of. Once
+that is no longer true, you should release it by calling
+{@link android.app.Service#stopForeground stopForeground()}:</p>
+
+<pre>
+stopForeground(true);
+</pre>
+
+<p>For more information, see the documentation about <a
+href="{@docRoot}guide/topics/fundamentals/services.html#Foreground">Services</a> and
+<a href="{@docRoot}guide/topics/ui/notifiers/notifications.html">Status Bar Notifications</a>.</p>
+
+
+<h3 id="audiofocus">Handling audio focus</h3>
+
+<p>Even though only one activity can run in the foreground at any given time, Android is a
+multi-tasking environment. This poses a particular challenge to applications
+that use audio, because there is only one audio output and there may be several
+media services competing for its use. Before Android 2.2, there was no built-in
+mechanism to address this issue, which could in some cases lead to a bad user
+experience. For example, when a user is listening to
+music and another application needs to notify the user of something very important,
+the user might not hear the notification tone due to the loud music. Starting with
+Android 2.2, the platform offers a way for applications to negotiate their
+use of the device's audio output. This mechanism is called Audio Focus.</p>
+
+<p>When your application needs to output audio such as music or a notification, 
+you should always request audio focus. Once it has focus, it can use the sound output freely, but it should
+always listen for focus changes. If it is notified that it has lost the audio
+focus, it should immediately either kill the audio or lower it to a quiet level
+(known as "ducking"&mdash;there is a flag that indicates which one is appropriate) and only resume
+loud playback after it receives focus again.</p>
+
+<p>Audio Focus is cooperative in nature. That is, applications are expected
+(and highly encouraged) to comply with the audio focus guidelines, but the
+rules are not enforced by the system. If an application wants to play loud
+music even after losing audio focus, nothing in the system will prevent that.
+However, the user is more likely to have a bad experience and will be more
+likely to uninstall the misbehaving application.</p>
+
+<p>To request audio focus, you must call
+{@link android.media.AudioManager#requestAudioFocus requestAudioFocus()} from the {@link
+android.media.AudioManager}, as the example below demonstrates:</p>
+
+<pre>
+AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+int result = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
+    AudioManager.AUDIOFOCUS_GAIN);
+
+if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+    // could not get audio focus.
+}
+</pre>
+
+<p>The first parameter to {@link android.media.AudioManager#requestAudioFocus requestAudioFocus()}
+is an {@link android.media.AudioManager.OnAudioFocusChangeListener
+AudioManager.OnAudioFocusChangeListener},
+whose {@link android.media.AudioManager.OnAudioFocusChangeListener#onAudioFocusChange
+onAudioFocusChange()} method is called whenever there is a change in audio focus. Therefore, you
+should also implement this interface on your service and activities. For example:</p>
+
+<pre>
+class MyService extends Service
+                implements AudioManager.OnAudioFocusChangeListener {
+    // ....
+    public void onAudioFocusChange(int focusChange) {
+        // Do something based on focus change...
+    }
+}
+</pre>
+
+<p>The <code>focusChange</code> parameter tells you how the audio focus has changed, and
+can be one of the following values (they are all constants defined in
+{@link android.media.AudioManager AudioManager}):</p>
+
+<ul>
+<li>{@link android.media.AudioManager#AUDIOFOCUS_GAIN}: You have gained the audio focus.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS}: You have lost the audio focus for a
+presumably long time.
+You must stop all audio playback. Because you should expect not to have focus back
+for a long time, this would be a good place to clean up your resources as much
+as possible. For example, you should release the {@link android.media.MediaPlayer}.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT}: You have
+temporarily lost audio focus, but should receive it back shortly. You must stop
+all audio playback, but you can keep your resources because you will probably get
+focus back shortly.</li>
+
+<li>{@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}: You have temporarily
+lost audio focus,
+but you are allowed to continue to play audio quietly (at a low volume) instead
+of killing audio completely.</li>
+</ul>
+
+<p>Here is an example implementation:</p>
+
+<pre>
+public void onAudioFocusChange(int focusChange) {
+    switch (focusChange) {
+        case AudioManager.AUDIOFOCUS_GAIN:
+            // resume playback
+            if (mMediaPlayer == null) initMediaPlayer();
+            else if (!mMediaPlayer.isPlaying()) mMediaPlayer.start();
+            mMediaPlayer.setVolume(1.0f, 1.0f);
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS:
+            // Lost focus for an unbounded amount of time: stop playback and release media player
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.stop();
+            mMediaPlayer.release();
+            mMediaPlayer = null;
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+            // Lost focus for a short time, but we have to stop
+            // playback. We don't release the media player because playback
+            // is likely to resume
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.pause();
+            break;
+
+        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+            // Lost focus for a short time, but it's ok to keep playing
+            // at an attenuated level
+            if (mMediaPlayer.isPlaying()) mMediaPlayer.setVolume(0.1f, 0.1f);
+            break;
+    }
+}
+</pre>
+
+<p>Keep in mind that the audio focus APIs are available only with API level 8 (Android 2.2)
+and above, so if you want to support previous
+versions of Android, you should adopt a backward compatibility strategy that
+allows you to use this feature if available, and fall back seamlessly if not.</p>
+
+<p>You can achieve backward compatibility either by calling the audio focus methods by reflection
+or by implementing all the audio focus features in a separate class (say,
+<code>AudioFocusHelper</code>). Here is an example of such a class:</p>
+
+<pre>
+public class AudioFocusHelper implements AudioManager.OnAudioFocusChangeListener {
+    AudioManager mAudioManager;
+
+    // other fields here, you'll probably hold a reference to an interface
+    // that you can use to communicate the focus changes to your Service
+
+    public AudioFocusHelper(Context ctx, /* other arguments here */) {
+        mAudioManager = (AudioManager) ctx.getSystemService(Context.AUDIO_SERVICE);
+        // ...
+    }
+
+    public boolean requestFocus() {
+        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
+            mAudioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
+            AudioManager.AUDIOFOCUS_GAIN);
+    }
+
+    public boolean abandonFocus() {
+        return AudioManager.AUDIOFOCUS_REQUEST_GRANTED ==
+            mAudioManager.abandonAudioFocus(this);
+    }
+
+    &#64;Override
+    public void onAudioFocusChange(int focusChange) {
+        // let your service know about the focus change
+    }
+}
+</pre>
+
+
+<p>You can create an instance of the <code>AudioFocusHelper</code> class only if you detect that
+the system is running API level 8 or above. For example:</p>
+
+<pre>
+if (android.os.Build.VERSION.SDK_INT &gt;= 8) {
+    mAudioFocusHelper = new AudioFocusHelper(getApplicationContext(), this);
+} else {
+    mAudioFocusHelper = null;
+}
+</pre>
+
+
+<h3 id="cleanup">Performing cleanup</h3>
+
+<p>As mentioned earlier, a {@link android.media.MediaPlayer} object can consume a significant
+amount of system resources, so you should keep it only for as long as you need and call
+{@link android.media.MediaPlayer#release release()} when you are done with it. It's important
+to call this cleanup method explicitly rather than rely on system garbage collection because
+it might take some time before the garbage collector reclaims the {@link android.media.MediaPlayer},
+as it's only sensitive to memory needs and not to shortage of other media-related resources.
+So, in the case when you're using a service, you should always override the
+{@link android.app.Service#onDestroy onDestroy()} method to make sure you are releasing
+the {@link android.media.MediaPlayer}:</p>
+
+<pre>
+public class MyService extends Service {
+   MediaPlayer mMediaPlayer;
+   // ...
+
+   &#64;Override
+   public void onDestroy() {
+       if (mMediaPlayer != null) mMediaPlayer.release();
+   }
+}
+</pre>
+
+<p>You should always look for other opportunities to release your {@link android.media.MediaPlayer}
+as well, apart from releasing it when your service is shut down. For example, if you expect not
+to be able to play media for an extended period of time (after losing audio focus, for example),
+you should definitely release your existing {@link android.media.MediaPlayer} and create it again
+later. On the
+other hand, if you only expect to stop playback for a very short time, you should probably
+hold on to your {@link android.media.MediaPlayer} to avoid the overhead of creating and preparing it
+again.</p>
+
+
+
+<h2 id="noisyintent">Handling the AUDIO_BECOMING_NOISY Intent</h2>
+
+<p>Many well-written applications that play audio automatically stop playback when an event
+occurs that causes the audio to become noisy (output through external speakers). For instance,
+this might happen when a user is listening to music through headphones and accidentally
+disconnects the headphones from the device. However, this behavior does not happen automatically.
+If you don't implement this feature, audio plays out of the device's external speakers, which
+might not be what the user wants.</p>
+
+<p>You can ensure your app stops playing music in these situations by handling
+the {@link android.media.AudioManager#ACTION_AUDIO_BECOMING_NOISY} intent, for which you can register a receiver by
+adding the following to your manifest:</p>
+
+<pre>
+&lt;receiver android:name=".MusicIntentReceiver"&gt;
+   &lt;intent-filter&gt;
+      &lt;action android:name="android.media.AUDIO_BECOMING_NOISY" /&gt;
+   &lt;/intent-filter&gt;
+&lt;/receiver&gt;
+</pre>
+
+<p>This registers the <code>MusicIntentReceiver</code> class as a broadcast receiver for that
+intent. You should then implement this class:</p>
+
+<pre>
+public class MusicIntentReceiver extends android.content.BroadcastReceiver {
+   &#64;Override
+   public void onReceive(Context ctx, Intent intent) {
+      if (intent.getAction().equals(
+                    android.media.AudioManager.ACTION_AUDIO_BECOMING_NOISY)) {
+          // signal your service to stop playback
+          // (via an Intent, for instance)
+      }
+   }
+}
+</pre>
+
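+<p>How you signal your service is up to you. One possibility, sketched here with a hypothetical
+action string and the <code>MyService</code> class from the earlier examples, is to send an
+explicit intent that the service checks for in
+{@link android.app.Service#onStartCommand onStartCommand()}:</p>
+
+<pre>
+Intent stopIntent = new Intent(ctx, MyService.class);
+stopIntent.setAction("com.example.action.STOP_PLAYBACK"); // hypothetical action name
+ctx.startService(stopIntent);
+</pre>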
+
+
+
+<h2 id="viacontentresolver">Retrieving Media from a Content Resolver</h2>
+
+<p>Another feature that may be useful in a media player application is the ability to
+retrieve music that the user has on the device. You can do that by querying the {@link
+android.content.ContentResolver} for external media:</p>
+
+<pre>
+ContentResolver contentResolver = getContentResolver();
+Uri uri = android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
+Cursor cursor = contentResolver.query(uri, null, null, null, null);
+if (cursor == null) {
+    // query failed, handle error.
+} else if (!cursor.moveToFirst()) {
+    // no media on the device
+} else {
+    int titleColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media.TITLE);
+    int idColumn = cursor.getColumnIndex(android.provider.MediaStore.Audio.Media._ID);
+    do {
+       long thisId = cursor.getLong(idColumn);
+       String thisTitle = cursor.getString(titleColumn);
+       // ...process entry...
+    } while (cursor.moveToNext());
+}
+</pre>
+
+<p>To use this with the {@link android.media.MediaPlayer}, you can do this:</p>
+
+<pre>
+long id = /* retrieve it from somewhere */;
+Uri contentUri = ContentUris.withAppendedId(
+        android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, id);
+
+mMediaPlayer = new MediaPlayer();
+mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+mMediaPlayer.setDataSource(getApplicationContext(), contentUri);
+
+// ...prepare and start...
+</pre>
+
+
+
+<h2 id="jetcontent">Playing JET content</h2>
+
+<p>The Android platform includes a JET engine that lets you add interactive playback of JET audio
+content in your applications. You can create JET content for interactive playback using the
+JetCreator authoring application that ships with the SDK. To play and manage JET content from your
+application, use the {@link android.media.JetPlayer JetPlayer} class.</p>
+
+<p>For a description of JET concepts and instructions on how to use the JetCreator authoring tool,
+see the <a href="{@docRoot}guide/topics/media/jet/jetcreator_manual.html">JetCreator User
+Manual</a>. The tool is available on Windows, OS X, and Linux platforms (the Linux version does not
+support auditioning of imported assets, unlike the Windows and OS X versions).
+</p>
+
+<p>Here's an example of how to set up JET playback from a <code>.jet</code> file stored on the SD card:</p>
+
+<pre>
+JetPlayer jetPlayer = JetPlayer.getJetPlayer();
+jetPlayer.loadJetFile("/sdcard/level1.jet");
 byte segmentId = 0;
 
 // queue segment 5, repeat once, use General MIDI, transpose by -1 octave
-myJet.queueJetSegment(5, -1, 1, -1, 0, segmentId++);
+jetPlayer.queueJetSegment(5, -1, 1, -1, 0, segmentId++);
 // queue segment 2
-myJet.queueJetSegment(2, -1, 0, 0, 0, segmentId++);
+jetPlayer.queueJetSegment(2, -1, 0, 0, 0, segmentId++);
 
-myJet.play();
+jetPlayer.play();
 </pre>
 
-<p>The SDK includes an example application &mdash; JetBoy &mdash; that shows how to use {@link android.media.JetPlayer JetPlayer} to create an interactive music soundtrack in your game. It also illustrates how to use JET events to synchronize music and game logic. The application is located at <code>&lt;sdk&gt;/platforms/android-1.5/samples/JetBoy</code>.
+<p>The SDK includes an example application &mdash; JetBoy &mdash; that shows how to use {@link
+android.media.JetPlayer JetPlayer} to create an interactive music soundtrack in your game. It also
+illustrates how to use JET events to synchronize music and game logic. The application is located at
+<code>&lt;sdk&gt;/platforms/android-1.5/samples/JetBoy</code>.</p>
 
-<h2 id="capture">Audio Capture</h2>
-<p>Audio capture from the device is a bit more complicated than audio/video playback, but still fairly simple:</p>
+
+<h2 id="audiocapture">Performing Audio Capture</h2>
+
+<p>Audio capture from the device is a bit more complicated than audio and video playback, but still fairly simple:</p>
 <ol>
-  <li>Create a new instance of {@link android.media.MediaRecorder android.media.MediaRecorder} using <code>new</code></li>
+  <li>Create a new instance of {@link android.media.MediaRecorder android.media.MediaRecorder}.</li>
   <li>Set the audio source using
         {@link android.media.MediaRecorder#setAudioSource MediaRecorder.setAudioSource()}. You will probably want to use
-  <code>MediaRecorder.AudioSource.MIC</code></li>
+  <code>MediaRecorder.AudioSource.MIC</code>.</li>
   <li>Set output file format using
-        {@link android.media.MediaRecorder#setOutputFormat MediaRecorder.setOutputFormat()}
+        {@link android.media.MediaRecorder#setOutputFormat MediaRecorder.setOutputFormat()}.
   </li>
   <li>Set output file name using
-        {@link android.media.MediaRecorder#setOutputFile MediaRecorder.setOutputFile()}
+        {@link android.media.MediaRecorder#setOutputFile MediaRecorder.setOutputFile()}.
   </li>
-  <li>Set the audio encoder using 
-        {@link android.media.MediaRecorder#setAudioEncoder MediaRecorder.setAudioEncoder()}
+  <li>Set the audio encoder using
+        {@link android.media.MediaRecorder#setAudioEncoder MediaRecorder.setAudioEncoder()}.
   </li>
   <li>Call {@link android.media.MediaRecorder#prepare MediaRecorder.prepare()}
    on the MediaRecorder instance.</li>
-  <li>To start audio capture, call 
+  <li>To start audio capture, call
   {@link android.media.MediaRecorder#start MediaRecorder.start()}. </li>
   <li>To stop audio capture, call {@link android.media.MediaRecorder#stop MediaRecorder.stop()}.
   <li>When you are done with the MediaRecorder instance, call
@@ -354,3 +968,4 @@
 </pre>
 
 
+
diff --git a/docs/html/guide/topics/usb/adk.jd b/docs/html/guide/topics/usb/adk.jd
index 0e35637..2e172f5 100644
--- a/docs/html/guide/topics/usb/adk.jd
+++ b/docs/html/guide/topics/usb/adk.jd
@@ -54,6 +54,11 @@
         </li>
       </ol>
 
+      <h2>Download</h2>
+      <ol>
+        <li><a href="https://dl-ssl.google.com/android/adk/adk_release_0512.zip">ADK package</a></li>
+      </ol>
+
 
       <h2>See also</h2>
 
diff --git a/docs/html/guide/topics/wireless/bluetooth.jd b/docs/html/guide/topics/wireless/bluetooth.jd
index 48632ea..a6c46d2 100644
--- a/docs/html/guide/topics/wireless/bluetooth.jd
+++ b/docs/html/guide/topics/wireless/bluetooth.jd
@@ -66,7 +66,7 @@
 
 <h2 id="TheBasics">The Basics</h2>
 
-<p>This document describes how to us the Android Bluetooth APIs to accomplish
+<p>This document describes how to use the Android Bluetooth APIs to accomplish
 the four major tasks necessary to communicate using Bluetooth: setting up
 Bluetooth, finding devices that are either paired or available in the local
 area, connecting devices, and transferring data between devices.</p>
diff --git a/docs/html/resources/resources-data.js b/docs/html/resources/resources-data.js
index 097d004..e8c9ae7 100644
--- a/docs/html/resources/resources-data.js
+++ b/docs/html/resources/resources-data.js
@@ -547,6 +547,16 @@
     }
   },
   {
+    tags: ['sample', 'new', 'media' ],
+    path: 'samples/RandomMusicPlayer/index.html',
+    title: {
+      en: 'Random Music Player'
+    },
+    description: {
+      en: 'Demonstrates how to write a multimedia application that plays music from the device and from URLs. It manages media playback from a service and can play music in the background, respecting audio focus changes.'
+    }
+  },
+  {
     tags: ['sample', 'new', 'newfeature', 'performance', 'gamedev', 'gl'],
     path: 'samples/RenderScript/index.html',
     title: {
diff --git a/docs/html/resources/samples/images/randommusicplayer.png b/docs/html/resources/samples/images/randommusicplayer.png
new file mode 100644
index 0000000..e16e067
--- /dev/null
+++ b/docs/html/resources/samples/images/randommusicplayer.png
Binary files differ
diff --git a/docs/html/search.jd b/docs/html/search.jd
index 609ade9..339ce2d 100644
--- a/docs/html/search.jd
+++ b/docs/html/search.jd
@@ -1,144 +1,140 @@
 page.title=Search Results

 @jd:body

 

-<script src="http://www.google.com/jsapi" type="text/javascript"></script>
+<script src="http://www.google.com/jsapi" type="text/javascript"></script>

 <script src="{@docRoot}assets/jquery-history.js" type="text/javascript"></script>

-<script type="text/javascript">      
-      var tabIndex = 0;
-            
-      google.load('search', '1');
-
-      function OnLoad() {
-        document.getElementById("search_autocomplete").style.color = "#000";
-
-        // create search control
-        searchControl = new google.search.SearchControl();
-
-        // use our existing search form and use tabs when multiple searchers are used
-        drawOptions = new google.search.DrawOptions();
-        drawOptions.setDrawMode(google.search.SearchControl.DRAW_MODE_TABBED);
-        drawOptions.setInput(document.getElementById("search_autocomplete"));
-
-        // configure search result options
-        searchOptions = new google.search.SearcherOptions();
-        searchOptions.setExpandMode(GSearchControl.EXPAND_MODE_OPEN);
-
-        // configure each of the searchers, for each tab
-        devSiteSearcher = new google.search.WebSearch();
-        devSiteSearcher.setUserDefinedLabel("All Developers Site");
-        devSiteSearcher.setSiteRestriction("http://developer.android.com/");
-
-        devGuideSearcher = new google.search.WebSearch();
-        devGuideSearcher.setUserDefinedLabel("Dev Guide");
-        devGuideSearcher.setSiteRestriction("http://developer.android.com/guide/");
-
-        referenceSearcher = new google.search.WebSearch();
-        referenceSearcher.setUserDefinedLabel("Reference");
-        referenceSearcher.setSiteRestriction("http://developer.android.com/reference/");
-
-        blogSearcher = new google.search.WebSearch();
-        blogSearcher.setUserDefinedLabel("Blog");
-        blogSearcher.setSiteRestriction("http://android-developers.blogspot.com");
-
-        groupsSearcher = new google.search.WebSearch();
-        groupsSearcher.setUserDefinedLabel("Developer Groups");
-        groupsSearcher.setSiteRestriction("001283715400630100512:ggqrtvkztwm");
-
-        sourceSiteSearcher = new google.search.WebSearch();
-        sourceSiteSearcher.setUserDefinedLabel("Android Source");
-        sourceSiteSearcher.setSiteRestriction("http://source.android.com");
-
-        homeSiteSearcher = new google.search.WebSearch();
-        homeSiteSearcher.setUserDefinedLabel("Android Home");
-        homeSiteSearcher.setSiteRestriction("http://www.android.com");
- 
-        // add each searcher to the search control
-        searchControl.addSearcher(devSiteSearcher, searchOptions);
-        searchControl.addSearcher(devGuideSearcher, searchOptions);
-        searchControl.addSearcher(referenceSearcher, searchOptions);
-        searchControl.addSearcher(groupsSearcher, searchOptions);
-        searchControl.addSearcher(sourceSiteSearcher, searchOptions);
-        searchControl.addSearcher(blogSearcher, searchOptions);
-
-        // configure result options
-        searchControl.setResultSetSize(google.search.Search.LARGE_RESULTSET);
-        searchControl.setLinkTarget(google.search.Search.LINK_TARGET_SELF);
-        searchControl.setTimeoutInterval(google.search.SearchControl.TIMEOUT_LONG);
-        searchControl.setNoResultsString(google.search.SearchControl.NO_RESULTS_DEFAULT_STRING);
-
-        // upon ajax search, refresh the url and search title
-        searchControl.setSearchStartingCallback(this, function(control, searcher, query) {
-            // save the tab index from the hash
-            tabIndex = location.hash.split("&t=")[1];
-
-            $("#searchTitle").html("search results for <em>" + escapeHTML(query) + "</em>");
-            $.history.add('q=' + query + '&t=' + tabIndex);
-            openTab();
-        });
-
-        // draw the search results box
-        searchControl.draw(document.getElementById("leftSearchControl"), drawOptions);
-
-        // get query and execute the search
-        if (location.hash.indexOf("&t=") != -1) {
-          searchControl.execute(decodeURI(getQuery(location.hash)));
-        }
-        
-        document.getElementById("search_autocomplete").focus();
-        addTabListeners();
-      }
-      // End of OnLoad
-
-
-      google.setOnLoadCallback(OnLoad, true);
-
-      // when an event on the browser history occurs (back, forward, load) perform a search
-      $(window).history(function(e, hash) {
-        var query = decodeURI(getQuery(hash));
-        searchControl.execute(query);
-
-        $("#searchTitle").html("search results for <em>" + escapeHTML(query) + "</em>");
-      });
-
-      // forcefully regain key-up event control (previously jacked by search api)
-      $("#search_autocomplete").keyup(function(event) {
-        return search_changed(event, false, '/');
-      });
-
-      // open a tab, specified by its array position
-      function openTab() {
-        tabIndex = location.hash.split("&t=")[1];
-        
-        // show the appropriate tab
-        var tabHeaders = $(".gsc-tabHeader");
-        $(tabHeaders[tabIndex]).click();
-      }
-      
-      // add event listeners to each tab so we can track the browser history
-      function addTabListeners() {
-        var tabHeaders = $(".gsc-tabHeader");
-        for (var i = 0; i < tabHeaders.length; i++) {
-          $(tabHeaders[i]).attr("id",i).click(function() {
-            var tabHeaders = $(".gsc-tabHeader");
+<script type="text/javascript">      

+      var tabIndex = 0;

+            

+      google.load('search', '1');

+

+      function OnLoad() {

+        document.getElementById("search_autocomplete").style.color = "#000";

+

+        // create search control

+        searchControl = new google.search.SearchControl();

+

+        // use our existing search form and use tabs when multiple searchers are used

+        drawOptions = new google.search.DrawOptions();

+        drawOptions.setDrawMode(google.search.SearchControl.DRAW_MODE_TABBED);

+        drawOptions.setInput(document.getElementById("search_autocomplete"));

+

+        // configure search result options

+        searchOptions = new google.search.SearcherOptions();

+        searchOptions.setExpandMode(GSearchControl.EXPAND_MODE_OPEN);

+

+        // configure each of the searchers, for each tab

+        devSiteSearcher = new google.search.WebSearch();

+        devSiteSearcher.setUserDefinedLabel("All");

+        devSiteSearcher.setSiteRestriction("001482626316274216503:zu90b7s047u");

+

+        devGuideSearcher = new google.search.WebSearch();

+        devGuideSearcher.setUserDefinedLabel("Dev Guide");

+        devGuideSearcher.setSiteRestriction("http://developer.android.com/guide/");

+

+        referenceSearcher = new google.search.WebSearch();

+        referenceSearcher.setUserDefinedLabel("Reference");

+        referenceSearcher.setSiteRestriction("http://developer.android.com/reference/");

+

+        blogSearcher = new google.search.WebSearch();

+        blogSearcher.setUserDefinedLabel("Blog");

+        blogSearcher.setSiteRestriction("http://android-developers.blogspot.com");

+

+        groupsSearcher = new google.search.WebSearch();

+        groupsSearcher.setUserDefinedLabel("Developer Groups");

+        groupsSearcher.setSiteRestriction("001283715400630100512:ggqrtvkztwm");

+

+        sourceSiteSearcher = new google.search.WebSearch();

+        sourceSiteSearcher.setUserDefinedLabel("Android Source");

+        sourceSiteSearcher.setSiteRestriction("http://source.android.com");

+ 

+        // add each searcher to the search control

+        searchControl.addSearcher(devSiteSearcher, searchOptions);

+        searchControl.addSearcher(devGuideSearcher, searchOptions);

+        searchControl.addSearcher(referenceSearcher, searchOptions);

+        searchControl.addSearcher(groupsSearcher, searchOptions);

+        searchControl.addSearcher(sourceSiteSearcher, searchOptions);

+        searchControl.addSearcher(blogSearcher, searchOptions);

+

+        // configure result options

+        searchControl.setResultSetSize(google.search.Search.LARGE_RESULTSET);

+        searchControl.setLinkTarget(google.search.Search.LINK_TARGET_SELF);

+        searchControl.setTimeoutInterval(google.search.SearchControl.TIMEOUT_LONG);

+        searchControl.setNoResultsString(google.search.SearchControl.NO_RESULTS_DEFAULT_STRING);

+

+        // upon ajax search, refresh the url and search title

+        searchControl.setSearchStartingCallback(this, function(control, searcher, query) {

+            // save the tab index from the hash

+            tabIndex = location.hash.split("&t=")[1];

+

+            $("#searchTitle").html("search results for <em>" + escapeHTML(query) + "</em>");

+            $.history.add('q=' + query + '&t=' + tabIndex);

+            openTab();

+        });

+

+        // draw the search results box

+        searchControl.draw(document.getElementById("leftSearchControl"), drawOptions);

+

+        // get query and execute the search

+        if (location.hash.indexOf("&t=") != -1) {

+          searchControl.execute(decodeURI(getQuery(location.hash)));

+        }

+

+        document.getElementById("search_autocomplete").focus();

+        addTabListeners();

+      }

+      // End of OnLoad

+

+

+      google.setOnLoadCallback(OnLoad, true);

+

+      // when an event on the browser history occurs (back, forward, load) perform a search

+      $(window).history(function(e, hash) {

+        var query = decodeURI(getQuery(hash));

+        searchControl.execute(query);

+

+        $("#searchTitle").html("search results for <em>" + escapeHTML(query) + "</em>");

+      });

+

+      // forcefully regain key-up event control (previously jacked by search api)

+      $("#search_autocomplete").keyup(function(event) {

+        return search_changed(event, false, '/');

+      });

+

+      // open a tab, specified by its array position

+      function openTab() {

+        tabIndex = location.hash.split("&t=")[1];

+

+        // show the appropriate tab

+        var tabHeaders = $(".gsc-tabHeader");

+        $(tabHeaders[tabIndex]).click();

+      }

+

+      // add event listeners to each tab so we can track the browser history

+      function addTabListeners() {

+        var tabHeaders = $(".gsc-tabHeader");

+        for (var i = 0; i < tabHeaders.length; i++) {

+          $(tabHeaders[i]).attr("id",i).click(function() {

+            var tabHeaders = $(".gsc-tabHeader");

             var tabIndex = $(this).attr("id");

-            $.history.add('q=' + getQuery(location.hash) + '&t=' + tabIndex); // update the hash with the new tab
-          });
-        }
-      }
-      
+            $.history.add('q=' + getQuery(location.hash) + '&t=' + tabIndex); // update the hash with the new tab
+          });
+        }
+      }
+
       function getQuery(hash) {
-        var hashParts = hash.split('&t=');
-        var queryParts = hashParts[0].split('=');
-        return queryParts[1];
-      }
-
-      /* returns the given string with all HTML brackets converted to entities
-         TODO: move this to the site's JS library */
-      function escapeHTML(string) {
-        return string.replace(/</g,"&lt;")
-                     .replace(/>/g,"&gt;");
-      }
-
+        var hashParts = hash.split('&t=');
+        var queryParts = hashParts[0].split('=');
+        return queryParts[1];
+      }
+
+      /* returns the given string with all HTML brackets converted to entities
+         TODO: move this to the site's JS library */
+      function escapeHTML(string) {
+        return string.replace(/</g,"&lt;")
+                     .replace(/>/g,"&gt;");
+      }
+
 </script>
 
   <div id="mainBodyFixed" style="width:auto; margin:20px">
diff --git a/drm/common/DrmSupportInfo.cpp b/drm/common/DrmSupportInfo.cpp
index 3dee435..5400bdd 100644
--- a/drm/common/DrmSupportInfo.cpp
+++ b/drm/common/DrmSupportInfo.cpp
@@ -43,6 +43,10 @@
 }
 
 bool DrmSupportInfo::isSupportedMimeType(const String8& mimeType) const {
+    if (String8("") == mimeType) {
+        return false;
+    }
+
     for (unsigned int i = 0; i < mMimeTypeVector.size(); i++) {
         const String8 item = mMimeTypeVector.itemAt(i);
 
diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp
index 458f1b6..2d8e877 100644
--- a/drm/common/IDrmManagerService.cpp
+++ b/drm/common/IDrmManagerService.cpp
@@ -107,6 +107,7 @@
         handle->decryptInfo = NULL;
     }
     handle->copyControlVector.clear();
+    handle->extendedData.clear();
 }
 
 int BpDrmManagerService::addUniqueId(int uniqueId) {
diff --git a/drm/java/android/drm/DrmRights.java b/drm/java/android/drm/DrmRights.java
index 5907956..ef9c21d 100755
--- a/drm/java/android/drm/DrmRights.java
+++ b/drm/java/android/drm/DrmRights.java
@@ -107,31 +107,24 @@
 
     /**
      * Creates a <code>DrmRights</code> object with the given parameters.
-     *<p>
-     * The application can pass the processed data as a <code>String</code> or as binary data.
-     *<p>
-     * The following code snippet shows how to pass the processed data as a <code>String</code>:
-     *<p>
-     * new DrmRights(data.getBytes(), mimeType)
-     *<p>
-     * The following code snippet shows how to pass the processed data as binary data:
-     *<p>
-     * new DrmRights(binaryData[], mimeType)
      *
-     * @param data A {@link ProcessedData} object.
+     * @param data A {@link ProcessedData} object containing rights information.
+     *             This parameter may be null because it is optional for some DRM schemes.
      * @param mimeType The MIME type.
      */
     public DrmRights(ProcessedData data, String mimeType) {
-        mData = data.getData();
+        if (data != null) {
+            mData = data.getData();
 
-        String accountId = data.getAccountId();
-        if (null != accountId && !accountId.equals("")) {
-            mAccountId = accountId;
-        }
+            String accountId = data.getAccountId();
+            if (null != accountId && !accountId.equals("")) {
+                mAccountId = accountId;
+            }
 
-        String subscriptionId = data.getSubscriptionId();
-        if (null != subscriptionId && !subscriptionId.equals("")) {
-            mSubscriptionId = subscriptionId;
+            String subscriptionId = data.getSubscriptionId();
+            if (null != subscriptionId && !subscriptionId.equals("")) {
+                mSubscriptionId = subscriptionId;
+            }
         }
 
         mMimeType = mimeType;
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index a57dd98..a36bd4a 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -145,7 +145,6 @@
 
 status_t DrmManagerClientImpl::saveRights(int uniqueId, const DrmRights& drmRights,
             const String8& rightsPath, const String8& contentPath) {
-    status_t status = DRM_ERROR_UNKNOWN;
     return getDrmManagerService()->saveRights(
                 uniqueId, drmRights, rightsPath, contentPath);
 }
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a41d7ab..4c4aad0 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -70,7 +70,7 @@
     size <<= 1;
 
     if (audio_is_linear_pcm(format)) {
-        size /= channelCount * (format == AUDIO_FORMAT_PCM_16_BIT ? 2 : 1);
+        size /= channelCount * audio_bytes_per_sample(format);
     }
 
     *frameCount = size;
@@ -258,7 +258,7 @@
 int AudioRecord::frameSize() const
 {
     if (audio_is_linear_pcm(mFormat)) {
-        return channelCount()*((format() == AUDIO_FORMAT_PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+        return channelCount()*audio_bytes_per_sample(mFormat);
     } else {
         return sizeof(uint8_t);
     }
@@ -481,7 +481,7 @@
 {
     AutoMutex lock(mLock);
     int active;
-    status_t result;
+    status_t result = NO_ERROR;
     audio_track_cblk_t* cblk = mCblk;
     uint32_t framesReq = audioBuffer->frameCount;
     uint32_t waitTimeMs = (waitCount < 0) ? cblk->bufferTimeoutMs : WAIT_PERIOD_MS;
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 37fe182..31eb97a 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -298,7 +298,7 @@
 int AudioTrack::frameSize() const
 {
     if (audio_is_linear_pcm(mFormat)) {
-        return channelCount()*((format() == AUDIO_FORMAT_PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t));
+        return channelCount()*audio_bytes_per_sample(mFormat);
     } else {
         return sizeof(uint8_t);
     }
@@ -314,7 +314,7 @@
 void AudioTrack::start()
 {
     sp<AudioTrackThread> t = mAudioTrackThread;
-    status_t status;
+    status_t status = NO_ERROR;
 
     LOGV("start %p", this);
     if (t != 0) {
@@ -825,7 +825,7 @@
 {
     AutoMutex lock(mLock);
     int active;
-    status_t result;
+    status_t result = NO_ERROR;
     audio_track_cblk_t* cblk = mCblk;
     uint32_t framesReq = audioBuffer->frameCount;
     uint32_t waitTimeMs = (waitCount < 0) ? cblk->bufferTimeoutMs : WAIT_PERIOD_MS;
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index f03f7a2..fd0505e 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -301,6 +301,9 @@
 // static
 bool ASessionDescription::parseNTPRange(
         const char *s, float *npt1, float *npt2) {
+    *npt1 = 0.0f;
+    *npt2 = 0.0f;
+
     if (s[0] == '-') {
         return false;  // no start time available.
     }
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index d15d9c5..f89f8e2 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -995,10 +995,12 @@
         AString val;
         CHECK(GetAttribute(range.c_str(), "npt", &val));
 
+        bool seekable = true;
+
         float npt1, npt2;
         if (!ASessionDescription::parseNTPRange(val.c_str(), &npt1, &npt2)) {
             // This is a live stream and therefore not seekable.
-            return;
+            seekable = false;
         }
 
         i = response->mHeaders.indexOfKey("rtp-info");
@@ -1044,7 +1046,7 @@
             ++n;
         }
 
-        mSeekable = true;
+        mSeekable = seekable;
     }
 
     sp<APacketSource> getPacketSource(size_t index) {
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java b/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
index 93f7af3..fe279c8 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
@@ -1040,7 +1040,7 @@
             }
         });
 
-        WindowManager.LayoutParams lp = mNotificationPanelParams = new WindowManager.LayoutParams(
+        WindowManager.LayoutParams lp = new WindowManager.LayoutParams(
                 ViewGroup.LayoutParams.MATCH_PARENT,
                 ViewGroup.LayoutParams.MATCH_PARENT,
                 WindowManager.LayoutParams.TYPE_SYSTEM_DIALOG,
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 44df5b5..f716e63 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2936,7 +2936,7 @@
         mStreamType = streamType;
         // NOTE: audio_track_cblk_t::frameSize for 8 bit PCM data is based on a sample size of
         // 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack
-        mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : sizeof(int8_t);
+        mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * audio_bytes_per_sample(format) : sizeof(uint8_t);
     }
 }
 
diff --git a/telephony/java/com/android/internal/telephony/ApnContext.java b/telephony/java/com/android/internal/telephony/ApnContext.java
index 496c43c..5ec00e8 100644
--- a/telephony/java/com/android/internal/telephony/ApnContext.java
+++ b/telephony/java/com/android/internal/telephony/ApnContext.java
@@ -89,6 +89,11 @@
     }
 
     public synchronized void setDataConnectionAc(DataConnectionAc dcac) {
+        if (dcac != null) {
+            dcac.addApnContext(this);
+        } else {
+            if (mDataConnectionAc != null) mDataConnectionAc.removeApnContext(this);
+        }
         mDataConnectionAc = dcac;
     }
 
diff --git a/telephony/java/com/android/internal/telephony/DataCallState.java b/telephony/java/com/android/internal/telephony/DataCallState.java
index f5651e0..fba3184 100644
--- a/telephony/java/com/android/internal/telephony/DataCallState.java
+++ b/telephony/java/com/android/internal/telephony/DataCallState.java
@@ -47,6 +47,7 @@
     public String [] addresses = new String[0];
     public String [] dnses = new String[0];
     public String[] gateways = new String[0];
+    public int suggestedRetryTime = -1;
 
     /**
      * Class returned by onSetupConnectionCompleted.
@@ -77,6 +78,7 @@
         sb.append("DataCallState: {")
            .append("version=").append(version)
            .append(" status=").append(status)
+           .append(" retry=").append(suggestedRetryTime)
            .append(" cid=").append(cid)
            .append(" active=").append(active)
            .append(" type=").append(type)
diff --git a/telephony/java/com/android/internal/telephony/DataConnection.java b/telephony/java/com/android/internal/telephony/DataConnection.java
index 5c030fd..cb8b0e5 100644
--- a/telephony/java/com/android/internal/telephony/DataConnection.java
+++ b/telephony/java/com/android/internal/telephony/DataConnection.java
@@ -182,6 +182,18 @@
         }
     }
 
+    public static class CallSetupException extends Exception {
+        private int mRetryOverride = -1;
+
+        CallSetupException (int retryOverride) {
+            mRetryOverride = retryOverride;
+        }
+
+        public int getRetryOverride() {
+            return mRetryOverride;
+        }
+    }
+
     // ***** Event codes for driving the state machine
     protected static final int BASE = Protocol.BASE_DATA_CONNECTION;
     protected static final int EVENT_CONNECT = BASE + 0;
@@ -205,6 +217,7 @@
     protected long createTime;
     protected long lastFailTime;
     protected FailCause lastFailCause;
+    protected int mRetryOverride = -1;
     protected static final String NULL_IP = "0.0.0.0";
     private int mRefCount;
     Object userData;
@@ -288,7 +301,8 @@
         } else {
             lastFailCause = cause;
             lastFailTime = timeStamp;
-            AsyncResult.forMessage(connectionCompletedMsg, cause, new Exception());
+            AsyncResult.forMessage(connectionCompletedMsg, cause,
+                                   new CallSetupException(mRetryOverride));
         }
         if (DBG) log("notifyConnectionCompleted at " + timeStamp + " cause=" + cause);
 
@@ -430,6 +444,7 @@
         createTime = -1;
         lastFailTime = -1;
         lastFailCause = FailCause.NONE;
+        mRetryOverride = -1;
         mRefCount = 0;
 
         mLinkProperties = new LinkProperties();
@@ -482,6 +497,15 @@
         return result;
     }
 
+    private int getSuggestedRetryTime(AsyncResult ar) {
+        int retry = -1;
+        if (ar.exception == null) {
+            DataCallState response = (DataCallState) ar.result;
+            retry =  response.suggestedRetryTime;
+        }
+        return retry;
+    }
+
     private DataCallState.SetupResult setLinkProperties(DataCallState response,
             LinkProperties lp) {
         // Check if system property dns usable
@@ -685,10 +709,12 @@
         private FailCause mFailCause = null;
         private DisconnectParams mDisconnectParams = null;
 
-        public void setEnterNotificationParams(ConnectionParams cp, FailCause cause) {
+        public void setEnterNotificationParams(ConnectionParams cp, FailCause cause,
+                                               int retryOverride) {
             if (VDBG) log("DcInactiveState: setEnterNoticationParams cp,cause");
             mConnectionParams = cp;
             mFailCause = cause;
+            mRetryOverride = retryOverride;
         }
 
         public void setEnterNotificationParams(DisconnectParams dp) {
@@ -808,7 +834,7 @@
                             // Vendor ril rejected the command and didn't connect.
                             // Transition to inactive but send notifications after
                             // we've entered the mInactive state.
-                            mInactiveState.setEnterNotificationParams(cp, result.mFailCause);
+                            mInactiveState.setEnterNotificationParams(cp, result.mFailCause, -1);
                             transitionTo(mInactiveState);
                             break;
                         case ERR_UnacceptableParameter:
@@ -823,7 +849,8 @@
                             break;
                         case ERR_RilError:
                             // Request failed and mFailCause has the reason
-                            mInactiveState.setEnterNotificationParams(cp, result.mFailCause);
+                            mInactiveState.setEnterNotificationParams(cp, result.mFailCause,
+                                                                      getSuggestedRetryTime(ar));
                             transitionTo(mInactiveState);
                             break;
                         case ERR_Stale:
@@ -848,8 +875,8 @@
                         }
                         // Transition to inactive but send notifications after
                         // we've entered the mInactive state.
-                         mInactiveState.setEnterNotificationParams(cp, cause);
-                         transitionTo(mInactiveState);
+                        mInactiveState.setEnterNotificationParams(cp, cause, -1);
+                        transitionTo(mInactiveState);
                     } else {
                         if (DBG) {
                             log("DcActivatingState EVENT_GET_LAST_FAIL_DONE is stale cp.tag="
@@ -1016,7 +1043,7 @@
                         // Transition to inactive but send notifications after
                         // we've entered the mInactive state.
                         mInactiveState.setEnterNotificationParams(cp,
-                                FailCause.UNACCEPTABLE_NETWORK_PARAMETER);
+                                FailCause.UNACCEPTABLE_NETWORK_PARAMETER, -1);
                         transitionTo(mInactiveState);
                     } else {
                         if (DBG) {
diff --git a/telephony/java/com/android/internal/telephony/DataConnectionAc.java b/telephony/java/com/android/internal/telephony/DataConnectionAc.java
index 62b90ae..e23f1cc 100644
--- a/telephony/java/com/android/internal/telephony/DataConnectionAc.java
+++ b/telephony/java/com/android/internal/telephony/DataConnectionAc.java
@@ -24,12 +24,18 @@
 import android.net.ProxyProperties;
 import android.os.Message;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
 /**
  * AsyncChannel to a DataConnection
  */
 public class DataConnectionAc extends AsyncChannel {
     private static final boolean DBG = false;
     private String mLogTag;
+    private List<ApnContext> mApnList = null;
 
     public DataConnection dataConnection;
 
@@ -85,6 +91,7 @@
     public DataConnectionAc(DataConnection dc, String logTag) {
         dataConnection = dc;
         mLogTag = logTag;
+        mApnList = Collections.synchronizedList(new ArrayList<ApnContext>());
     }
 
     /**
@@ -371,6 +378,35 @@
         }
     }
 
+    /**
+     * Add ApnContext association.
+     *
+     * @param apnContext ApnContext to associate
+     */
+    public void addApnContext(ApnContext apnContext) {
+        if (!mApnList.contains(apnContext)) {
+            mApnList.add(apnContext);
+        }
+    }
+
+    /**
+     * Remove ApnContext association.
+     *
+     * @param apnContext ApnContext to dissociate
+     */
+    public void removeApnContext(ApnContext apnContext) {
+        mApnList.remove(apnContext);
+    }
+
+    /**
+     * Retrieve collection of ApnContext currently associated with the DataConnectionAc.
+     *
+     * @return Collection of ApnContext
+     */
+    public Collection<ApnContext> getApnList() {
+        return mApnList;
+    }
+
     private void log(String s) {
         android.util.Log.d(mLogTag, "DataConnectionAc " + s);
     }
diff --git a/telephony/java/com/android/internal/telephony/RIL.java b/telephony/java/com/android/internal/telephony/RIL.java
index 76f1ab7..c6ed405 100644
--- a/telephony/java/com/android/internal/telephony/RIL.java
+++ b/telephony/java/com/android/internal/telephony/RIL.java
@@ -3009,6 +3009,7 @@
             }
         } else {
             dataCall.status = p.readInt();
+            dataCall.suggestedRetryTime = p.readInt();
             dataCall.cid = p.readInt();
             dataCall.active = p.readInt();
             dataCall.type = p.readString();
diff --git a/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java b/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
index 6a95b67..a31b704 100644
--- a/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
+++ b/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
@@ -129,6 +129,12 @@
         super.setSystemLocale(language, country, false);
     }
 
+    // return IMSI from USIM as subscriber ID.
+    @Override
+    public String getSubscriberId() {
+        return mIccRecords.getIMSI();
+    }
+
     @Override
     protected void log(String s) {
         if (DBG)
diff --git a/telephony/java/com/android/internal/telephony/gsm/GsmDataConnectionTracker.java b/telephony/java/com/android/internal/telephony/gsm/GsmDataConnectionTracker.java
index 19c06f6..1ac012f 100644
--- a/telephony/java/com/android/internal/telephony/gsm/GsmDataConnectionTracker.java
+++ b/telephony/java/com/android/internal/telephony/gsm/GsmDataConnectionTracker.java
@@ -68,7 +68,9 @@
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -844,9 +846,16 @@
         return result;
     }
 
+    private boolean dataConnectionNotInUse(DataConnectionAc dcac) {
+        for (ApnContext apnContext : mApnContexts.values()) {
+            if (apnContext.getDataConnectionAc() == dcac) return false;
+        }
+        return true;
+    }
+
     private GsmDataConnection findFreeDataConnection() {
         for (DataConnectionAc dcac : mDataConnectionAsyncChannels.values()) {
-            if (dcac.isInactiveSync()) {
+            if (dcac.isInactiveSync() && dataConnectionNotInUse(dcac)) {
                 log("findFreeDataConnection: found free GsmDataConnection");
                 return (GsmDataConnection) dcac.dataConnection;
             }
@@ -926,7 +935,7 @@
             if (refCount == 0) {
                 configureRetry(dc, apnContext.getApnType());
             }
-            apnContext.setDataConnectionAc(mDataConnectionAsyncChannels.get(dc.getDataConnectionId()));
+            apnContext.setDataConnectionAc(dcac);
             apnContext.setApnSetting(apn);
             apnContext.setDataConnection(dc);
         }
@@ -969,6 +978,49 @@
     }
 
     /**
+     * @param cid Connection id provided from RIL.
+     * @return DataConnectionAc associated with specified cid.
+     */
+    private DataConnectionAc findDataConnectionAcByCid(int cid) {
+        for (DataConnectionAc dcac : mDataConnectionAsyncChannels.values()) {
+            if (dcac.getCidSync() == cid) {
+                return dcac;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * @param dcacs Collection of DataConnectionAc reported from RIL.
+     * @return List of ApnContext which are connected but not present in the
+     *         data connection list reported from RIL.
+     */
+    private List<ApnContext> findApnContextToClean(Collection<DataConnectionAc> dcacs) {
+        if (dcacs == null) return null;
+
+        ArrayList<ApnContext> list = new ArrayList<ApnContext>();
+        for (ApnContext apnContext : mApnContexts.values()) {
+            if (apnContext.getState() == State.CONNECTED) {
+                boolean found = false;
+                for (DataConnectionAc dcac : dcacs) {
+                    if (dcac == apnContext.getDataConnectionAc()) {
+                        // ApnContext holds a reference to a dcac present in the data call list.
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    // ApnContext does not have a dcac reported in the data call list.
+                    if (DBG) log("onDataStateChanged(ar): Connected apn not found in the list (" +
+                                 apnContext.toString() + ")");
+                    list.add(apnContext);
+                }
+            }
+        }
+        return list;
+    }
+
+    /**
      * @param ar is the result of RIL_REQUEST_DATA_CALL_LIST
      * or RIL_UNSOL_DATA_CALL_LIST_CHANGED
      */
@@ -985,91 +1037,103 @@
             if (DBG) log("onDataStateChanged(ar): exception; likely radio not available, ignore");
             return;
         }
+        if (DBG) log("onDataStateChanged(ar): DataCallState size=" + dataCallStates.size());
 
-        // Create a hash map to store the dataCallState of each call id
+        // Create a hash map to store the dataCallState of each DataConnectionAc
         // TODO: Depends on how frequent the DATA_CALL_LIST got updated,
         //       may cache response to reduce comparison.
-        HashMap<Integer, DataCallState> response;
-        response = new HashMap<Integer, DataCallState>();
-        if (DBG) log("onDataStateChanged(ar): DataCallState size=" + dataCallStates.size());
-        for (DataCallState dc : dataCallStates) {
-            response.put(dc.cid, dc);
-            if (DBG) log("onDataStateChanged(ar): " + dc.cid + ": " + dc.toString());
+        HashMap<DataCallState, DataConnectionAc> response;
+        response = new HashMap<DataCallState, DataConnectionAc>();
+        for (DataCallState dataCallState : dataCallStates) {
+            DataConnectionAc dcac = findDataConnectionAcByCid(dataCallState.cid);
+
+            if (dcac != null) response.put(dataCallState, dcac);
         }
 
-        // For each connected apn, check if there is a matched active
-        // data call state, which has the same link properties.
-        if (DBG) log("    ApnContext size=" + mApnContexts.values().size());
-        for (ApnContext apnContext : mApnContexts.values()) {
-            if (DBG){
-                log("onDataStateChanged(ar): " + apnContext.toString());
-                if (apnContext.getDataConnection() != null) {
-                    log("onDataStateChanged(ar): " +  apnContext.getDataConnection().toString());
+        // step1: Find the list of "connected" ApnContexts that have no reference to
+        //        calls listed in the Data Call List.
+        List<ApnContext> apnsToClear = findApnContextToClean(response.values());
+
+        // step2: Check the status of each call in the Data Call List.
+        //        Collect the list of ApnContexts associated with the data call if the
+        //        link has to be cleared.
+        for (DataCallState newState : dataCallStates) {
+            DataConnectionAc dcac = response.get(newState);
+
+            // no associated DataConnection found. Ignore.
+            if (dcac == null) continue;
+
+            Collection<ApnContext> apns = dcac.getApnList();
+
+            // Collect the associated ApnContexts that are in CONNECTED state.
+            ArrayList<ApnContext> connectedApns = new ArrayList<ApnContext>();
+            for (ApnContext apnContext : apns) {
+                if ((apnContext != null) &&
+                    (apnContext.getState() == State.CONNECTED)) {
+                    connectedApns.add(apnContext);
                 }
             }
-            DataConnectionAc dcac = apnContext.getDataConnectionAc();
-            if (dcac == null) {
+
+            // No "Connected" ApnContext associated with this CID. Ignore.
+            if (connectedApns.isEmpty()) {
                 continue;
             }
-            int connectionId = dcac.getCidSync();
 
-            if (apnContext.getState() == State.CONNECTED) {
-                // The way things are supposed to work, the PDP list
-                // should not contain the CID after it disconnects.
-                // However, the way things really work, sometimes the PDP
-                // context is still listed with active = false, which
-                // makes it hard to distinguish an activating context from
-                // an activated-and-then de-activated one.
-                if (response.containsKey(connectionId)) {
-                    DataCallState newState = response.get(connectionId);
-                    if (DBG) log("onDataStateChanged(ar): Found ConnId=" + connectionId
+            if (DBG) log("onDataStateChanged(ar): Found ConnId=" + newState.cid
                             + " newState=" + newState.toString());
-                    if (newState.active != 0) {
-                        boolean resetConnection;
-                        switch (dcac.updateLinkPropertiesDataCallStateSync(newState)) {
-                        case NONE:
-                            if (DBG) log("onDataStateChanged(ar): Found but no change, skip");
-                            resetConnection = false;
-                            break;
-                        case CHANGED:
-                            if (DBG) log("onDataStateChanged(ar): Found and changed, notify");
-                            mPhone.notifyDataConnection(Phone.REASON_LINK_PROPERTIES_CHANGED,
-                                                        apnContext.getApnType());
-                            // Temporary hack, at this time a transition from CDMA -> Global
-                            // fails so we'll hope for the best and not reset the connection.
-                            // @see bug/4455071
-                            if (SystemProperties.getBoolean("telephony.ignore-state-changes",
-                                                            true)) {
-                                log("onDataStateChanged(ar): STOPSHIP don't reset, continue");
-                                resetConnection = false;
-                            } else {
-                                // Things changed so reset connection, when hack is removed
-                                // this is the normal path.
-                                log("onDataStateChanged(ar): changed so resetting connection");
-                                resetConnection = true;
-                            }
-                            break;
-                        case RESET:
-                        default:
-                            if (DBG) log("onDataStateChanged(ar): an error, reset connection");
-                            resetConnection = true;
-                            break;
-                        }
-                        if (resetConnection == false) continue;
+            if (newState.active != 0) {
+                boolean resetConnection;
+                switch (dcac.updateLinkPropertiesDataCallStateSync(newState)) {
+                case NONE:
+                    if (DBG) log("onDataStateChanged(ar): Found but no change, skip");
+                    resetConnection = false;
+                    break;
+                case CHANGED:
+                    for (ApnContext apnContext : connectedApns) {
+                        if (DBG) log("onDataStateChanged(ar): Found and changed, notify (" +
+                                     apnContext.toString() + ")");
+                        mPhone.notifyDataConnection(Phone.REASON_LINK_PROPERTIES_CHANGED,
+                                                    apnContext.getApnType());
                     }
+                    // Temporary hack, at this time a transition from CDMA -> Global
+                    // fails so we'll hope for the best and not reset the connection.
+                    // @see bug/4455071
+                    if (SystemProperties.getBoolean("telephony.ignore-state-changes",
+                                                    true)) {
+                        log("onDataStateChanged(ar): STOPSHIP don't reset, continue");
+                        resetConnection = false;
+                    } else {
+                        // Things changed so reset connection, when hack is removed
+                        // this is the normal path.
+                        log("onDataStateChanged(ar): changed so resetting connection");
+                        resetConnection = true;
+                    }
+                    break;
+                case RESET:
+                default:
+                    if (DBG) log("onDataStateChanged(ar): an error, reset connection");
+                    resetConnection = true;
+                    break;
                 }
-
-                if (DBG) log("onDataStateChanged(ar): reset connection.");
-
-                // Add an event log when the network drops PDP
-                int cid = getCellLocationId();
-                EventLog.writeEvent(EventLogTags.PDP_NETWORK_DROP, cid,
-                        TelephonyManager.getDefault().getNetworkType());
-
-                cleanUpConnection(true, apnContext);
+                if (resetConnection == false) continue;
             }
+
+            if (DBG) log("onDataStateChanged(ar): reset connection.");
+
+            apnsToClear.addAll(connectedApns);
         }
 
+        // step3: Clean up the APN connections if applicable.
+        if (!apnsToClear.isEmpty()) {
+            // Add an event log when the network drops PDP
+            int cid = getCellLocationId();
+            EventLog.writeEvent(EventLogTags.PDP_NETWORK_DROP, cid,
+                                TelephonyManager.getDefault().getNetworkType());
+        }
+
+        for (ApnContext apnContext : apnsToClear) {
+            cleanUpConnection(true, apnContext);
+        }
         if (DBG) log("onDataStateChanged(ar): X");
     }
 
@@ -1332,7 +1396,8 @@
         return retry;
     }
 
-    private void reconnectAfterFail(FailCause lastFailCauseCode, ApnContext apnContext) {
+    private void reconnectAfterFail(FailCause lastFailCauseCode,
+                                    ApnContext apnContext, int retryOverride) {
         if (apnContext == null) {
             loge("reconnectAfterFail: apnContext == null, impossible");
             return;
@@ -1357,9 +1422,14 @@
                 }
             }
 
-            int nextReconnectDelay = apnContext.getDataConnection().getRetryTimer();
+            // If the retry needs to be backed off for a specific case (determined by the
+            // RIL/modem), use the specified timer instead of the pre-configured retry pattern.
+            int nextReconnectDelay = retryOverride;
+            if (nextReconnectDelay < 0) {
+                nextReconnectDelay = apnContext.getDataConnection().getRetryTimer();
+                apnContext.getDataConnection().increaseRetryCount();
+            }
             startAlarmForReconnect(nextReconnectDelay, apnContext);
-            apnContext.getDataConnection().increaseRetryCount();
 
             if (!shouldPostNotification(lastFailCauseCode)) {
                 if (DBG) {
@@ -1664,7 +1734,13 @@
                     }
                 } else {
                     if (DBG) log("onDataSetupComplete: Not all permanent failures, retry");
-                    startDelayedRetry(cause, apnContext);
+                    // check to see if retry should be overridden for this failure.
+                    int retryOverride = -1;
+                    if (ar.exception instanceof DataConnection.CallSetupException) {
+                        retryOverride =
+                            ((DataConnection.CallSetupException)ar.exception).getRetryOverride();
+                    }
+                    startDelayedRetry(cause, apnContext, retryOverride);
                 }
             } else {
                 if (DBG) log("onDataSetupComplete: Try next APN");
@@ -1936,9 +2012,10 @@
         return result.toString();
     }
 
-    private void startDelayedRetry(GsmDataConnection.FailCause cause, ApnContext apnContext) {
+    private void startDelayedRetry(GsmDataConnection.FailCause cause,
+                                   ApnContext apnContext, int retryOverride) {
         notifyNoData(cause, apnContext);
-        reconnectAfterFail(cause, apnContext);
+        reconnectAfterFail(cause, apnContext, retryOverride);
     }
 
     private void setPreferredApn(int pos) {
diff --git a/voip/java/com/android/server/sip/SipService.java b/voip/java/com/android/server/sip/SipService.java
index 802e56d..f8e5b3a 100644
--- a/voip/java/com/android/server/sip/SipService.java
+++ b/voip/java/com/android/server/sip/SipService.java
@@ -890,6 +890,12 @@
                 startPortMappingLifetimeMeasurement(mSession.getLocalProfile());
 
                 if (!mRunning || !portChanged) return;
+
+                // The keep-alive process is stopped when the port is changed;
+                // nullify the session so that the process can be restarted
+                // again once the re-registration is done.
+                mKeepAliveSession = null;
+
                 // Acquire wake lock for the registration process. The
                 // lock will be released when registration is complete.
                 mMyWakeLock.acquire(mSession);