Merge change 25490 into eclair
* changes:
Revert "Don't activate keyguard if screen is turned off while proximity sensor is active."
diff --git a/Android.mk b/Android.mk
index 138ff09..4e7b14e 100644
--- a/Android.mk
+++ b/Android.mk
@@ -126,6 +126,8 @@
core/java/android/view/IWindow.aidl \
core/java/android/view/IWindowManager.aidl \
core/java/android/view/IWindowSession.aidl \
+ core/java/android/speech/IRecognitionListener.aidl \
+ core/java/android/speech/IRecognitionService.aidl \
core/java/android/speech/tts/ITts.aidl \
core/java/android/speech/tts/ITtsCallback.aidl \
core/java/com/android/internal/app/IBatteryStats.aidl \
diff --git a/api/current.xml b/api/current.xml
index 448da7c..2c9a087 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -26137,6 +26137,16 @@
<parameter name="flags" type="int">
</parameter>
</method>
+<field name="CREATOR"
+ type="android.os.Parcelable.Creator"
+ transient="false"
+ volatile="false"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
</class>
<class name="BluetoothClass.Device"
extends="java.lang.Object"
@@ -27197,6 +27207,16 @@
visibility="public"
>
</field>
+<field name="CREATOR"
+ type="android.os.Parcelable.Creator"
+ transient="false"
+ volatile="false"
+ static="true"
+ final="true"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</field>
<field name="ERROR"
type="int"
transient="false"
diff --git a/core/java/android/bluetooth/BluetoothClass.java b/core/java/android/bluetooth/BluetoothClass.java
index 1fbbf78..6210380 100644
--- a/core/java/android/bluetooth/BluetoothClass.java
+++ b/core/java/android/bluetooth/BluetoothClass.java
@@ -72,12 +72,10 @@
return Integer.toHexString(mClass);
}
- /** @hide */
public int describeContents() {
return 0;
}
- /** @hide */
public static final Parcelable.Creator<BluetoothClass> CREATOR =
new Parcelable.Creator<BluetoothClass>() {
public BluetoothClass createFromParcel(Parcel in) {
@@ -88,7 +86,6 @@
}
};
- /** @hide */
public void writeToParcel(Parcel out, int flags) {
out.writeInt(mClass);
}
diff --git a/core/java/android/bluetooth/BluetoothDevice.java b/core/java/android/bluetooth/BluetoothDevice.java
index 0b3f3c7..f81ba73 100644
--- a/core/java/android/bluetooth/BluetoothDevice.java
+++ b/core/java/android/bluetooth/BluetoothDevice.java
@@ -349,12 +349,10 @@
return mAddress;
}
- /** @hide */
public int describeContents() {
return 0;
}
- /** @hide */
public static final Parcelable.Creator<BluetoothDevice> CREATOR =
new Parcelable.Creator<BluetoothDevice>() {
public BluetoothDevice createFromParcel(Parcel in) {
@@ -365,7 +363,6 @@
}
};
- /** @hide */
public void writeToParcel(Parcel out, int flags) {
out.writeString(mAddress);
}
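
Note: removing the @hide markers above promotes describeContents(), writeToParcel(), and the CREATOR fields of BluetoothClass and BluetoothDevice to public API, which is what the api/current.xml additions record. A minimal sketch of what that enables (the action string and extra key are made up for illustration):

    // device is a BluetoothDevice obtained elsewhere, e.g. from a discovery broadcast
    Intent intent = new Intent("com.example.ACTION_DEVICE_PICKED");  // hypothetical action
    intent.putExtra("device", device);
    // ... in the receiving component:
    BluetoothDevice received = intent.getParcelableExtra("device");
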
diff --git a/core/java/android/provider/Calendar.java b/core/java/android/provider/Calendar.java
index d57155c..1de971b 100644
--- a/core/java/android/provider/Calendar.java
+++ b/core/java/android/provider/Calendar.java
@@ -506,6 +506,13 @@
* <P>Type: INTEGER (boolean, readonly)</P>
*/
public static final String CAN_INVITE_OTHERS = "canInviteOthers";
+
+ /**
+ * The owner account for this calendar, based on the calendar (foreign
+ * key into the calendars table).
+ * <P>Type: String</P>
+ */
+ public static final String OWNER_ACCOUNT = "ownerAccount";
}
/**
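
Note: OWNER_ACCOUNT ("ownerAccount") is a new column constant. A hedged sketch of reading it through a ContentResolver; the provider URI and the neighbouring column names are assumptions for illustration, since this provider is not public API:

    String[] projection = { "_id", "title", "ownerAccount" };   // last entry is OWNER_ACCOUNT
    Cursor c = resolver.query(Uri.parse("content://calendar/events"),  // assumed URI
            projection, null, null, null);
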
diff --git a/core/java/android/provider/Contacts.java b/core/java/android/provider/Contacts.java
index 667ec5a..d87018d 100644
--- a/core/java/android/provider/Contacts.java
+++ b/core/java/android/provider/Contacts.java
@@ -533,7 +533,7 @@
Uri photoUri = Uri.withAppendedPath(person, Contacts.Photos.CONTENT_DIRECTORY);
Cursor cursor = cr.query(photoUri, new String[]{Photos.DATA}, null, null, null);
try {
- if (!cursor.moveToNext()) {
+ if (cursor == null || !cursor.moveToNext()) {
return null;
}
byte[] data = cursor.getBlob(0);
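
Note: the change guards against query() returning null before the cursor is used. An abridged sketch of the defensive pattern the fixed method follows (the finally block and return value are simplified here, not copied from the original):

    Cursor cursor = cr.query(photoUri, new String[] { Photos.DATA }, null, null, null);
    try {
        if (cursor == null || !cursor.moveToNext()) {
            return null;
        }
        byte[] data = cursor.getBlob(0);
        return data;
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }
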
diff --git a/core/java/android/service/wallpaper/WallpaperService.java b/core/java/android/service/wallpaper/WallpaperService.java
index cd5cf10..da8d62c0 100644
--- a/core/java/android/service/wallpaper/WallpaperService.java
+++ b/core/java/android/service/wallpaper/WallpaperService.java
@@ -321,7 +321,7 @@
* Called as the user performs touch-screen interaction with the
* window that is currently showing this wallpaper. Note that the
* events you receive here are driven by the actual application the
- * user is interacting with, so if it is slow you will get viewer
+ * user is interacting with, so if it is slow you will get fewer
* move events.
*/
public void onTouchEvent(MotionEvent event) {
diff --git a/core/java/android/speech/IRecognitionListener.aidl b/core/java/android/speech/IRecognitionListener.aidl
new file mode 100644
index 0000000..2da2258
--- /dev/null
+++ b/core/java/android/speech/IRecognitionListener.aidl
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+import android.os.Bundle;
+import android.speech.RecognitionResult;
+
+/**
+ * Listener for speech recognition events, used with RecognitionService.
+ * This gives you the final recognition results, as well as various
+ * intermediate events that can be used to show visual feedback to the user.
+ * {@hide}
+ */
+interface IRecognitionListener {
+ /** Called when the endpointer is ready for the user to start speaking. */
+ void onReadyForSpeech(in Bundle noiseParams);
+
+ /** The user has started to speak. */
+ void onBeginningOfSpeech();
+
+ /** The sound level in the audio stream has changed. */
+ void onRmsChanged(in float rmsdB);
+
+ /**
+ * More sound has been received. Buffer is a byte buffer containing
+ * a sequence of 16-bit shorts.
+ */
+ void onBufferReceived(in byte[] buffer);
+
+ /** Called after the user stops speaking. */
+ void onEndOfSpeech();
+
+ /**
+ * A network or recognition error occurred. The code is defined in
+ * {@link android.speech.RecognitionResult}
+ */
+ void onError(in int error);
+
+ /**
+ * Called when recognition results are ready.
+ * @param results an ordered list of the most likely results (N-best list).
+ * @param key a key associated with the results. The same results can
+ * be retrieved asynchronously later using the key, if available.
+ */
+ void onResults(in List<RecognitionResult> results, long key);
+}
diff --git a/core/java/android/speech/IRecognitionService.aidl b/core/java/android/speech/IRecognitionService.aidl
new file mode 100644
index 0000000..a18c380
--- /dev/null
+++ b/core/java/android/speech/IRecognitionService.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+import android.content.Intent;
+import android.speech.IRecognitionListener;
+import android.speech.RecognitionResult;
+
+// A Service interface to speech recognition. Call startListening when
+// you want to begin capturing audio; RecognitionService will automatically
+// determine when the user has finished speaking, stream the audio to the
+// recognition servers, and notify you when results are ready.
+/** {@hide} */
+interface IRecognitionService {
+ // Start listening for speech. Only call this from one thread at a time.
+ // See RecognizerIntent.java for constants used to specify the intent.
+ void startListening(in Intent recognizerIntent,
+ in IRecognitionListener listener);
+
+ List<RecognitionResult> getRecognitionResults(in long key);
+
+ void cancel();
+}
diff --git a/core/java/android/speech/RecognitionResult.aidl b/core/java/android/speech/RecognitionResult.aidl
new file mode 100644
index 0000000..59e53ab
--- /dev/null
+++ b/core/java/android/speech/RecognitionResult.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+parcelable RecognitionResult;
diff --git a/core/java/android/speech/RecognitionResult.java b/core/java/android/speech/RecognitionResult.java
new file mode 100644
index 0000000..95715ee
--- /dev/null
+++ b/core/java/android/speech/RecognitionResult.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * RecognitionResult is a passive object that stores a single recognized query
+ * and its search result.
+ *
+ * TODO: Revisit and improve this class, reconciling the different types of actions and
+ * the different ways they are represented. Maybe we should have a separate result object
+ * for each type, and put them (type/value) in a bundle?
+ * {@hide}
+ */
+public class RecognitionResult implements Parcelable {
+ /**
+ * Status of the recognize request.
+ */
+ public static final int NETWORK_TIMEOUT = 1; // Network operation timed out.
+
+ public static final int NETWORK_ERROR = 2; // Other network related errors.
+
+ public static final int AUDIO_ERROR = 3; // Audio recording error.
+
+ public static final int SERVER_ERROR = 4; // Server sends error status.
+
+ public static final int CLIENT_ERROR = 5; // Other client side errors.
+
+ public static final int SPEECH_TIMEOUT = 6; // No speech input
+
+ public static final int NO_MATCH = 7; // No recognition result matched.
+
+ public static final int SERVICE_BUSY = 8; // RecognitionService busy.
+
+ /**
+ * Type of the recognition results.
+ */
+ public static final int RAW_RECOGNITION_RESULT = 0;
+
+ public static final int WEB_SEARCH_RESULT = 1;
+
+ public static final int CONTACT_RESULT = 2;
+
+ public static final int ACTION_RESULT = 3;
+
+ /**
+ * A factory method to create a raw RecognitionResult
+ *
+ * @param sentence the recognized text.
+ */
+ public static RecognitionResult newRawRecognitionResult(String sentence) {
+ return new RecognitionResult(RAW_RECOGNITION_RESULT, sentence, null, null);
+ }
+
+ /**
+ * A factory method to create a RecognitionResult for contacts.
+ *
+ * @param contact the contact name.
+ * @param phoneType the phone type.
+ * @param callAction whether this result included a command to "call", or
+ * just the contact name.
+ */
+ public static RecognitionResult newContactResult(String contact, int phoneType,
+ boolean callAction) {
+ return new RecognitionResult(CONTACT_RESULT, contact, phoneType, callAction);
+ }
+
+ /**
+ * A factory method to create a RecognitionResult for a web search query.
+ *
+ * @param query the query string.
+ * @param html the html page of the search result.
+ * @param url the url that performs the search with the query.
+ */
+ public static RecognitionResult newWebResult(String query, String html, String url) {
+ return new RecognitionResult(WEB_SEARCH_RESULT, query, html, url);
+ }
+
+ /**
+ * A factory method to create a RecognitionResult for an action.
+ *
+ * @param action the action type
+ * @param query the query string associated with that action.
+ */
+ public static RecognitionResult newActionResult(int action, String query) {
+ return new RecognitionResult(ACTION_RESULT, action, query);
+ }
+
+ public static final Parcelable.Creator<RecognitionResult> CREATOR =
+ new Parcelable.Creator<RecognitionResult>() {
+
+ public RecognitionResult createFromParcel(Parcel in) {
+ return new RecognitionResult(in);
+ }
+
+ public RecognitionResult[] newArray(int size) {
+ return new RecognitionResult[size];
+ }
+ };
+
+ /**
+ * Result type.
+ */
+ public final int mResultType;
+
+ /**
+ * The recognized string when mResultType is WEB_SEARCH_RESULT. The name of
+ * the contact when mResultType is CONTACT_RESULT. The relevant query when
+ * mResultType is ACTION_RESULT.
+ */
+ public final String mText;
+
+ /**
+ * The HTML result page for the query. If this is null, then the application
+ * must use the url field to get the HTML result page.
+ */
+ public final String mHtml;
+
+ /**
+ * The url to get the result page for the query string. The application must
+ * use this url instead of performing the search with the query.
+ */
+ public final String mUrl;
+
+ /**
+ * Phone number type. This is valid only when mResultType == CONTACT_RESULT.
+ */
+ public final int mPhoneType;
+
+ /**
+ * Action type. This is valid only when mResultType == ACTION_RESULT.
+ */
+ public final int mAction;
+
+ /**
+ * Whether a contact recognition result included a command to "call". This
+ * is valid only when mResultType == CONTACT_RESULT.
+ */
+ public final boolean mCallAction;
+
+ private RecognitionResult(int type, int action, String query) {
+ mResultType = type;
+ mAction = action;
+ mText = query;
+ mHtml = null;
+ mUrl = null;
+ mPhoneType = -1;
+ mCallAction = false;
+ }
+
+ private RecognitionResult(int type, String query, String html, String url) {
+ mResultType = type;
+ mText = query;
+ mHtml = html;
+ mUrl = url;
+ mPhoneType = -1;
+ mAction = -1;
+ mCallAction = false;
+ }
+
+ private RecognitionResult(int type, String query, int phoneType, boolean callAction) {
+ mResultType = type;
+ mText = query;
+ mPhoneType = phoneType;
+ mHtml = null;
+ mUrl = null;
+ mAction = -1;
+ mCallAction = callAction;
+ }
+
+ private RecognitionResult(Parcel in) {
+ mResultType = in.readInt();
+ mText = in.readString();
+ mHtml = in.readString();
+ mUrl = in.readString();
+ mPhoneType = in.readInt();
+ mAction = in.readInt();
+ mCallAction = (in.readInt() == 1);
+ }
+
+ public void writeToParcel(Parcel out, int flags) {
+ out.writeInt(mResultType);
+ out.writeString(mText);
+ out.writeString(mHtml);
+ out.writeString(mUrl);
+ out.writeInt(mPhoneType);
+ out.writeInt(mAction);
+ out.writeInt(mCallAction ? 1 : 0);
+ }
+
+ @Override
+ public String toString() {
+ String[] resultType = {
+ "RAW", "WEB", "CONTACT", "ACTION"
+ };
+ return "[type=" + resultType[mResultType] + ", text=" + mText + ", mUrl=" + mUrl
+ + ", html=" + mHtml + ", mAction=" + mAction + ", mCallAction=" + mCallAction + "]";
+ }
+
+ public int describeContents() {
+ // no special description
+ return 0;
+ }
+}
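
Note: RecognitionResult is a straightforward Parcelable; a test-style sketch of the round trip it supports (not part of the change):

    RecognitionResult original = RecognitionResult.newRawRecognitionResult("hello world");
    Parcel p = Parcel.obtain();
    original.writeToParcel(p, 0);
    p.setDataPosition(0);
    RecognitionResult copy = RecognitionResult.CREATOR.createFromParcel(p);
    p.recycle();
    // copy.mResultType == RAW_RECOGNITION_RESULT and copy.mText.equals("hello world")
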
diff --git a/core/java/android/speech/RecognitionServiceUtil.java b/core/java/android/speech/RecognitionServiceUtil.java
new file mode 100644
index 0000000..4207543
--- /dev/null
+++ b/core/java/android/speech/RecognitionServiceUtil.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.speech;
+
+import android.content.ComponentName;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.speech.RecognitionResult;
+import android.util.Log;
+
+import java.util.List;
+
+/**
+ * Utils for Google's network-based speech recognizer, which lets you perform
+ * speech-to-text translation through RecognitionService. IRecognitionService
+ * and IRecognitionListener are the core interfaces; you begin recognition
+ * through IRecognitionService and subscribe to callbacks about when the user
+ * stopped speaking, results come in, errors, etc. through IRecognitionListener.
+ * RecognitionServiceUtil includes default IRecognitionListener and
+ * ServiceConnection implementations to reduce the amount of boilerplate.
+ *
+ * The Service provides no user interface. See RecognitionActivity if you
+ * want the standard voice search UI.
+ *
+ * Below is a small skeleton of how to use the recognizer:
+ *
+ * ServiceConnection conn = new RecognitionServiceUtil.Connection();
+ * mContext.bindService(RecognitionServiceUtil.sDefaultIntent,
+ * conn, Context.BIND_AUTO_CREATE);
+ * IRecognitionListener listener = new RecognitionServiceUtil.NullListener() {
+ * public void onResults(List<RecognitionResult> results, long key) {
+ * // Do something with the recognition results
+ * }
+ * };
+ *
+ * // Must wait for conn.mService to be populated, then call below
+ * conn.mService.startListening(null, listener);
+ *
+ * {@hide}
+ */
+public class RecognitionServiceUtil {
+ public static final Intent sDefaultIntent = new Intent(
+ RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
+
+ // Recognize request parameters
+ public static final String USE_LOCATION = "useLocation";
+ public static final String CONTACT_AUTH_TOKEN = "contactAuthToken";
+
+ // Bundles
+ public static final String NOISE_LEVEL = "NoiseLevel";
+ public static final String SIGNAL_NOISE_RATIO = "SignalNoiseRatio";
+
+ private RecognitionServiceUtil() {}
+
+ /**
+ * IRecognitionListener which does nothing in response to recognition
+ * callbacks. You can subclass from this and override only the methods
+ * whose events you want to respond to.
+ */
+ public static class NullListener extends IRecognitionListener.Stub {
+ public void onReadyForSpeech(Bundle bundle) {}
+ public void onBeginningOfSpeech() {}
+ public void onRmsChanged(float rmsdB) {}
+ public void onBufferReceived(byte[] buf) {}
+ public void onEndOfSpeech() {}
+ public void onError(int error) {}
+ public void onResults(List<RecognitionResult> results, long key) {}
+ }
+
+ /**
+ * Basic ServiceConnection that just records the mService variable.
+ */
+ public static class Connection implements ServiceConnection {
+ public IRecognitionService mService;
+
+ public synchronized void onServiceConnected(ComponentName name, IBinder service) {
+ mService = IRecognitionService.Stub.asInterface(service);
+ }
+
+ public void onServiceDisconnected(ComponentName name) {
+ mService = null;
+ }
+ }
+}
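
Note: a slightly fuller sketch of the usage pattern the class javadoc describes. Real code must wait for onServiceConnected() before touching conn.mService, and startListening() can throw RemoteException; the log tag is illustrative:

    RecognitionServiceUtil.Connection conn = new RecognitionServiceUtil.Connection();
    context.bindService(RecognitionServiceUtil.sDefaultIntent, conn, Context.BIND_AUTO_CREATE);

    IRecognitionListener listener = new RecognitionServiceUtil.NullListener() {
        @Override
        public void onResults(List<RecognitionResult> results, long key) {
            // handle the N-best list
        }
    };

    // ... later, once conn.mService is non-null:
    try {
        conn.mService.startListening(
                new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), listener);
    } catch (RemoteException e) {
        Log.e("SpeechDemo", "recognition service died", e);
    }
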
diff --git a/core/java/android/text/util/Rfc822InputFilter.java b/core/java/android/text/util/Rfc822InputFilter.java
new file mode 100644
index 0000000..8c8b7fc
--- /dev/null
+++ b/core/java/android/text/util/Rfc822InputFilter.java
@@ -0,0 +1,58 @@
+package android.text.util;
+
+import android.text.InputFilter;
+import android.text.Spanned;
+import android.text.SpannableStringBuilder;
+
+/**
+ * Implements special address cleanup rules:
+ * The first space entered after an "@" symbol that is followed by any combination
+ * of letters and symbols, including one or more dots and no commas, should insert
+ * an extra comma (followed by the space).
+ *
+ * @hide
+ */
+public class Rfc822InputFilter implements InputFilter {
+
+ public CharSequence filter(CharSequence source, int start, int end, Spanned dest,
+ int dstart, int dend) {
+
+ // quick check - did they enter a single space?
+ if (end-start != 1 || source.charAt(start) != ' ') {
+ return null;
+ }
+
+ // determine if the characters before the new space fit the pattern
+ // follow backwards and see if we find a comma, dot, or @
+ int scanBack = dstart;
+ boolean dotFound = false;
+ while (scanBack > 0) {
+ char c = dest.charAt(--scanBack);
+ switch (c) {
+ case '.':
+ dotFound = true; // one or more dots are req'd
+ break;
+ case ',':
+ return null;
+ case '@':
+ if (!dotFound) {
+ return null;
+ }
+ // we have found a comma-insert case. now just do it
+ // in the least expensive way we can.
+ if (source instanceof Spanned) {
+ SpannableStringBuilder sb = new SpannableStringBuilder(",");
+ sb.append(source);
+ return sb;
+ } else {
+ return ", ";
+ }
+ default:
+ // just keep going
+ }
+ }
+
+ // no termination cases were found, so don't edit the input
+ return null;
+ }
+}
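
Note: the filter plugs into any TextView via setFilters(); a minimal sketch of how it would typically be attached:

    EditText addressField = new EditText(context);
    addressField.setFilters(new InputFilter[] { new Rfc822InputFilter() });
    // Typing "user@example.com" followed by a space now yields "user@example.com, "
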
diff --git a/core/java/android/webkit/HTML5VideoViewProxy.java b/core/java/android/webkit/HTML5VideoViewProxy.java
index 14bc33b..c10355c 100644
--- a/core/java/android/webkit/HTML5VideoViewProxy.java
+++ b/core/java/android/webkit/HTML5VideoViewProxy.java
@@ -47,11 +47,14 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
+import java.util.Map;
/**
* <p>Proxy for HTML5 video views.
*/
-class HTML5VideoViewProxy extends Handler {
+class HTML5VideoViewProxy extends Handler
+ implements MediaPlayer.OnPreparedListener,
+ MediaPlayer.OnCompletionListener {
// Logging tag.
private static final String LOGTAG = "HTML5VideoViewProxy";
@@ -59,9 +62,15 @@
private static final int INIT = 100;
private static final int PLAY = 101;
private static final int SET_POSTER = 102;
+ private static final int SEEK = 103;
+ private static final int PAUSE = 104;
// Message Ids to be handled on the WebCore thread
+ private static final int PREPARED = 200;
+ private static final int ENDED = 201;
+ // The C++ MediaPlayerPrivateAndroid object.
+ int mNativePointer;
// The handler for WebCore thread messages;
private Handler mWebCoreHandler;
// The WebView instance that created this view.
@@ -72,6 +81,8 @@
private ImageView mPosterView;
// The poster downloader.
private PosterDownloader mPosterDownloader;
+ // The seek position.
+ private int mSeekPosition;
// A helper class to control the playback. This executes on the UI thread!
private static final class VideoPlayer {
// The proxy that is currently playing (if any).
@@ -94,7 +105,8 @@
}
};
- public static void play(String url, HTML5VideoViewProxy proxy, WebChromeClient client) {
+ public static void play(String url, int time, HTML5VideoViewProxy proxy,
+ WebChromeClient client) {
if (mCurrentProxy != null) {
// Some other video is already playing. Notify the caller that its playback ended.
proxy.playbackEnded();
@@ -105,9 +117,46 @@
mVideoView.setWillNotDraw(false);
mVideoView.setMediaController(new MediaController(proxy.getContext()));
mVideoView.setVideoURI(Uri.parse(url));
+ mVideoView.setOnCompletionListener(proxy);
+ mVideoView.setOnPreparedListener(proxy);
+ mVideoView.seekTo(time);
mVideoView.start();
client.onShowCustomView(mVideoView, mCallback);
}
+
+ public static void seek(int time, HTML5VideoViewProxy proxy) {
+ if (mCurrentProxy == proxy && time >= 0 && mVideoView != null) {
+ mVideoView.seekTo(time);
+ }
+ }
+
+ public static void pause(HTML5VideoViewProxy proxy) {
+ if (mCurrentProxy == proxy && mVideoView != null) {
+ mVideoView.pause();
+ }
+ }
+ }
+
+ // A bunch of event listeners for our VideoView
+ // MediaPlayer.OnPreparedListener
+ public void onPrepared(MediaPlayer mp) {
+ Message msg = Message.obtain(mWebCoreHandler, PREPARED);
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("dur", new Integer(mp.getDuration()));
+ map.put("width", new Integer(mp.getVideoWidth()));
+ map.put("height", new Integer(mp.getVideoHeight()));
+ msg.obj = map;
+ mWebCoreHandler.sendMessage(msg);
+ }
+
+ // MediaPlayer.OnCompletionListener;
+ public void onCompletion(MediaPlayer mp) {
+ playbackEnded();
+ }
+
+ public void playbackEnded() {
+ Message msg = Message.obtain(mWebCoreHandler, ENDED);
+ mWebCoreHandler.sendMessage(msg);
}
// Handler for the messages from WebCore thread to the UI thread.
@@ -124,7 +173,7 @@
String url = (String) msg.obj;
WebChromeClient client = mWebView.getWebChromeClient();
if (client != null) {
- VideoPlayer.play(url, this, client);
+ VideoPlayer.play(url, mSeekPosition, this, client);
}
break;
}
@@ -133,13 +182,19 @@
mPosterView.setImageBitmap(poster);
break;
}
+ case SEEK: {
+ Integer time = (Integer) msg.obj;
+ mSeekPosition = time;
+ VideoPlayer.seek(mSeekPosition, this);
+ break;
+ }
+ case PAUSE: {
+ VideoPlayer.pause(this);
+ break;
+ }
}
}
- public void playbackEnded() {
- // TODO: notify WebKit
- }
-
// Everything below this comment executes on the WebCore thread, except for
// the EventHandler methods, which are called on the network thread.
@@ -273,13 +328,16 @@
/**
* Private constructor.
- * @param context is the application context.
+ * @param webView is the WebView that hosts the video.
+ * @param nativePtr is the C++ pointer to the MediaPlayerPrivate object.
*/
- private HTML5VideoViewProxy(WebView webView) {
+ private HTML5VideoViewProxy(WebView webView, int nativePtr) {
// This handler is for the main (UI) thread.
super(Looper.getMainLooper());
// Save the WebView object.
mWebView = webView;
+ // Save the native ptr
+ mNativePointer = nativePtr;
// create the message handler for this thread
createWebCoreHandler();
}
@@ -289,8 +347,18 @@
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
- // TODO here we will process the messages from the VideoPlayer
- // and will call native WebKit methods.
+ case PREPARED: {
+ Map<String, Object> map = (Map<String, Object>) msg.obj;
+ Integer duration = (Integer) map.get("dur");
+ Integer width = (Integer) map.get("width");
+ Integer height = (Integer) map.get("height");
+ nativeOnPrepared(duration.intValue(), width.intValue(),
+ height.intValue(), mNativePointer);
+ break;
+ }
+ case ENDED:
+ nativeOnEnded(mNativePointer);
+ break;
}
}
};
@@ -314,19 +382,35 @@
/**
* Play a video stream.
* @param url is the URL of the video stream.
- * @param webview is the WebViewCore that is requesting the playback.
*/
public void play(String url) {
if (url == null) {
return;
}
- // We need to know the webview that is requesting the playback.
Message message = obtainMessage(PLAY);
message.obj = url;
sendMessage(message);
}
/**
+ * Seek into the video stream.
+ * @param time is the position in the video stream.
+ */
+ public void seek(int time) {
+ Message message = obtainMessage(SEEK);
+ message.obj = new Integer(time);
+ sendMessage(message);
+ }
+
+ /**
+ * Pause the playback.
+ */
+ public void pause() {
+ Message message = obtainMessage(PAUSE);
+ sendMessage(message);
+ }
+
+ /**
 * Create the child view that will carry the poster.
*/
public void createView() {
@@ -384,7 +468,10 @@
*
* @return a new HTML5VideoViewProxy object.
*/
- public static HTML5VideoViewProxy getInstance(WebViewCore webViewCore) {
- return new HTML5VideoViewProxy(webViewCore.getWebView());
+ public static HTML5VideoViewProxy getInstance(WebViewCore webViewCore, int nativePtr) {
+ return new HTML5VideoViewProxy(webViewCore.getWebView(), nativePtr);
}
+
+ private native void nativeOnPrepared(int duration, int width, int height, int nativePointer);
+ private native void nativeOnEnded(int nativePointer);
}
diff --git a/core/java/android/webkit/WebView.java b/core/java/android/webkit/WebView.java
index abbbc00..a5536dd 100644
--- a/core/java/android/webkit/WebView.java
+++ b/core/java/android/webkit/WebView.java
@@ -2763,7 +2763,8 @@
zoomScale = mZoomScale;
// set mZoomScale to be 0 as we have done animation
mZoomScale = 0;
- animateZoom = false; // inform drawContentPicture we're done
+ // call invalidate() again to draw with the final filters
+ invalidate();
if (mNeedToAdjustWebTextView) {
mNeedToAdjustWebTextView = false;
mWebTextView.setTextSize(TypedValue.COMPLEX_UNIT_PX,
@@ -2908,6 +2909,8 @@
getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
if (isTextView) {
+ if (mWebTextView == null) return;
+
imm.showSoftInput(mWebTextView, 0);
// Now we need to fake a touch event to place the cursor where the
// user touched.
diff --git a/opengl/tests/gl2_basic/gl2_basic.cpp b/opengl/tests/gl2_basic/gl2_basic.cpp
index f969a46..d4887ba 100644
--- a/opengl/tests/gl2_basic/gl2_basic.cpp
+++ b/opengl/tests/gl2_basic/gl2_basic.cpp
@@ -34,14 +34,16 @@
static void printGLString(const char *name, GLenum s)
{
fprintf(stderr, "printGLString %s, %d\n", name, s);
+#if 0 // causes hangs
const char *v = (const char *)glGetString(s);
int error = glGetError();
fprintf(stderr, "glGetError() = %d, result of glGetString = %x\n", error,
(unsigned int)v);
- if ((v < (const char*) 0) || (v > (const char*) 0x1000))
+ if ((v < (const char*) 0) || (v > (const char*) 0x10000))
fprintf(stderr, "GL %s = %s\n", name, v);
else
- fprintf(stderr, "GL %s = (null)\n", name);
+ fprintf(stderr, "GL %s = (null) 0x%08x\n", name, (unsigned int) v);
+#endif
}
static const char* eglErrorToString[] = {
@@ -61,7 +63,11 @@
"EGL_BAD_SURFACE"
};
-static void checkEglError(const char* op) {
+static void checkEglError(const char* op, EGLBoolean returnVal = EGL_TRUE) {
+ if (returnVal != EGL_TRUE) {
+ fprintf(stderr, "%s() returned %d\n", op, returnVal);
+ }
+
for(EGLint error = eglGetError();
error != EGL_SUCCESS;
error = eglGetError()) {
@@ -69,25 +75,30 @@
if (error >= EGL_SUCCESS && error <= EGL_BAD_SURFACE) {
errorString = eglErrorToString[error - EGL_SUCCESS];
}
- fprintf(stderr, "%s() returned eglError %s (0x%x)\n", op,
+ fprintf(stderr, "after %s() eglError %s (0x%x)\n", op,
errorString, error);
}
}
int main(int argc, char** argv)
{
+ EGLBoolean returnValue;
+ EGLConfig configs[2];
+ EGLint config_count;
+
+ EGLint context_attribs[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
EGLint s_configAttribs[] = {
- EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
- EGL_RED_SIZE, 5,
- EGL_GREEN_SIZE, 6,
- EGL_BLUE_SIZE, 5,
- EGL_NONE
+ EGL_BUFFER_SIZE, EGL_DONT_CARE,
+ EGL_RED_SIZE, 5,
+ EGL_GREEN_SIZE, 6,
+ EGL_BLUE_SIZE, 5,
+ EGL_DEPTH_SIZE, 8,
+ EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_NONE
};
- EGLint numConfigs = -1;
EGLint majorVersion;
EGLint minorVersion;
- EGLConfig config;
EGLContext context;
EGLSurface surface;
EGLint w, h;
@@ -100,20 +111,50 @@
checkEglError("<init>");
dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
checkEglError("eglGetDisplay");
- eglInitialize(dpy, &majorVersion, &minorVersion);
- checkEglError("eglInitialize");
+ if (dpy == EGL_NO_DISPLAY) {
+ printf("eglGetDisplay returned EGL_NO_DISPLAY.\n");
+ return 0;
+ }
+ returnValue = eglInitialize(dpy, &majorVersion, &minorVersion);
+ checkEglError("eglInitialize", returnValue);
fprintf(stderr, "EGL version %d.%d\n", majorVersion, minorVersion);
+
+ returnValue = eglGetConfigs (dpy, configs, 2, &config_count);
+ checkEglError("eglGetConfigs", returnValue);
+ fprintf(stderr, "Config count: %d\n", config_count);
+ for(int i = 0; i < config_count; i++) {
+ fprintf(stderr, "%d: 0x%08x\n", i, (unsigned int) configs[i]);
+ }
+#if 0
+ EGLConfig config;
EGLUtils::selectConfigForNativeWindow(dpy, s_configAttribs, window, &config);
- fprintf(stderr, "Chosen config: 0x%08x\n", (unsigned long) config);
-
checkEglError("EGLUtils::selectConfigForNativeWindow");
- surface = eglCreateWindowSurface(dpy, config, window, NULL);
- checkEglError("eglCreateWindowSurface");
+#else
+ int chooseConfigResult = eglChooseConfig(dpy, s_configAttribs, configs, 2, &config_count);
+ checkEglError("eglChooseConfig", chooseConfigResult);
+ if (chooseConfigResult != EGL_TRUE )
+ {
+ printf("eglChooseConfig failed\n");
+ return 0;
+ }
+#endif
+ surface = eglCreateWindowSurface(dpy, configs[0], window, NULL);
+ checkEglError("eglCreateWindowSurface");
+ if (surface == EGL_NO_SURFACE)
+ {
+ printf("gelCreateWindowSurface failed.\n");
+ return 0;
+ }
EGLint gl2_0Attribs[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
- context = eglCreateContext(dpy, config, NULL, gl2_0Attribs);
+ context = eglCreateContext(dpy, configs[0], EGL_NO_CONTEXT, context_attribs);
checkEglError("eglCreateContext");
+ if (context == EGL_NO_CONTEXT)
+ {
+ printf("eglCreateContext failed\n");
+ return 0;
+ }
eglMakeCurrent(dpy, surface, surface, context);
checkEglError("eglMakeCurrent");
eglQuerySurface(dpy, surface, EGL_WIDTH, &w);
diff --git a/services/java/com/android/server/LocationManagerService.java b/services/java/com/android/server/LocationManagerService.java
index af60556..f6a1be7 100644
--- a/services/java/com/android/server/LocationManagerService.java
+++ b/services/java/com/android/server/LocationManagerService.java
@@ -1069,13 +1069,13 @@
return mIntent;
}
- boolean isInProximity(double latitude, double longitude) {
+ boolean isInProximity(double latitude, double longitude, float accuracy) {
Location loc = new Location("");
loc.setLatitude(latitude);
loc.setLongitude(longitude);
double radius = loc.distanceTo(mLocation);
- return radius <= mRadius;
+ return radius <= Math.max(mRadius,accuracy);
}
@Override
@@ -1115,6 +1115,7 @@
long now = System.currentTimeMillis();
double latitude = loc.getLatitude();
double longitude = loc.getLongitude();
+ float accuracy = loc.getAccuracy();
ArrayList<PendingIntent> intentsToRemove = null;
for (ProximityAlert alert : mProximityAlerts.values()) {
@@ -1124,7 +1125,7 @@
if ((expiration == -1) || (now <= expiration)) {
boolean entered = mProximitiesEntered.contains(alert);
boolean inProximity =
- alert.isInProximity(latitude, longitude);
+ alert.isInProximity(latitude, longitude, accuracy);
if (!entered && inProximity) {
if (LOCAL_LOGV) {
Log.v(TAG, "Entered alert");
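
Note: with the new accuracy parameter, an alert now fires when the fix is within max(alert radius, fix accuracy) of the alert centre, so coarse fixes no longer miss nearby alerts. A standalone sketch of the adjusted test (alertCenter and alertRadius are illustrative names, not the service's actual fields):

    // alertCenter is a Location and alertRadius a float in metres (assumed)
    Location fix = new Location("gps");
    fix.setLatitude(37.4220);
    fix.setLongitude(-122.0841);
    fix.setAccuracy(150.0f);   // e.g. a coarse network fix
    boolean inProximity = fix.distanceTo(alertCenter) <= Math.max(alertRadius, fix.getAccuracy());
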