Merge "Switched to use the header files in /frameworks/native and deleted the duplicate header files in /frameworks/base"
diff --git a/api/current.txt b/api/current.txt
index 29f093a..ad84895 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -75,6 +75,7 @@
field public static final deprecated java.lang.String PERSISTENT_ACTIVITY = "android.permission.PERSISTENT_ACTIVITY";
field public static final java.lang.String PROCESS_OUTGOING_CALLS = "android.permission.PROCESS_OUTGOING_CALLS";
field public static final java.lang.String READ_CALENDAR = "android.permission.READ_CALENDAR";
+ field public static final java.lang.String READ_CALL_LOG = "android.permission.READ_CALL_LOG";
field public static final java.lang.String READ_CONTACTS = "android.permission.READ_CONTACTS";
field public static final java.lang.String READ_EXTERNAL_STORAGE = "android.permission.READ_EXTERNAL_STORAGE";
field public static final java.lang.String READ_FRAME_BUFFER = "android.permission.READ_FRAME_BUFFER";
@@ -121,6 +122,7 @@
field public static final java.lang.String WAKE_LOCK = "android.permission.WAKE_LOCK";
field public static final java.lang.String WRITE_APN_SETTINGS = "android.permission.WRITE_APN_SETTINGS";
field public static final java.lang.String WRITE_CALENDAR = "android.permission.WRITE_CALENDAR";
+ field public static final java.lang.String WRITE_CALL_LOG = "android.permission.WRITE_CALL_LOG";
field public static final java.lang.String WRITE_CONTACTS = "android.permission.WRITE_CONTACTS";
field public static final java.lang.String WRITE_EXTERNAL_STORAGE = "android.permission.WRITE_EXTERNAL_STORAGE";
field public static final java.lang.String WRITE_GSERVICES = "android.permission.WRITE_GSERVICES";
diff --git a/core/java/android/webkit/WebViewClassic.java b/core/java/android/webkit/WebViewClassic.java
index 812d89a..09ce1e2 100644
--- a/core/java/android/webkit/WebViewClassic.java
+++ b/core/java/android/webkit/WebViewClassic.java
@@ -548,9 +548,13 @@
if (!initData.mIsSpellCheckEnabled) {
inputType |= InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS;
}
- if (WebTextView.TEXT_AREA != type
- && initData.mIsTextFieldNext) {
- imeOptions |= EditorInfo.IME_FLAG_NAVIGATE_NEXT;
+ if (WebTextView.TEXT_AREA != type) {
+ if (initData.mIsTextFieldNext) {
+ imeOptions |= EditorInfo.IME_FLAG_NAVIGATE_NEXT;
+ }
+ if (initData.mIsTextFieldPrev) {
+ imeOptions |= EditorInfo.IME_FLAG_NAVIGATE_PREVIOUS;
+ }
}
switch (type) {
case WebTextView.NORMAL_TEXT_FIELD:
diff --git a/core/java/android/webkit/WebViewCore.java b/core/java/android/webkit/WebViewCore.java
index 8def74b..77d2641 100644
--- a/core/java/android/webkit/WebViewCore.java
+++ b/core/java/android/webkit/WebViewCore.java
@@ -954,14 +954,15 @@
public TextFieldInitData(int fieldPointer,
String text, int type, boolean isSpellCheckEnabled,
boolean autoComplete, boolean isTextFieldNext,
- String name, String label, int maxLength,
- Rect nodeBounds, int nodeLayerId) {
+ boolean isTextFieldPrev, String name, String label,
+ int maxLength, Rect nodeBounds, int nodeLayerId) {
mFieldPointer = fieldPointer;
mText = text;
mType = type;
mIsAutoCompleteEnabled = autoComplete;
mIsSpellCheckEnabled = isSpellCheckEnabled;
mIsTextFieldNext = isTextFieldNext;
+ mIsTextFieldPrev = isTextFieldPrev;
mName = name;
mLabel = label;
mMaxLength = maxLength;
@@ -973,6 +974,7 @@
int mType;
boolean mIsSpellCheckEnabled;
boolean mIsTextFieldNext;
+ boolean mIsTextFieldPrev;
boolean mIsAutoCompleteEnabled;
String mName;
String mLabel;
@@ -2798,7 +2800,7 @@
// called by JNI
private void initEditField(int pointer, String text, int inputType,
boolean isSpellCheckEnabled, boolean isAutoCompleteEnabled,
- boolean nextFieldIsText, String name,
+ boolean nextFieldIsText, boolean prevFieldIsText, String name,
String label, int start, int end, int selectionPtr, int maxLength,
Rect nodeRect, int nodeLayer) {
if (mWebView == null) {
@@ -2806,7 +2808,8 @@
}
TextFieldInitData initData = new TextFieldInitData(pointer,
text, inputType, isSpellCheckEnabled, isAutoCompleteEnabled,
- nextFieldIsText, name, label, maxLength, nodeRect, nodeLayer);
+ nextFieldIsText, prevFieldIsText, name, label, maxLength,
+ nodeRect, nodeLayer);
Message.obtain(mWebView.mPrivateHandler,
WebViewClassic.INIT_EDIT_FIELD, initData).sendToTarget();
Message.obtain(mWebView.mPrivateHandler,
diff --git a/core/jni/android_media_AudioRecord.cpp b/core/jni/android_media_AudioRecord.cpp
index b34dbce..7984b9c 100644
--- a/core/jni/android_media_AudioRecord.cpp
+++ b/core/jni/android_media_AudioRecord.cpp
@@ -72,7 +72,7 @@
#define AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED -20
jint android_media_translateRecorderErrorCode(int code) {
- switch(code) {
+ switch (code) {
case NO_ERROR:
return AUDIORECORD_SUCCESS;
case BAD_VALUE:
@@ -81,7 +81,7 @@
return AUDIORECORD_ERROR_INVALID_OPERATION;
default:
return AUDIORECORD_ERROR;
- }
+ }
}
@@ -90,14 +90,14 @@
if (event == AudioRecord::EVENT_MORE_DATA) {
// set size to 0 to signal we're not using the callback to read more data
AudioRecord::Buffer* pBuff = (AudioRecord::Buffer*)info;
- pBuff->size = 0;
-
+ pBuff->size = 0;
+
} else if (event == AudioRecord::EVENT_MARKER) {
audiorecord_callback_cookie *callbackInfo = (audiorecord_callback_cookie *)user;
JNIEnv *env = AndroidRuntime::getJNIEnv();
if (user && env) {
env->CallStaticVoidMethod(
- callbackInfo->audioRecord_class,
+ callbackInfo->audioRecord_class,
javaAudioRecordFields.postNativeEventInJava,
callbackInfo->audioRecord_ref, event, 0,0, NULL);
if (env->ExceptionCheck()) {
@@ -111,7 +111,7 @@
JNIEnv *env = AndroidRuntime::getJNIEnv();
if (user && env) {
env->CallStaticVoidMethod(
- callbackInfo->audioRecord_class,
+ callbackInfo->audioRecord_class,
javaAudioRecordFields.postNativeEventInJava,
callbackInfo->audioRecord_ref, event, 0,0, NULL);
if (env->ExceptionCheck()) {
@@ -140,7 +140,7 @@
uint32_t nbChannels = popcount(channels);
// compare the format against the Java constants
- if ((audioFormat != javaAudioRecordFields.PCM16)
+ if ((audioFormat != javaAudioRecordFields.PCM16)
&& (audioFormat != javaAudioRecordFields.PCM8)) {
ALOGE("Error creating AudioRecord: unsupported audio format.");
return AUDIORECORD_ERROR_SETUP_INVALIDFORMAT;
@@ -156,7 +156,7 @@
}
int frameSize = nbChannels * bytesPerSample;
size_t frameCount = buffSizeInBytes / frameSize;
-
+
if (uint32_t(source) >= AUDIO_SOURCE_CNT) {
ALOGE("Error creating AudioRecord: unknown source.");
return AUDIORECORD_ERROR_SETUP_INVALIDSOURCE;
@@ -181,11 +181,11 @@
// create an uninitialized AudioRecord object
lpRecorder = new AudioRecord();
- if(lpRecorder == NULL) {
+ if (lpRecorder == NULL) {
ALOGE("Error creating AudioRecord instance.");
return AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED;
}
-
+
// create the callback information:
// this data will be passed with every AudioRecord callback
jclass clazz = env->GetObjectClass(thiz);
@@ -197,7 +197,7 @@
lpCallbackData->audioRecord_class = (jclass)env->NewGlobalRef(clazz);
// we use a weak reference so the AudioRecord object can be garbage collected.
lpCallbackData->audioRecord_ref = env->NewGlobalRef(weak_this);
-
+
lpRecorder->set((audio_source_t) source,
sampleRateInHertz,
format, // word length, PCM
@@ -210,7 +210,7 @@
true, // threadCanCallJava)
sessionId);
- if(lpRecorder->initCheck() != NO_ERROR) {
+ if (lpRecorder->initCheck() != NO_ERROR) {
ALOGE("Error creating AudioRecord instance: initialization check failed.");
goto native_init_failure;
}
@@ -225,16 +225,16 @@
env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
nSession = NULL;
- // save our newly created C++ AudioRecord in the "nativeRecorderInJavaObj" field
+ // save our newly created C++ AudioRecord in the "nativeRecorderInJavaObj" field
// of the Java object
env->SetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj, (int)lpRecorder);
-
+
// save our newly created callback information in the "nativeCallbackCookie" field
// of the Java object (in mNativeCallbackCookie) so we can free the memory in finalize()
env->SetIntField(thiz, javaAudioRecordFields.nativeCallbackCookie, (int)lpCallbackData);
-
+
return AUDIORECORD_SUCCESS;
-
+
// failure:
native_init_failure:
env->DeleteGlobalRef(lpCallbackData->audioRecord_class);
@@ -246,7 +246,7 @@
env->SetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj, 0);
env->SetIntField(thiz, javaAudioRecordFields.nativeCallbackCookie, 0);
-
+
return AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED;
}
@@ -256,13 +256,13 @@
static int
android_media_AudioRecord_start(JNIEnv *env, jobject thiz)
{
- AudioRecord *lpRecorder =
+ AudioRecord *lpRecorder =
(AudioRecord *)env->GetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
if (lpRecorder == NULL ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
return AUDIORECORD_ERROR;
}
-
+
return android_media_translateRecorderErrorCode(lpRecorder->start());
}
@@ -271,7 +271,7 @@
static void
android_media_AudioRecord_stop(JNIEnv *env, jobject thiz)
{
- AudioRecord *lpRecorder =
+ AudioRecord *lpRecorder =
(AudioRecord *)env->GetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
if (lpRecorder == NULL ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
@@ -288,7 +288,7 @@
// serialize access. Ugly, but functional.
Mutex::Autolock lock(&sLock);
- AudioRecord *lpRecorder =
+ AudioRecord *lpRecorder =
(AudioRecord *)env->GetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
audiorecord_callback_cookie *lpCookie = (audiorecord_callback_cookie *)env->GetIntField(
thiz, javaAudioRecordFields.nativeCallbackCookie);
@@ -304,7 +304,7 @@
lpRecorder->stop();
delete lpRecorder;
}
-
+
// delete the callback information
if (lpCookie) {
ALOGV("deleting lpCookie: %x\n", (int)lpCookie);
@@ -329,7 +329,7 @@
AudioRecord *lpRecorder = NULL;
// get the audio recorder from which we'll read new audio samples
- lpRecorder =
+ lpRecorder =
(AudioRecord *)env->GetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
if (lpRecorder == NULL) {
ALOGE("Unable to retrieve AudioRecord object, can't record");
@@ -355,8 +355,8 @@
// read the new audio data from the native AudioRecord object
ssize_t recorderBuffSize = lpRecorder->frameCount()*lpRecorder->frameSize();
- ssize_t readSize = lpRecorder->read(recordBuff + offsetInBytes,
- sizeInBytes > (jint)recorderBuffSize ?
+ ssize_t readSize = lpRecorder->read(recordBuff + offsetInBytes,
+ sizeInBytes > (jint)recorderBuffSize ?
(jint)recorderBuffSize : sizeInBytes );
env->ReleaseByteArrayElements(javaAudioData, recordBuff, 0);
@@ -381,41 +381,41 @@
//ALOGV("Entering android_media_AudioRecord_readInBuffer");
// get the audio recorder from which we'll read new audio samples
- lpRecorder =
+ lpRecorder =
(AudioRecord *)env->GetIntField(thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
- if(lpRecorder==NULL)
+ if (lpRecorder==NULL)
return 0;
// direct buffer and direct access supported?
long capacity = env->GetDirectBufferCapacity(jBuffer);
- if(capacity == -1) {
+ if (capacity == -1) {
// buffer direct access is not supported
ALOGE("Buffer direct access is not supported, can't record");
return 0;
}
//ALOGV("capacity = %ld", capacity);
jbyte* nativeFromJavaBuf = (jbyte*) env->GetDirectBufferAddress(jBuffer);
- if(nativeFromJavaBuf==NULL) {
+ if (nativeFromJavaBuf==NULL) {
ALOGE("Buffer direct access is not supported, can't record");
return 0;
- }
+ }
// read new data from the recorder
- return (jint) lpRecorder->read(nativeFromJavaBuf,
+ return (jint) lpRecorder->read(nativeFromJavaBuf,
capacity < sizeInBytes ? capacity : sizeInBytes);
}
// ----------------------------------------------------------------------------
-static jint android_media_AudioRecord_set_marker_pos(JNIEnv *env, jobject thiz,
+static jint android_media_AudioRecord_set_marker_pos(JNIEnv *env, jobject thiz,
jint markerPos) {
-
+
AudioRecord *lpRecorder = (AudioRecord *)env->GetIntField(
thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
-
+
if (lpRecorder) {
- return
- android_media_translateRecorderErrorCode( lpRecorder->setMarkerPosition(markerPos) );
+ return
+ android_media_translateRecorderErrorCode( lpRecorder->setMarkerPosition(markerPos) );
} else {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioRecord pointer for setMarkerPosition()");
@@ -426,11 +426,11 @@
// ----------------------------------------------------------------------------
static jint android_media_AudioRecord_get_marker_pos(JNIEnv *env, jobject thiz) {
-
+
AudioRecord *lpRecorder = (AudioRecord *)env->GetIntField(
thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
uint32_t markerPos = 0;
-
+
if (lpRecorder) {
lpRecorder->getMarkerPosition(&markerPos);
return (jint)markerPos;
@@ -445,28 +445,28 @@
// ----------------------------------------------------------------------------
static jint android_media_AudioRecord_set_pos_update_period(JNIEnv *env, jobject thiz,
jint period) {
-
+
AudioRecord *lpRecorder = (AudioRecord *)env->GetIntField(
thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
-
+
if (lpRecorder) {
- return
- android_media_translateRecorderErrorCode( lpRecorder->setPositionUpdatePeriod(period) );
+ return
+ android_media_translateRecorderErrorCode( lpRecorder->setPositionUpdatePeriod(period) );
} else {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioRecord pointer for setPositionUpdatePeriod()");
return AUDIORECORD_ERROR;
- }
+ }
}
// ----------------------------------------------------------------------------
static jint android_media_AudioRecord_get_pos_update_period(JNIEnv *env, jobject thiz) {
-
+
AudioRecord *lpRecorder = (AudioRecord *)env->GetIntField(
thiz, javaAudioRecordFields.nativeRecorderInJavaObj);
uint32_t period = 0;
-
+
if (lpRecorder) {
lpRecorder->getPositionUpdatePeriod(&period);
return (jint)period;
@@ -514,7 +514,7 @@
(void *)android_media_AudioRecord_setup},
{"native_finalize", "()V", (void *)android_media_AudioRecord_finalize},
{"native_release", "()V", (void *)android_media_AudioRecord_release},
- {"native_read_in_byte_array",
+ {"native_read_in_byte_array",
"([BII)I", (void *)android_media_AudioRecord_readInByteArray},
{"native_read_in_short_array",
"([SII)I", (void *)android_media_AudioRecord_readInShortArray},
@@ -541,7 +541,7 @@
// ----------------------------------------------------------------------------
-extern bool android_media_getIntConstantFromClass(JNIEnv* pEnv,
+extern bool android_media_getIntConstantFromClass(JNIEnv* pEnv,
jclass theClass, const char* className, const char* constName, int* constVal);
// ----------------------------------------------------------------------------
@@ -550,7 +550,7 @@
javaAudioRecordFields.postNativeEventInJava = NULL;
javaAudioRecordFields.nativeRecorderInJavaObj = NULL;
javaAudioRecordFields.nativeCallbackCookie = NULL;
-
+
// Get the AudioRecord class
jclass audioRecordClass = env->FindClass(kClassPathName);
@@ -569,7 +569,7 @@
// Get the variables
// mNativeRecorderInJavaObj
- javaAudioRecordFields.nativeRecorderInJavaObj =
+ javaAudioRecordFields.nativeRecorderInJavaObj =
env->GetFieldID(audioRecordClass,
JAVA_NATIVERECORDERINJAVAOBJ_FIELD_NAME, "I");
if (javaAudioRecordFields.nativeRecorderInJavaObj == NULL) {
@@ -592,13 +592,13 @@
ALOGE("Can't find %s", JAVA_AUDIOFORMAT_CLASS_NAME);
return -1;
}
- if ( !android_media_getIntConstantFromClass(env, audioFormatClass,
- JAVA_AUDIOFORMAT_CLASS_NAME,
+ if ( !android_media_getIntConstantFromClass(env, audioFormatClass,
+ JAVA_AUDIOFORMAT_CLASS_NAME,
JAVA_CONST_PCM16_NAME, &(javaAudioRecordFields.PCM16))
- || !android_media_getIntConstantFromClass(env, audioFormatClass,
- JAVA_AUDIOFORMAT_CLASS_NAME,
+ || !android_media_getIntConstantFromClass(env, audioFormatClass,
+ JAVA_AUDIOFORMAT_CLASS_NAME,
JAVA_CONST_PCM8_NAME, &(javaAudioRecordFields.PCM8)) ) {
- // error log performed in getIntConstantFromClass()
+ // error log performed in getIntConstantFromClass()
return -1;
}
diff --git a/core/jni/android_media_AudioSystem.cpp b/core/jni/android_media_AudioSystem.cpp
index ee5eb7e..bff5994 100644
--- a/core/jni/android_media_AudioSystem.cpp
+++ b/core/jni/android_media_AudioSystem.cpp
@@ -268,7 +268,7 @@
int register_android_media_AudioSystem(JNIEnv *env)
{
AudioSystem::setErrorCallback(android_media_AudioSystem_error_callback);
-
+
return AndroidRuntime::registerNativeMethods(env,
kClassPathName, gMethods, NELEM(gMethods));
}
diff --git a/core/jni/android_media_AudioTrack.cpp b/core/jni/android_media_AudioTrack.cpp
index 57f5d3d..d4ed06c 100644
--- a/core/jni/android_media_AudioTrack.cpp
+++ b/core/jni/android_media_AudioTrack.cpp
@@ -105,7 +105,7 @@
jint android_media_translateErrorCode(int code) {
- switch(code) {
+ switch (code) {
case NO_ERROR:
return AUDIOTRACK_SUCCESS;
case BAD_VALUE:
@@ -114,7 +114,7 @@
return AUDIOTRACK_ERROR_INVALID_OPERATION;
default:
return AUDIOTRACK_ERROR;
- }
+ }
}
@@ -123,14 +123,14 @@
if (event == AudioTrack::EVENT_MORE_DATA) {
// set size to 0 to signal we're not using the callback to write more data
AudioTrack::Buffer* pBuff = (AudioTrack::Buffer*)info;
- pBuff->size = 0;
-
+ pBuff->size = 0;
+
} else if (event == AudioTrack::EVENT_MARKER) {
audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user;
JNIEnv *env = AndroidRuntime::getJNIEnv();
if (user && env) {
env->CallStaticVoidMethod(
- callbackInfo->audioTrack_class,
+ callbackInfo->audioTrack_class,
javaAudioTrackFields.postNativeEventInJava,
callbackInfo->audioTrack_ref, event, 0,0, NULL);
if (env->ExceptionCheck()) {
@@ -144,7 +144,7 @@
JNIEnv *env = AndroidRuntime::getJNIEnv();
if (user && env) {
env->CallStaticVoidMethod(
- callbackInfo->audioTrack_class,
+ callbackInfo->audioTrack_class,
javaAudioTrackFields.postNativeEventInJava,
callbackInfo->audioTrack_ref, event, 0,0, NULL);
if (env->ExceptionCheck()) {
@@ -186,7 +186,7 @@
}
int nbChannels = popcount(nativeChannelMask);
-
+
// check the stream type
audio_stream_type_t atStreamType;
switch (streamType) {
@@ -215,7 +215,7 @@
// for the moment 8bitPCM in MODE_STATIC is not supported natively in the AudioTrack C++ class
// so we declare everything as 16bitPCM, the 8->16bit conversion for MODE_STATIC will be handled
// in android_media_AudioTrack_native_write_byte()
- if ((audioFormat == javaAudioTrackFields.PCM8)
+ if ((audioFormat == javaAudioTrackFields.PCM8)
&& (memoryMode == javaAudioTrackFields.MODE_STATIC)) {
ALOGV("android_media_AudioTrack_native_setup(): requesting MODE_STATIC for 8bit \
buff size of %dbytes, switching to 16bit, buff size of %dbytes",
@@ -230,9 +230,9 @@
audio_format_t format = audioFormat == javaAudioTrackFields.PCM16 ?
AUDIO_FORMAT_PCM_16_BIT : AUDIO_FORMAT_PCM_8_BIT;
int frameCount = buffSizeInBytes / (nbChannels * bytesPerSample);
-
+
AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage();
-
+
// initialize the callback information:
// this data will be passed with every AudioTrack callback
jclass clazz = env->GetObjectClass(thiz);
@@ -244,7 +244,7 @@
lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
// we use a weak reference so the AudioTrack object can be garbage collected.
lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
-
+
lpJniStorage->mStreamType = atStreamType;
if (jSession == NULL) {
@@ -269,7 +269,7 @@
ALOGE("Error creating uninitialized AudioTrack");
goto native_track_failure;
}
-
+
// initialize the native AudioTrack object
if (memoryMode == javaAudioTrackFields.MODE_STREAM) {
@@ -285,15 +285,15 @@
0,// shared mem
true,// thread can call Java
sessionId);// audio session ID
-
+
} else if (memoryMode == javaAudioTrackFields.MODE_STATIC) {
// AudioTrack is using shared memory
-
+
if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) {
ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
goto native_init_failure;
}
-
+
lpTrack->set(
atStreamType,// stream type
sampleRateInHertz,
@@ -302,7 +302,7 @@
frameCount,
AUDIO_POLICY_OUTPUT_FLAG_NONE,
audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user));
- 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
+ 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
lpJniStorage->mMemBase,// shared mem
true,// thread can call Java
sessionId);// audio session ID
@@ -323,21 +323,21 @@
env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
nSession = NULL;
- // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
+ // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
// of the Java object (in mNativeTrackInJavaObj)
env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (int)lpTrack);
-
+
// save the JNI resources so we can free them later
//ALOGV("storing lpJniStorage: %x\n", (int)lpJniStorage);
env->SetIntField(thiz, javaAudioTrackFields.jniData, (int)lpJniStorage);
return AUDIOTRACK_SUCCESS;
-
+
// failures:
native_init_failure:
delete lpTrack;
env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, 0);
-
+
native_track_failure:
if (nSession != NULL) {
env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
@@ -347,7 +347,7 @@
delete lpJniStorage;
env->SetIntField(thiz, javaAudioTrackFields.jniData, 0);
return AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
-
+
}
@@ -432,7 +432,7 @@
// ----------------------------------------------------------------------------
static void android_media_AudioTrack_native_finalize(JNIEnv *env, jobject thiz) {
//ALOGV("android_media_AudioTrack_native_finalize jobject: %x\n", (int)thiz);
-
+
// delete the AudioTrack object
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
@@ -441,7 +441,7 @@
lpTrack->stop();
delete lpTrack;
}
-
+
// delete the JNI data
AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetIntField(
thiz, javaAudioTrackFields.jniData);
@@ -456,7 +456,7 @@
// ----------------------------------------------------------------------------
static void android_media_AudioTrack_native_release(JNIEnv *env, jobject thiz) {
-
+
// do everything a call to finalize would
android_media_AudioTrack_native_finalize(env, thiz);
// + reset the native resources in the Java object so any attempt to access
@@ -493,7 +493,7 @@
int count = sizeInBytes;
int16_t *dst = (int16_t *)pTrack->sharedBuffer()->pointer();
const int8_t *src = (const int8_t *)(data + offsetInBytes);
- while(count--) {
+ while (count--) {
*dst++ = (int16_t)(*src++^0x80) << 8;
}
// even though we wrote 2*sizeInBytes, we only report sizeInBytes as written to hide
@@ -514,7 +514,7 @@
AudioTrack *lpTrack = NULL;
//ALOGV("android_media_AudioTrack_native_write_byte(offset=%d, sizeInBytes=%d) called",
// offsetInBytes, sizeInBytes);
-
+
// get the audio track to load with samples
lpTrack = (AudioTrack *)env->GetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
if (lpTrack == NULL) {
@@ -599,7 +599,7 @@
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
if (lpTrack) {
- return (jint) lpTrack->getSampleRate();
+ return (jint) lpTrack->getSampleRate();
} else {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioTrack pointer for getSampleRate()");
@@ -609,14 +609,14 @@
// ----------------------------------------------------------------------------
-static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz,
+static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz,
jint markerPos) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
-
+
if (lpTrack) {
- return android_media_translateErrorCode( lpTrack->setMarkerPosition(markerPos) );
+ return android_media_translateErrorCode( lpTrack->setMarkerPosition(markerPos) );
} else {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioTrack pointer for setMarkerPosition()");
@@ -627,11 +627,11 @@
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env, jobject thiz) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
uint32_t markerPos = 0;
-
+
if (lpTrack) {
lpTrack->getMarkerPosition(&markerPos);
return (jint)markerPos;
@@ -646,27 +646,27 @@
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env, jobject thiz,
jint period) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
-
+
if (lpTrack) {
- return android_media_translateErrorCode( lpTrack->setPositionUpdatePeriod(period) );
+ return android_media_translateErrorCode( lpTrack->setPositionUpdatePeriod(period) );
} else {
jniThrowException(env, "java/lang/IllegalStateException",
"Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
return AUDIOTRACK_ERROR;
- }
+ }
}
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env, jobject thiz) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
uint32_t period = 0;
-
+
if (lpTrack) {
lpTrack->getPositionUpdatePeriod(&period);
return (jint)period;
@@ -679,12 +679,12 @@
// ----------------------------------------------------------------------------
-static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz,
+static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz,
jint position) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
-
+
if (lpTrack) {
return android_media_translateErrorCode( lpTrack->setPosition(position) );
} else {
@@ -697,11 +697,11 @@
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_position(JNIEnv *env, jobject thiz) {
-
+
AudioTrack *lpTrack = (AudioTrack *)env->GetIntField(
thiz, javaAudioTrackFields.nativeTrackInJavaObj);
uint32_t position = 0;
-
+
if (lpTrack) {
lpTrack->getPosition(&position);
return (jint)position;
@@ -944,12 +944,12 @@
// Get the memory mode constants
if ( !android_media_getIntConstantFromClass(env, audioTrackClass,
- kClassPathName,
+ kClassPathName,
JAVA_CONST_MODE_STATIC_NAME, &(javaAudioTrackFields.MODE_STATIC))
|| !android_media_getIntConstantFromClass(env, audioTrackClass,
- kClassPathName,
+ kClassPathName,
JAVA_CONST_MODE_STREAM_NAME, &(javaAudioTrackFields.MODE_STREAM)) ) {
- // error log performed in android_media_getIntConstantFromClass()
+ // error log performed in android_media_getIntConstantFromClass()
return -1;
}
@@ -960,16 +960,16 @@
ALOGE("Can't find %s", JAVA_AUDIOFORMAT_CLASS_NAME);
return -1;
}
- if ( !android_media_getIntConstantFromClass(env, audioFormatClass,
- JAVA_AUDIOFORMAT_CLASS_NAME,
+ if ( !android_media_getIntConstantFromClass(env, audioFormatClass,
+ JAVA_AUDIOFORMAT_CLASS_NAME,
JAVA_CONST_PCM16_NAME, &(javaAudioTrackFields.PCM16))
- || !android_media_getIntConstantFromClass(env, audioFormatClass,
- JAVA_AUDIOFORMAT_CLASS_NAME,
+ || !android_media_getIntConstantFromClass(env, audioFormatClass,
+ JAVA_AUDIOFORMAT_CLASS_NAME,
JAVA_CONST_PCM8_NAME, &(javaAudioTrackFields.PCM8)) ) {
- // error log performed in android_media_getIntConstantFromClass()
+ // error log performed in android_media_getIntConstantFromClass()
return -1;
}
-
+
return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}
diff --git a/core/jni/android_media_JetPlayer.cpp b/core/jni/android_media_JetPlayer.cpp
index 9f9bedb..6fedc6b 100644
--- a/core/jni/android_media_JetPlayer.cpp
+++ b/core/jni/android_media_JetPlayer.cpp
@@ -56,7 +56,7 @@
jetPlayerEventCallback(int what, int arg1=0, int arg2=0, void* javaTarget = NULL)
{
JNIEnv *env = AndroidRuntime::getJNIEnv();
- if(env) {
+ if (env) {
env->CallStaticVoidMethod(
javaJetPlayerFields.jetClass, javaJetPlayerFields.postNativeEventInJava,
javaTarget,
@@ -84,7 +84,7 @@
EAS_RESULT result = lpJet->init();
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
// save our newly created C++ JetPlayer in the "nativePlayerInJavaObj" field
// of the Java object (in mNativePlayerInJavaObj)
env->SetIntField(thiz, javaJetPlayerFields.nativePlayerInJavaObj, (int)lpJet);
@@ -105,7 +105,7 @@
ALOGV("android_media_JetPlayer_finalize(): entering.");
JetPlayer *lpJet = (JetPlayer *)env->GetIntField(
thiz, javaJetPlayerFields.nativePlayerInJavaObj);
- if(lpJet != NULL) {
+ if (lpJet != NULL) {
lpJet->release();
delete lpJet;
}
@@ -148,7 +148,7 @@
EAS_RESULT result = lpJet->loadFromFile(pathStr);
env->ReleaseStringUTFChars(path, pathStr);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_openFile(): file successfully opened");
return JNI_TRUE;
} else {
@@ -178,7 +178,7 @@
EAS_RESULT result = lpJet->loadFromFD(jniGetFDFromFileDescriptor(env, fileDescriptor),
(long long)offset, (long long)length); // cast params to types used by EAS_FILE
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
ALOGV("android_media_JetPlayer_openFileDescr(): file successfully opened");
return JNI_TRUE;
} else {
@@ -200,7 +200,7 @@
"Unable to retrieve JetPlayer pointer for closeFile()");
}
- if( lpJet->closeFile()==EAS_SUCCESS) {
+ if (lpJet->closeFile()==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_closeFile(): file successfully closed");
return JNI_TRUE;
} else {
@@ -222,7 +222,7 @@
}
EAS_RESULT result = lpJet->play();
- if( result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_play(): play successful");
return JNI_TRUE;
} else {
@@ -245,11 +245,11 @@
}
EAS_RESULT result = lpJet->pause();
- if( result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_pause(): pause successful");
return JNI_TRUE;
} else {
- if(result==EAS_ERROR_QUEUE_IS_EMPTY) {
+ if (result==EAS_ERROR_QUEUE_IS_EMPTY) {
ALOGV("android_media_JetPlayer_pause(): paused with an empty queue");
return JNI_TRUE;
} else
@@ -275,7 +275,7 @@
EAS_RESULT result
= lpJet->queueSegment(segmentNum, libNum, repeatCount, transpose, muteFlags, userID);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_queueSegment(): segment successfully queued");
return JNI_TRUE;
} else {
@@ -311,7 +311,7 @@
EAS_U32 muteMask=0;
int maxTracks = lpJet->getMaxTracks();
for (jint trackIndex=0; trackIndex<maxTracks; trackIndex++) {
- if(muteTracks[maxTracks-1-trackIndex]==JNI_TRUE)
+ if (muteTracks[maxTracks-1-trackIndex]==JNI_TRUE)
muteMask = (muteMask << 1) | 0x00000001;
else
muteMask = muteMask << 1;
@@ -321,7 +321,7 @@
result = lpJet->queueSegment(segmentNum, libNum, repeatCount, transpose, muteMask, userID);
env->ReleaseBooleanArrayElements(muteArray, muteTracks, 0);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_queueSegmentMuteArray(): segment successfully queued");
return JNI_TRUE;
} else {
@@ -346,7 +346,7 @@
EAS_RESULT result;
result = lpJet->setMuteFlags(muteFlags, bSync==JNI_TRUE ? true : false);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_setMuteFlags(): mute flags successfully updated");
return JNI_TRUE;
} else {
@@ -380,7 +380,7 @@
EAS_U32 muteMask=0;
int maxTracks = lpJet->getMaxTracks();
for (jint trackIndex=0; trackIndex<maxTracks; trackIndex++) {
- if(muteTracks[maxTracks-1-trackIndex]==JNI_TRUE)
+ if (muteTracks[maxTracks-1-trackIndex]==JNI_TRUE)
muteMask = (muteMask << 1) | 0x00000001;
else
muteMask = muteMask << 1;
@@ -390,7 +390,7 @@
result = lpJet->setMuteFlags(muteMask, bSync==JNI_TRUE ? true : false);
env->ReleaseBooleanArrayElements(muteArray, muteTracks, 0);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_setMuteArray(): mute flags successfully updated");
return JNI_TRUE;
} else {
@@ -416,7 +416,7 @@
EAS_RESULT result;
result = lpJet->setMuteFlag(trackId,
muteFlag==JNI_TRUE ? true : false, bSync==JNI_TRUE ? true : false);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_setMuteFlag(): mute flag successfully updated for track %d", trackId);
return JNI_TRUE;
} else {
@@ -440,7 +440,7 @@
EAS_RESULT result;
result = lpJet->triggerClip(clipId);
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_triggerClip(): triggerClip successful for clip %d", clipId);
return JNI_TRUE;
} else {
@@ -463,7 +463,7 @@
}
EAS_RESULT result = lpJet->clearQueue();
- if(result==EAS_SUCCESS) {
+ if (result==EAS_SUCCESS) {
//ALOGV("android_media_JetPlayer_clearQueue(): clearQueue successful");
return JNI_TRUE;
} else {
diff --git a/core/res/AndroidManifest.xml b/core/res/AndroidManifest.xml
index 8f03fe0..d1e3642 100644
--- a/core/res/AndroidManifest.xml
+++ b/core/res/AndroidManifest.xml
@@ -248,6 +248,21 @@
android:label="@string/permlab_writeContacts"
android:description="@string/permdesc_writeContacts" />
+ <!-- Allows an application to read the user's call log. -->
+ <permission android:name="android.permission.READ_CALL_LOG"
+ android:permissionGroup="android.permission-group.PERSONAL_INFO"
+ android:protectionLevel="dangerous"
+ android:label="@string/permlab_readCallLog"
+ android:description="@string/permdesc_readCallLog" />
+
+    <!-- Allows an application to write (but not read) the user's
+         call log data. -->
+ <permission android:name="android.permission.WRITE_CALL_LOG"
+ android:permissionGroup="android.permission-group.PERSONAL_INFO"
+ android:protectionLevel="dangerous"
+ android:label="@string/permlab_writeCallLog"
+ android:description="@string/permdesc_writeCallLog" />
+
<!-- Allows an application to read the user's personal profile data. -->
<permission android:name="android.permission.READ_PROFILE"
diff --git a/core/res/res/values/strings.xml b/core/res/res/values/strings.xml
index 7137b7c..7799f74 100755
--- a/core/res/res/values/strings.xml
+++ b/core/res/res/values/strings.xml
@@ -942,6 +942,26 @@
contact (address) data stored on your phone. Malicious
apps may use this to erase or modify your contact data.</string>
+
+ <!-- Title of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permlab_readCallLog">read call log</string>
+ <!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permdesc_readCallLog" product="tablet">Allows the app to read your tablet\'s call log, including data about incoming and outgoing calls.
+ Malicious apps may use this to send your data to other people.</string>
+ <!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permdesc_readCallLog" product="default">Allows the app to read your phone\'s call log, including data about incoming and outgoing calls.
+ Malicious apps may use this to send your data to other people.</string>
+
+ <!-- Title of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permlab_writeCallLog">write call log</string>
+ <!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permdesc_writeCallLog" product="tablet">Allows the app to modify your tablet\'s call log, including data about incoming and outgoing calls.
+ Malicious apps may use this to erase or modify your call log.</string>
+ <!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permdesc_writeCallLog" product="default">Allows the app to modify your phone\'s call log, including data about incoming and outgoing calls.
+ Malicious apps may use this to erase or modify your call log.</string>
+
+
<!-- Title of the read profile permission, listed so the user can decide whether to allow the application to read the user's personal profile data. [CHAR LIMIT=30] -->
<string name="permlab_readProfile">read your profile data</string>
<!-- Description of the read profile permission, listed so the user can decide whether to allow the application to read the user's personal profile data. [CHAR LIMIT=NONE] -->
diff --git a/include/media/IAudioRecord.h b/include/media/IAudioRecord.h
index 7869020..089be3b 100644
--- a/include/media/IAudioRecord.h
+++ b/include/media/IAudioRecord.h
@@ -32,7 +32,7 @@
class IAudioRecord : public IInterface
{
-public:
+public:
DECLARE_META_INTERFACE(AudioRecord);
/* After it's created the track is not active. Call start() to
@@ -42,13 +42,13 @@
virtual status_t start(pid_t tid) = 0;
/* Stop a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
+ * obtainBuffer will return an error. Buffers that are already released
* will be processed, unless flush() is called.
*/
virtual void stop() = 0;
/* get this tracks control block */
- virtual sp<IMemory> getCblk() const = 0;
+ virtual sp<IMemory> getCblk() const = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h
index 77f3e21..577b095 100644
--- a/include/media/IAudioTrack.h
+++ b/include/media/IAudioTrack.h
@@ -32,7 +32,7 @@
class IAudioTrack : public IInterface
{
-public:
+public:
DECLARE_META_INTERFACE(AudioTrack);
/* Get this track's control block */
@@ -45,7 +45,7 @@
virtual status_t start(pid_t tid) = 0;
/* Stop a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
+ * obtainBuffer will return an error. Buffers that are already released
* will continue to be processed, unless/until flush() is called.
*/
virtual void stop() = 0;
@@ -59,9 +59,9 @@
* While muted, the callback, if set, is still called.
*/
virtual void mute(bool) = 0;
-
+
/* Pause a track. If set, the callback will cease being called and
- * obtainBuffer will return an error. Buffers that are already released
+ * obtainBuffer will return an error. Buffers that are already released
* will continue to be processed, unless/until flush() is called.
*/
virtual void pause() = 0;
diff --git a/include/media/IMediaMetadataRetriever.h b/include/media/IMediaMetadataRetriever.h
index 1c1c268..6dbb2d7 100644
--- a/include/media/IMediaMetadataRetriever.h
+++ b/include/media/IMediaMetadataRetriever.h
@@ -56,4 +56,3 @@
}; // namespace android
#endif // ANDROID_IMEDIAMETADATARETRIEVER_H
-
diff --git a/include/media/IMediaPlayerClient.h b/include/media/IMediaPlayerClient.h
index daec1c7..8f1843e 100644
--- a/include/media/IMediaPlayerClient.h
+++ b/include/media/IMediaPlayerClient.h
@@ -45,4 +45,3 @@
}; // namespace android
#endif // ANDROID_IMEDIAPLAYERCLIENT_H
-
diff --git a/include/media/IMediaRecorderClient.h b/include/media/IMediaRecorderClient.h
index 0058ef2..e7d0229 100644
--- a/include/media/IMediaRecorderClient.h
+++ b/include/media/IMediaRecorderClient.h
@@ -45,4 +45,3 @@
}; // namespace android
#endif // ANDROID_IMEDIARECORDERCLIENT_H
-
diff --git a/include/media/JetPlayer.h b/include/media/JetPlayer.h
index 491a950..9f6ff4c 100644
--- a/include/media/JetPlayer.h
+++ b/include/media/JetPlayer.h
@@ -40,13 +40,13 @@
static const int JET_NUMQUEUEDSEGMENT_UPDATE = 3;
static const int JET_PAUSE_UPDATE = 4;
- JetPlayer(jobject javaJetPlayer,
- int maxTracks = 32,
+ JetPlayer(jobject javaJetPlayer,
+ int maxTracks = 32,
int trackBufferSize = 1200);
~JetPlayer();
int init();
int release();
-
+
int loadFromFile(const char* url);
int loadFromFD(const int fd, const long long offset, const long long length);
int closeFile();
@@ -60,7 +60,7 @@
int clearQueue();
void setEventCallback(jetevent_callback callback);
-
+
int getMaxTracks() { return mMaxTracks; };
@@ -88,7 +88,7 @@
int mMaxTracks; // max number of MIDI tracks, usually 32
EAS_DATA_HANDLE mEasData;
EAS_FILE_LOCATOR mEasJetFileLoc;
- EAS_PCM* mAudioBuffer;// EAS renders the MIDI data into this buffer,
+ EAS_PCM* mAudioBuffer;// EAS renders the MIDI data into this buffer,
AudioTrack* mAudioTrack; // and we play it in this audio track
int mTrackBufferSize;
S_JET_STATUS mJetStatus;
diff --git a/include/media/MediaMetadataRetrieverInterface.h b/include/media/MediaMetadataRetrieverInterface.h
index 27b7e4d..ecc3b65 100644
--- a/include/media/MediaMetadataRetrieverInterface.h
+++ b/include/media/MediaMetadataRetrieverInterface.h
@@ -1,6 +1,6 @@
/*
**
-** Copyright (C) 2008 The Android Open Source Project
+** Copyright (C) 2008 The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
@@ -56,4 +56,3 @@
}; // namespace android
#endif // ANDROID_MEDIAMETADATARETRIEVERINTERFACE_H
-
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index 250f267..9fc962c 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -516,4 +516,3 @@
}; // namespace android
#endif // ANDROID_MEDIAPROFILES_H
-
diff --git a/include/media/MemoryLeakTrackUtil.h b/include/media/MemoryLeakTrackUtil.h
index ac0f6b2..d2618aa 100644
--- a/include/media/MemoryLeakTrackUtil.h
+++ b/include/media/MemoryLeakTrackUtil.h
@@ -1,4 +1,3 @@
-
/*
* Copyright 2011, The Android Open Source Project
*
diff --git a/include/private/media/VideoFrame.h b/include/private/media/VideoFrame.h
index 3aff0c6..0ecc348 100644
--- a/include/private/media/VideoFrame.h
+++ b/include/private/media/VideoFrame.h
@@ -26,7 +26,7 @@
namespace android {
// A simple buffer to hold binary data
-class MediaAlbumArt
+class MediaAlbumArt
{
public:
MediaAlbumArt(): mSize(0), mData(0) {}
@@ -57,9 +57,9 @@
fclose(in);
}
- MediaAlbumArt(const MediaAlbumArt& copy) {
- mSize = copy.mSize;
- mData = NULL; // initialize it first
+ MediaAlbumArt(const MediaAlbumArt& copy) {
+ mSize = copy.mSize;
+ mData = NULL; // initialize it first
if (mSize > 0 && copy.mData != NULL) {
mData = new uint8_t[copy.mSize];
if (mData != NULL) {
@@ -89,7 +89,7 @@
{
public:
VideoFrame(): mWidth(0), mHeight(0), mDisplayWidth(0), mDisplayHeight(0), mSize(0), mData(0) {}
-
+
VideoFrame(const VideoFrame& copy) {
mWidth = copy.mWidth;
mHeight = copy.mHeight;
diff --git a/libs/hwui/DisplayListRenderer.cpp b/libs/hwui/DisplayListRenderer.cpp
index 24f4f1c..24f52e0 100644
--- a/libs/hwui/DisplayListRenderer.cpp
+++ b/libs/hwui/DisplayListRenderer.cpp
@@ -16,11 +16,11 @@
#define LOG_TAG "OpenGLRenderer"
+#include <SkCamera.h>
#include "DisplayListLogBuffer.h"
#include "DisplayListRenderer.h"
#include "Caches.h"
-#include "SkCamera.h"
namespace android {
namespace uirenderer {
@@ -91,7 +91,9 @@
fflush(file);
}
-DisplayList::DisplayList(const DisplayListRenderer& recorder) {
+DisplayList::DisplayList(const DisplayListRenderer& recorder) :
+ mTransformMatrix(NULL), mTransformCamera(NULL), mTransformMatrix3D(NULL) {
+
initFromDisplayListRenderer(recorder);
}
@@ -124,9 +126,6 @@
mWidth = 0;
mHeight = 0;
mPivotExplicitlySet = false;
- mTransformMatrix = NULL;
- mTransformCamera = NULL;
- mTransformMatrix3D = NULL;
mCaching = false;
}
@@ -140,17 +139,19 @@
void DisplayList::clearResources() {
sk_free((void*) mReader.base());
- if (mTransformMatrix) {
- delete mTransformMatrix;
- mTransformMatrix = NULL;
- }
- if (mTransformCamera) {
- delete mTransformCamera;
- mTransformCamera = NULL;
- }
- if (mTransformMatrix3D) {
- delete mTransformMatrix3D;
- mTransformMatrix3D = NULL;
+ if (USE_DISPLAY_LIST_PROPERTIES) {
+ if (mTransformMatrix) {
+ delete mTransformMatrix;
+ mTransformMatrix = NULL;
+ }
+ if (mTransformCamera) {
+ delete mTransformCamera;
+ mTransformCamera = NULL;
+ }
+ if (mTransformMatrix3D) {
+ delete mTransformMatrix3D;
+ mTransformMatrix3D = NULL;
+ }
}
Caches& caches = Caches::getInstance();
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 6808aa2..34451ca 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -174,7 +174,7 @@
mIEffect->disconnect();
mIEffect->asBinder()->unlinkToDeath(mIEffectClient);
}
- IPCThreadState::self()->flushCommands();
+ IPCThreadState::self()->flushCommands();
}
mIEffect.clear();
mIEffectClient.clear();
@@ -480,4 +480,3 @@
}; // namespace android
-
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 22c3a18..05ade75 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -293,13 +293,13 @@
return WOULD_BLOCK;
}
}
- }
+ }
AutoMutex lock(mLock);
// acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
// while we are accessing the cblk
- sp <IAudioRecord> audioRecord = mAudioRecord;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioRecord> audioRecord = mAudioRecord;
+ sp<IMemory> iMem = mCblkMemory;
audio_track_cblk_t* cblk = mCblk;
if (mActive == 0) {
mActive = 1;
@@ -638,8 +638,8 @@
mLock.lock();
// acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
// while we are accessing the cblk
- sp <IAudioRecord> audioRecord = mAudioRecord;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioRecord> audioRecord = mAudioRecord;
+ sp<IMemory> iMem = mCblkMemory;
mLock.unlock();
do {
@@ -684,8 +684,8 @@
mLock.lock();
// acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed
// while we are accessing the cblk
- sp <IAudioRecord> audioRecord = mAudioRecord;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioRecord> audioRecord = mAudioRecord;
+ sp<IMemory> iMem = mCblkMemory;
audio_track_cblk_t* cblk = mCblk;
mLock.unlock();
@@ -806,7 +806,7 @@
}
}
ALOGV("restoreRecord_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x",
- result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
+ result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
if (result == NO_ERROR) {
// from now on we switch to the newly created cblk
@@ -843,4 +843,3 @@
// -------------------------------------------------------------------------
}; // namespace android
-
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index a1cbf0f..33c7d03 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -59,14 +59,14 @@
break;
ALOGW("AudioFlinger not published, waiting...");
usleep(500000); // 0.5 s
- } while(true);
+ } while (true);
if (gAudioFlingerClient == NULL) {
gAudioFlingerClient = new AudioFlingerClient();
} else {
if (gAudioErrorCallback) {
gAudioErrorCallback(NO_ERROR);
}
- }
+ }
binder->linkToDeath(gAudioFlingerClient);
gAudioFlinger = interface_cast<IAudioFlinger>(binder);
gAudioFlinger->registerClient(gAudioFlingerClient);
@@ -482,7 +482,7 @@
}
bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType) {
- switch(streamType) {
+ switch (streamType) {
case AUDIO_STREAM_MUSIC:
case AUDIO_STREAM_VOICE_CALL:
case AUDIO_STREAM_BLUETOOTH_SCO:
@@ -512,7 +512,7 @@
break;
ALOGW("AudioPolicyService not published, waiting...");
usleep(500000); // 0.5 s
- } while(true);
+ } while (true);
if (gAudioPolicyServiceClient == NULL) {
gAudioPolicyServiceClient = new AudioPolicyServiceClient();
}
@@ -768,4 +768,3 @@
}
}; // namespace android
-
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 34563ca..048be1d 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -72,7 +72,7 @@
if (minBufCount < 2) minBufCount = 2;
*frameCount = (sampleRate == 0) ? afFrameCount * minBufCount :
- afFrameCount * minBufCount * sampleRate / afSampleRate;
+ afFrameCount * minBufCount * sampleRate / afSampleRate;
return NO_ERROR;
}
@@ -352,13 +352,13 @@
return;
}
}
- }
+ }
AutoMutex lock(mLock);
// acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
// while we are accessing the cblk
- sp <IAudioTrack> audioTrack = mAudioTrack;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioTrack> audioTrack = mAudioTrack;
+ sp<IMemory> iMem = mCblkMemory;
audio_track_cblk_t* cblk = mCblk;
if (!mActive) {
@@ -743,8 +743,8 @@
status_t status;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
if (audioFlinger == 0) {
- ALOGE("Could not get audioflinger");
- return NO_INIT;
+ ALOGE("Could not get audioflinger");
+ return NO_INIT;
}
int afSampleRate;
@@ -830,7 +830,7 @@
mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t);
} else {
mCblk->buffers = sharedBuffer->pointer();
- // Force buffer full condition as data is already present in shared memory
+ // Force buffer full condition as data is already present in shared memory
mCblk->stepUser(mCblk->frameCount);
}
@@ -991,8 +991,8 @@
// acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
// while we are accessing the cblk
mLock.lock();
- sp <IAudioTrack> audioTrack = mAudioTrack;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioTrack> audioTrack = mAudioTrack;
+ sp<IMemory> iMem = mCblkMemory;
mLock.unlock();
ssize_t written = 0;
@@ -1095,8 +1095,8 @@
mLock.lock();
// acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed
// while we are accessing the cblk
- sp <IAudioTrack> audioTrack = mAudioTrack;
- sp <IMemory> iMem = mCblkMemory;
+ sp<IAudioTrack> audioTrack = mAudioTrack;
+ sp<IMemory> iMem = mCblkMemory;
audio_track_cblk_t* cblk = mCblk;
bool active = mActive;
mLock.unlock();
@@ -1224,7 +1224,7 @@
if (!(android_atomic_or(CBLK_RESTORING_ON, &cblk->flags) & CBLK_RESTORING_MSK)) {
ALOGW("dead IAudioTrack, creating a new one from %s TID %d",
- fromStart ? "start()" : "obtainBuffer()", gettid());
+ fromStart ? "start()" : "obtainBuffer()", gettid());
// signal old cblk condition so that other threads waiting for available buffers stop
// waiting now
@@ -1310,7 +1310,7 @@
}
}
ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x",
- result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
+ result, mActive, mCblk, cblk, mCblk->flags, cblk->flags);
if (result == NO_ERROR) {
// from now on we switch to the newly created cblk
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 47c261da..07b12e4 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -622,11 +622,11 @@
sp<IEffect> effect;
if (pDesc == NULL) {
- return effect;
- if (status) {
- *status = BAD_VALUE;
- }
- }
+ return effect;
+ if (status) {
+ *status = BAD_VALUE;
+ }
+ }
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(pid);
@@ -679,7 +679,7 @@
status_t BnAudioFlinger::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case CREATE_TRACK: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
pid_t pid = data.readInt32();
@@ -745,7 +745,7 @@
reply->writeInt32( latency((audio_io_handle_t) data.readInt32()) );
return NO_ERROR;
} break;
- case SET_MASTER_VOLUME: {
+ case SET_MASTER_VOLUME: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
reply->writeInt32( setMasterVolume(data.readFloat()) );
return NO_ERROR;
@@ -815,14 +815,14 @@
String8 keyValuePairs(data.readString8());
reply->writeInt32(setParameters(ioHandle, keyValuePairs));
return NO_ERROR;
- } break;
+ } break;
case GET_PARAMETERS: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
String8 keys(data.readString8());
reply->writeString8(getParameters(ioHandle, keys));
return NO_ERROR;
- } break;
+ } break;
case REGISTER_CLIENT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 1db39a3..4178b29 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -68,7 +68,7 @@
status_t BnAudioFlingerClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case IO_CONFIG_CHANGED: {
CHECK_INTERFACE(IAudioFlingerClient, data, reply);
int event = data.readInt32();
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index da7c124..5040bd9 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -365,7 +365,7 @@
status_t BnAudioPolicyService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case SET_DEVICE_CONNECTION_STATE: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_devices_t device =
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 6b473c9..377b9a8 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -2,16 +2,16 @@
**
** Copyright 2007, The Android Open Source Project
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
**
-** http://www.apache.org/licenses/LICENSE-2.0
+** http://www.apache.org/licenses/LICENSE-2.0
**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
** limitations under the License.
*/
@@ -41,7 +41,7 @@
: BpInterface<IAudioRecord>(impl)
{
}
-
+
virtual status_t start(pid_t tid)
{
Parcel data, reply;
@@ -55,14 +55,14 @@
}
return status;
}
-
+
virtual void stop()
{
Parcel data, reply;
data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
remote()->transact(STOP, data, &reply);
}
-
+
virtual sp<IMemory> getCblk() const
{
Parcel data, reply;
@@ -73,7 +73,7 @@
cblk = interface_cast<IMemory>(reply.readStrongBinder());
}
return cblk;
- }
+ }
};
IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord");
@@ -83,8 +83,8 @@
status_t BnAudioRecord::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
- case GET_CBLK: {
+ switch (code) {
+ case GET_CBLK: {
CHECK_INTERFACE(IAudioRecord, data, reply);
reply->writeStrongBinder(getCblk()->asBinder());
return NO_ERROR;
@@ -105,4 +105,3 @@
}
}; // namespace android
-
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index 28ebbbfc..09f31a7 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -2,16 +2,16 @@
**
** Copyright 2007, The Android Open Source Project
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
**
-** http://www.apache.org/licenses/LICENSE-2.0
+** http://www.apache.org/licenses/LICENSE-2.0
**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
** limitations under the License.
*/
@@ -48,7 +48,7 @@
: BpInterface<IAudioTrack>(impl)
{
}
-
+
virtual sp<IMemory> getCblk() const
{
Parcel data, reply;
@@ -74,14 +74,14 @@
}
return status;
}
-
+
virtual void stop()
{
Parcel data, reply;
data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
remote()->transact(STOP, data, &reply);
}
-
+
virtual void flush()
{
Parcel data, reply;
@@ -96,14 +96,14 @@
data.writeInt32(e);
remote()->transact(MUTE, data, &reply);
}
-
+
virtual void pause()
{
Parcel data, reply;
data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
remote()->transact(PAUSE, data, &reply);
}
-
+
virtual status_t attachAuxEffect(int effectId)
{
Parcel data, reply;
@@ -172,8 +172,8 @@
status_t BnAudioTrack::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
- case GET_CBLK: {
+ switch (code) {
+ case GET_CBLK: {
CHECK_INTERFACE(IAudioTrack, data, reply);
reply->writeStrongBinder(getCblk()->asBinder());
return NO_ERROR;
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index 5d40cc8..a303a8f 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -129,7 +129,7 @@
status_t BnEffect::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case ENABLE: {
ALOGV("ENABLE");
CHECK_INTERFACE(IEffect, data, reply);
@@ -186,10 +186,10 @@
} break;
case GET_CBLK: {
- CHECK_INTERFACE(IEffect, data, reply);
- reply->writeStrongBinder(getCblk()->asBinder());
- return NO_ERROR;
- } break;
+ CHECK_INTERFACE(IEffect, data, reply);
+ reply->writeStrongBinder(getCblk()->asBinder());
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
@@ -199,4 +199,3 @@
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/media/libmedia/IEffectClient.cpp b/media/libmedia/IEffectClient.cpp
index 4693b45..aef4371 100644
--- a/media/libmedia/IEffectClient.cpp
+++ b/media/libmedia/IEffectClient.cpp
@@ -94,7 +94,7 @@
status_t BnEffectClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case CONTROL_STATUS_CHANGED: {
ALOGV("CONTROL_STATUS_CHANGED");
CHECK_INTERFACE(IEffectClient, data, reply);
@@ -142,4 +142,3 @@
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index aeb35a5..9199db6 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -43,10 +43,10 @@
binder = sm->getService(String16("media.player"));
if (binder != 0) {
break;
- }
- ALOGW("Media player service not published, waiting...");
- usleep(500000); // 0.5 s
- } while(true);
+ }
+ ALOGW("Media player service not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while (true);
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
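
The IMediaDeathNotifier hunk only reindents the wait-for-service loop. The pattern it keeps, polling until the "media.player" service is published with a 0.5 s back-off, can be sketched in standalone C++ as follows; tryGetService() is a hypothetical probe standing in for the ServiceManager lookup.

    #include <unistd.h>   // usleep(), as in the original loop
    #include <cstdio>

    // Hypothetical probe: pretends the service appears on the third attempt.
    static bool tryGetService() {
        static int attempts = 0;
        return ++attempts >= 3;
    }

    int main() {
        // Same shape as getMediaPlayerService(): loop until the binder is
        // published, sleeping half a second between attempts.
        do {
            if (tryGetService()) {
                break;
            }
            std::fprintf(stderr, "Media player service not published, waiting...\n");
            usleep(500000); // 0.5 s
        } while (true);
        return 0;
    }
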
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c47fa41..f586b02 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -319,7 +319,7 @@
status_t BnMediaPlayer::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case DISCONNECT: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
disconnect();
diff --git a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp
index 1f135c4..a670c96 100644
--- a/media/libmedia/IMediaPlayerClient.cpp
+++ b/media/libmedia/IMediaPlayerClient.cpp
@@ -56,7 +56,7 @@
status_t BnMediaPlayerClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case NOTIFY: {
CHECK_INTERFACE(IMediaPlayerClient, data, reply);
int msg = data.readInt32();
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index f5b5cbd..f5fccef 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -132,7 +132,7 @@
status_t BnMediaPlayerService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case CREATE: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 2f4e31a..a710fd7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -289,7 +289,7 @@
status_t BnMediaRecorder::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case RELEASE: {
ALOGV("RELEASE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
diff --git a/media/libmedia/IMediaRecorderClient.cpp b/media/libmedia/IMediaRecorderClient.cpp
index ff235c9..e7907e3 100644
--- a/media/libmedia/IMediaRecorderClient.cpp
+++ b/media/libmedia/IMediaRecorderClient.cpp
@@ -53,7 +53,7 @@
status_t BnMediaRecorderClient::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
- switch(code) {
+ switch (code) {
case NOTIFY: {
CHECK_INTERFACE(IMediaRecorderClient, data, reply);
int msg = data.readInt32();
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index a85956c..312a493 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -74,14 +74,14 @@
// init the EAS library
result = EAS_Init(&mEasData);
- if( result != EAS_SUCCESS) {
+ if (result != EAS_SUCCESS) {
ALOGE("JetPlayer::init(): Error initializing Sonivox EAS library, aborting.");
mState = EAS_STATE_ERROR;
return result;
}
// init the JET library with the default app event controller range
result = JET_Init(mEasData, NULL, sizeof(S_JET_CONFIG));
- if( result != EAS_SUCCESS) {
+ if (result != EAS_SUCCESS) {
ALOGE("JetPlayer::init(): Error initializing JET library, aborting.");
mState = EAS_STATE_ERROR;
return result;
@@ -151,7 +151,7 @@
mAudioBuffer = NULL;
}
mEasData = NULL;
-
+
return EAS_SUCCESS;
}
@@ -166,7 +166,7 @@
ALOGV("JetPlayer::render(): entering");
// allocate render buffer
- mAudioBuffer =
+ mAudioBuffer =
new EAS_PCM[pLibConfig->mixBufferSize * pLibConfig->numChannels * MIX_NUM_BUFFERS];
// signal main thread that we started
@@ -177,8 +177,8 @@
mCondition.signal();
}
- while (1) {
-
+ while (1) {
+
mMutex.lock(); // [[[[[[[[ LOCK ---------------------------------------
if (mEasData == NULL) {
@@ -186,20 +186,20 @@
ALOGV("JetPlayer::render(): NULL EAS data, exiting render.");
goto threadExit;
}
-
+
// nothing to render, wait for client thread to wake us up
while (!mRender)
{
ALOGV("JetPlayer::render(): signal wait");
- if (audioStarted) {
- mAudioTrack->pause();
+ if (audioStarted) {
+ mAudioTrack->pause();
// we have to restart the playback once we start rendering again
audioStarted = false;
}
mCondition.wait(mMutex);
ALOGV("JetPlayer::render(): signal rx'd");
}
-
+
// render midi data into the input buffer
int num_output = 0;
EAS_PCM* p = mAudioBuffer;
@@ -210,8 +210,8 @@
}
p += count * pLibConfig->numChannels;
num_output += count * pLibConfig->numChannels * sizeof(EAS_PCM);
-
- // send events that were generated (if any) to the event callback
+
+ // send events that were generated (if any) to the event callback
fireEventsFromJetQueue();
}
@@ -265,9 +265,9 @@
// precondition: mMutex locked
void JetPlayer::fireUpdateOnStatusChange()
{
- if( (mJetStatus.currentUserID != mPreviousJetStatus.currentUserID)
+ if ( (mJetStatus.currentUserID != mPreviousJetStatus.currentUserID)
||(mJetStatus.segmentRepeatCount != mPreviousJetStatus.segmentRepeatCount) ) {
- if(mEventCallback) {
+ if (mEventCallback) {
mEventCallback(
JetPlayer::JET_USERID_UPDATE,
mJetStatus.currentUserID,
@@ -278,8 +278,8 @@
mPreviousJetStatus.segmentRepeatCount = mJetStatus.segmentRepeatCount;
}
- if(mJetStatus.numQueuedSegments != mPreviousJetStatus.numQueuedSegments) {
- if(mEventCallback) {
+ if (mJetStatus.numQueuedSegments != mPreviousJetStatus.numQueuedSegments) {
+ if (mEventCallback) {
mEventCallback(
JetPlayer::JET_NUMQUEUEDSEGMENT_UPDATE,
mJetStatus.numQueuedSegments,
@@ -289,8 +289,8 @@
mPreviousJetStatus.numQueuedSegments = mJetStatus.numQueuedSegments;
}
- if(mJetStatus.paused != mPreviousJetStatus.paused) {
- if(mEventCallback) {
+ if (mJetStatus.paused != mPreviousJetStatus.paused) {
+ if (mEventCallback) {
mEventCallback(JetPlayer::JET_PAUSE_UPDATE,
mJetStatus.paused,
-1,
@@ -307,7 +307,7 @@
// precondition: mMutex locked
void JetPlayer::fireEventsFromJetQueue()
{
- if(!mEventCallback) {
+ if (!mEventCallback) {
// no callback, just empty the event queue
while (JET_GetEvent(mEasData, NULL, NULL)) { }
return;
@@ -341,7 +341,7 @@
mEasJetFileLoc->offset = 0;
EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
- if(result != EAS_SUCCESS)
+ if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
mState = EAS_STATE_OPEN;
@@ -353,7 +353,7 @@
int JetPlayer::loadFromFD(const int fd, const long long offset, const long long length)
{
ALOGV("JetPlayer::loadFromFD(): fd=%d offset=%lld length=%lld", fd, offset, length);
-
+
Mutex::Autolock lock(mMutex);
mEasJetFileLoc = (EAS_FILE_LOCATOR) malloc(sizeof(EAS_FILE));
@@ -361,9 +361,9 @@
mEasJetFileLoc->offset = offset;
mEasJetFileLoc->length = length;
mEasJetFileLoc->path = NULL;
-
+
EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
- if(result != EAS_SUCCESS)
+ if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
mState = EAS_STATE_OPEN;
@@ -392,7 +392,7 @@
JET_Status(mEasData, &mJetStatus);
this->dumpJetStatus(&mJetStatus);
-
+
fireUpdateOnStatusChange();
// wake up render thread
@@ -468,7 +468,7 @@
void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus)
{
- if(pJetStatus!=NULL)
+ if (pJetStatus!=NULL)
ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d paused=%d",
pJetStatus->currentUserID, pJetStatus->segmentRepeatCount,
pJetStatus->numQueuedSegments, pJetStatus->paused);
@@ -478,4 +478,3 @@
} // end namespace android
-
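
The JetPlayer changes are again whitespace and "if (" spacing; the render loop itself keeps its mutex/condition gate. A standard-library rendition of that gate (names illustrative, std::condition_variable standing in for android::Condition):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    std::mutex gMutex;
    std::condition_variable gCondition;
    bool gRender = false;

    // Mirrors the "while (!mRender) mCondition.wait(mMutex)" loop: sleep until a
    // client thread flips the flag, re-checking to survive spurious wakeups.
    static void renderThread() {
        std::unique_lock<std::mutex> lock(gMutex);
        while (!gRender) {
            gCondition.wait(lock);
        }
        // ... render MIDI data into the audio buffer here ...
    }

    int main() {
        std::thread worker(renderThread);
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
        {
            std::lock_guard<std::mutex> lock(gMutex);
            gRender = true;
        }
        gCondition.notify_one();
        worker.join();
        return 0;
    }
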
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 9c460094a..c224f06 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -1099,12 +1099,12 @@
camcorder_quality quality) const
{
ALOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
- name, cameraId, quality);
+ name, cameraId, quality);
int index = getCamcorderProfileIndex(cameraId, quality);
if (index == -1) {
ALOGE("The given camcorder profile camera %d quality %d is not found",
- cameraId, quality);
+ cameraId, quality);
return -1;
}
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index 73d4519..28b5aa7 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -54,7 +54,7 @@
void MediaScanner::loadSkipList() {
mSkipList = (char *)malloc(PROPERTY_VALUE_MAX * sizeof(char));
if (mSkipList) {
- property_get("testing.mediascanner.skiplist", mSkipList, "");
+ property_get("testing.mediascanner.skiplist", mSkipList, "");
}
if (!mSkipList || (strlen(mSkipList) == 0)) {
free(mSkipList);
@@ -135,8 +135,8 @@
struct dirent* entry;
if (shouldSkipDirectory(path)) {
- ALOGD("Skipping: %s", path);
- return MEDIA_SCAN_RESULT_OK;
+ ALOGD("Skipping: %s", path);
+ return MEDIA_SCAN_RESULT_OK;
}
// Treat all files as non-media in directories that contain a ".nomedia" file
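
The MediaScanner change is indentation of the skip-list guard. For orientation, that guard consults a comma-separated list read from the testing.mediascanner.skiplist property; a rough standalone C++ equivalent of the match (helper name hypothetical, simplified from shouldSkipDirectory()):

    #include <cstdio>
    #include <cstring>

    // Returns true when 'path' exactly matches one comma-separated entry of
    // 'skipList'.
    static bool shouldSkip(const char* skipList, const char* path) {
        size_t pathLen = std::strlen(path);
        const char* entry = skipList;
        while (entry != nullptr && *entry != '\0') {
            const char* comma = std::strchr(entry, ',');
            size_t len = comma ? (size_t)(comma - entry) : std::strlen(entry);
            if (len == pathLen && std::strncmp(entry, path, len) == 0) {
                return true;
            }
            entry = comma ? comma + 1 : nullptr;
        }
        return false;
    }

    int main() {
        const char* skipList = "/sdcard/tmp,/sdcard/cache";
        std::printf("%d\n", shouldSkip(skipList, "/sdcard/tmp"));   // 1
        std::printf("%d\n", shouldSkip(skipList, "/sdcard/music")); // 0
        return 0;
    }
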
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index cdfd477..e1e334836 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -228,4 +228,3 @@
}
} // namespace android
-
diff --git a/media/libmedia/Metadata.cpp b/media/libmedia/Metadata.cpp
index 546a9b0..ef8a9ed 100644
--- a/media/libmedia/Metadata.cpp
+++ b/media/libmedia/Metadata.cpp
@@ -57,7 +57,7 @@
Metadata::Metadata(Parcel *p)
:mData(p),
- mBegin(p->dataPosition()) { }
+ mBegin(p->dataPosition()) { }
Metadata::~Metadata() { }
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index 9c3170c..717d316 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -268,8 +268,8 @@
repeatCnt: 0,
repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI
{ segments: { { duration: 0, waveFreq: { 0 }, 0, 0} },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT3
+ repeatCnt: 0,
+ repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT3
{ segments: { { duration: 32, waveFreq: { 2091, 0 }, 0, 0 },
{ duration: 64, waveFreq: { 2556, 0 }, 4, 0 },
{ duration: 20, waveFreq: { 2091, 0 }, 0, 0 },
@@ -1015,7 +1015,7 @@
mpAudioTrack = NULL;
}
- // Open audio track in mono, PCM 16bit, default sampling rate, default buffer size
+ // Open audio track in mono, PCM 16bit, default sampling rate, default buffer size
mpAudioTrack = new AudioTrack();
ALOGV("Create Track: %p", mpAudioTrack);
@@ -1284,9 +1284,9 @@
ALOGV("Cbk starting track");
lpToneGen->mState = TONE_PLAYING;
lSignal = true;
- break;
+ break;
case TONE_PLAYING:
- break;
+ break;
default:
// Force loop exit
lNumSmp = 0;
@@ -1578,4 +1578,3 @@
}
} // end namespace android
-
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index 70f8c0c..bcd6ae4 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -66,7 +66,7 @@
}
}
t->mLock.lock();
- }
+ }
status_t status = AudioEffect::setEnabled(enabled);
@@ -320,4 +320,3 @@
}
}; // namespace android
-
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 8d53357..b0241aa 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -45,7 +45,7 @@
}
ALOGW("MediaPlayerService not published, waiting...");
usleep(500000); // 0.5 s
- } while(true);
+ } while (true);
if (sDeathNotifier == NULL) {
sDeathNotifier = new DeathNotifier();
}
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 4ff1862..7ea9490 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -195,8 +195,8 @@
(mCurrentState != MEDIA_PLAYER_STATE_ERROR) &&
((mCurrentState & MEDIA_PLAYER_IDLE) != MEDIA_PLAYER_IDLE);
if ((mPlayer != NULL) && hasBeenInitialized) {
- ALOGV("invoke %d", request.dataSize());
- return mPlayer->invoke(request, reply);
+ ALOGV("invoke %d", request.dataSize());
+ return mPlayer->invoke(request, reply);
}
ALOGE("invoke failed: wrong state %X", mCurrentState);
return INVALID_OPERATION;
@@ -556,9 +556,9 @@
return BAD_VALUE;
}
if (sessionId != mAudioSessionId) {
- AudioSystem::releaseAudioSessionId(mAudioSessionId);
- AudioSystem::acquireAudioSessionId(sessionId);
- mAudioSessionId = sessionId;
+ AudioSystem::releaseAudioSessionId(mAudioSessionId);
+ AudioSystem::acquireAudioSessionId(sessionId);
+ mAudioSessionId = sessionId;
}
return NO_ERROR;
}
@@ -610,7 +610,7 @@
ALOGV("MediaPlayer::getParameter(%d)", key);
Mutex::Autolock _l(mLock);
if (mPlayer != NULL) {
- return mPlayer->getParameter(key, reply);
+ return mPlayer->getParameter(key, reply);
}
ALOGV("getParameter: no active player");
return INVALID_OPERATION;
@@ -658,7 +658,7 @@
// and seekTo within the same process.
// FIXME: Remember, this is a hack, it's not even a hack that is applied
// consistently for all use-cases, this needs to be revisited.
- if (mLockThreadId != getThreadId()) {
+ if (mLockThreadId != getThreadId()) {
mLock.lock();
locked = true;
}
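
In mediaplayer.cpp the reindented setAudioSessionId() block keeps its release-old/acquire-new ordering. A minimal sketch of that ordering, with free functions standing in for AudioSystem::releaseAudioSessionId()/acquireAudioSessionId():

    #include <cstdio>

    // Illustrative stand-ins for the AudioSystem session-reference calls.
    static void releaseSession(int id) { std::printf("release %d\n", id); }
    static void acquireSession(int id) { std::printf("acquire %d\n", id); }

    struct Player {
        int mAudioSessionId = 1;

        int setAudioSessionId(int sessionId) {
            if (sessionId < 0) {
                return -1;                        // BAD_VALUE in the real code
            }
            if (sessionId != mAudioSessionId) {
                releaseSession(mAudioSessionId);  // drop the reference on the old id
                acquireSession(sessionId);        // before taking one on the new id
                mAudioSessionId = sessionId;
            }
            return 0;                             // NO_ERROR
        }
    };

    int main() {
        Player p;
        return p.setAudioSessionId(2);
    }
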
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index cc73014..9541015 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -31,7 +31,7 @@
status_t MediaRecorder::setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy)
{
ALOGV("setCamera(%p,%p)", camera.get(), proxy.get());
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -52,7 +52,7 @@
status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface)
{
ALOGV("setPreviewSurface(%p)", surface.get());
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -77,7 +77,7 @@
status_t MediaRecorder::init()
{
ALOGV("init");
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -107,7 +107,7 @@
status_t MediaRecorder::setVideoSource(int vs)
{
ALOGV("setVideoSource(%d)", vs);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -142,7 +142,7 @@
status_t MediaRecorder::setAudioSource(int as)
{
ALOGV("setAudioSource(%d)", as);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -175,7 +175,7 @@
status_t MediaRecorder::setOutputFormat(int of)
{
ALOGV("setOutputFormat(%d)", of);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -201,7 +201,7 @@
status_t MediaRecorder::setVideoEncoder(int ve)
{
ALOGV("setVideoEncoder(%d)", ve);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -231,7 +231,7 @@
status_t MediaRecorder::setAudioEncoder(int ae)
{
ALOGV("setAudioEncoder(%d)", ae);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -261,7 +261,7 @@
status_t MediaRecorder::setOutputFile(const char* path)
{
ALOGV("setOutputFile(%s)", path);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -287,7 +287,7 @@
status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
{
ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -324,7 +324,7 @@
status_t MediaRecorder::setVideoSize(int width, int height)
{
ALOGV("setVideoSize(%d, %d)", width, height);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -367,7 +367,7 @@
status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
{
ALOGV("setVideoFrameRate(%d)", frames_per_second);
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -391,7 +391,7 @@
status_t MediaRecorder::setParameters(const String8& params) {
ALOGV("setParameters(%s)", params.string());
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -419,7 +419,7 @@
status_t MediaRecorder::prepare()
{
ALOGV("prepare");
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -458,7 +458,7 @@
status_t MediaRecorder::getMaxAmplitude(int* max)
{
ALOGV("getMaxAmplitude");
- if(mMediaRecorder == NULL) {
+ if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
@@ -536,7 +536,7 @@
doCleanUp();
status_t ret = UNKNOWN_ERROR;
- switch(mCurrentState) {
+ switch (mCurrentState) {
case MEDIA_RECORDER_IDLE:
ret = OK;
break;
@@ -547,7 +547,7 @@
case MEDIA_RECORDER_ERROR: {
ret = doReset();
if (OK != ret) {
- return ret; // No need to continue
+ return ret; // No need to continue
}
} // Intentional fall through
case MEDIA_RECORDER_INITIALIZED:
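
Every mediarecorder.cpp hunk is the same "if(" to "if (" fix on the early-return guard each setter starts with. That guard pattern, sketched standalone (Recorder and its field are illustrative, not the real MediaRecorder class):

    #include <cstdio>

    enum Status { OK = 0, INVALID_OPERATION = -38 };

    struct Recorder {
        void* mMediaRecorder = nullptr;   // stands in for the sp<IMediaRecorder>

        Status setVideoSource(int vs) {
            std::printf("setVideoSource(%d)\n", vs);
            if (mMediaRecorder == nullptr) {
                std::fprintf(stderr, "media recorder is not initialized yet\n");
                return INVALID_OPERATION;
            }
            // ... forward the call over Binder here ...
            return OK;
        }
    };

    int main() {
        Recorder r;
        return r.setVideoSource(1) == INVALID_OPERATION ? 0 : 1;
    }
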
diff --git a/media/libstagefright/timedtext/TimedText3GPPSource.h b/media/libstagefright/timedtext/TimedText3GPPSource.h
index cb7e47c..dfc6418 100644
--- a/media/libstagefright/timedtext/TimedText3GPPSource.h
+++ b/media/libstagefright/timedtext/TimedText3GPPSource.h
@@ -28,26 +28,26 @@
class Parcel;
class TimedText3GPPSource : public TimedTextSource {
- public:
- TimedText3GPPSource(const sp<MediaSource>& mediaSource);
- virtual status_t start() { return mSource->start(); }
- virtual status_t stop() { return mSource->stop(); }
- virtual status_t read(
- int64_t *timeUs,
- Parcel *parcel,
- const MediaSource::ReadOptions *options = NULL);
- virtual status_t extractGlobalDescriptions(Parcel *parcel);
+public:
+ TimedText3GPPSource(const sp<MediaSource>& mediaSource);
+ virtual status_t start() { return mSource->start(); }
+ virtual status_t stop() { return mSource->stop(); }
+ virtual status_t read(
+ int64_t *timeUs,
+ Parcel *parcel,
+ const MediaSource::ReadOptions *options = NULL);
+ virtual status_t extractGlobalDescriptions(Parcel *parcel);
- protected:
- virtual ~TimedText3GPPSource();
+protected:
+ virtual ~TimedText3GPPSource();
- private:
- sp<MediaSource> mSource;
+private:
+ sp<MediaSource> mSource;
- status_t extractAndAppendLocalDescriptions(
- int64_t timeUs, const MediaBuffer *textBuffer, Parcel *parcel);
+ status_t extractAndAppendLocalDescriptions(
+ int64_t timeUs, const MediaBuffer *textBuffer, Parcel *parcel);
- DISALLOW_EVIL_CONSTRUCTORS(TimedText3GPPSource);
+ DISALLOW_EVIL_CONSTRUCTORS(TimedText3GPPSource);
};
} // namespace android
diff --git a/media/libstagefright/timedtext/TimedTextSRTSource.h b/media/libstagefright/timedtext/TimedTextSRTSource.h
index a0734d9..acc01f9 100644
--- a/media/libstagefright/timedtext/TimedTextSRTSource.h
+++ b/media/libstagefright/timedtext/TimedTextSRTSource.h
@@ -31,43 +31,43 @@
class Parcel;
class TimedTextSRTSource : public TimedTextSource {
- public:
- TimedTextSRTSource(const sp<DataSource>& dataSource);
- virtual status_t start();
- virtual status_t stop();
- virtual status_t read(
- int64_t *timeUs,
- Parcel *parcel,
- const MediaSource::ReadOptions *options = NULL);
+public:
+ TimedTextSRTSource(const sp<DataSource>& dataSource);
+ virtual status_t start();
+ virtual status_t stop();
+ virtual status_t read(
+ int64_t *timeUs,
+ Parcel *parcel,
+ const MediaSource::ReadOptions *options = NULL);
- protected:
- virtual ~TimedTextSRTSource();
+protected:
+ virtual ~TimedTextSRTSource();
- private:
- sp<DataSource> mSource;
+private:
+ sp<DataSource> mSource;
- struct TextInfo {
- int64_t endTimeUs;
- // The offset of the text in the original file.
- off64_t offset;
- int textLen;
- };
+ struct TextInfo {
+ int64_t endTimeUs;
+ // The offset of the text in the original file.
+ off64_t offset;
+ int textLen;
+ };
- int mIndex;
- KeyedVector<int64_t, TextInfo> mTextVector;
+ int mIndex;
+ KeyedVector<int64_t, TextInfo> mTextVector;
- void reset();
- status_t scanFile();
- status_t getNextSubtitleInfo(
- off64_t *offset, int64_t *startTimeUs, TextInfo *info);
- status_t readNextLine(off64_t *offset, AString *data);
- status_t getText(
- const MediaSource::ReadOptions *options,
- AString *text, int64_t *startTimeUs, int64_t *endTimeUs);
- status_t extractAndAppendLocalDescriptions(
- int64_t timeUs, const AString &text, Parcel *parcel);
+ void reset();
+ status_t scanFile();
+ status_t getNextSubtitleInfo(
+ off64_t *offset, int64_t *startTimeUs, TextInfo *info);
+ status_t readNextLine(off64_t *offset, AString *data);
+ status_t getText(
+ const MediaSource::ReadOptions *options,
+ AString *text, int64_t *startTimeUs, int64_t *endTimeUs);
+ status_t extractAndAppendLocalDescriptions(
+ int64_t timeUs, const AString &text, Parcel *parcel);
- DISALLOW_EVIL_CONSTRUCTORS(TimedTextSRTSource);
+ DISALLOW_EVIL_CONSTRUCTORS(TimedTextSRTSource);
};
} // namespace android
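
The two timed-text headers are reindented so access specifiers sit at column 0 and members at four spaces. A tiny illustrative class (not the real TimedTextSource API) in that layout:

    // Layout convention only: "public:"/"protected:"/"private:" flush left,
    // members indented by four spaces.
    class TimedTextExampleSource {
    public:
        explicit TimedTextExampleSource(int fd) : mFd(fd) {}
        int start() { return 0; }
        int stop()  { return 0; }

    protected:
        int fd() const { return mFd; }

    private:
        int mFd;
    };

    int main() {
        TimedTextExampleSource src(3);
        return src.start();
    }
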
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 81b9464..bfa4a49 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -190,13 +190,13 @@
continue;
ALOGI("Loaded %s audio interface from %s (%s)", audio_interfaces[i],
- mod->name, mod->id);
+ mod->name, mod->id);
mAudioHwDevs.push(dev);
if (mPrimaryHardwareDev == NULL) {
mPrimaryHardwareDev = dev;
ALOGI("Using '%s' (%s.%s) as the primary audio interface",
- mod->name, mod->id, audio_interfaces[i]);
+ mod->name, mod->id, audio_interfaces[i]);
}
}
@@ -515,7 +515,7 @@
}
Exit:
- if(status) {
+ if (status != NULL) {
*status = lStatus;
}
return trackHandle;
@@ -610,7 +610,7 @@
mMasterVolume = value;
mMasterVolumeSW = swmv;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
- mPlaybackThreads.valueAt(i)->setMasterVolume(swmv);
+ mPlaybackThreads.valueAt(i)->setMasterVolume(swmv);
return NO_ERROR;
}
@@ -642,7 +642,7 @@
Mutex::Autolock _l(mLock);
mMode = mode;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
- mPlaybackThreads.valueAt(i)->setMode(mode);
+ mPlaybackThreads.valueAt(i)->setMode(mode);
}
return ret;
@@ -693,7 +693,7 @@
// This is an optimization, so PlaybackThread doesn't have to look at the one from AudioFlinger
mMasterMute = muted;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
- mPlaybackThreads.valueAt(i)->setMasterMute(muted);
+ mPlaybackThreads.valueAt(i)->setMasterMute(muted);
return NO_ERROR;
}
@@ -723,8 +723,9 @@
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_MASTER_VOLUME;
- assert(NULL != mPrimaryHardwareDev);
- assert(NULL != mPrimaryHardwareDev->get_master_volume);
+ ALOG_ASSERT((NULL != mPrimaryHardwareDev) &&
+ (NULL != mPrimaryHardwareDev->get_master_volume),
+ "can't get master volume");
mPrimaryHardwareDev->get_master_volume(mPrimaryHardwareDev, &ret_val);
mHardwareStatus = AUDIO_HW_IDLE;
@@ -760,7 +761,7 @@
if (thread == NULL) {
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
- mPlaybackThreads.valueAt(i)->setStreamVolume(stream, value);
+ mPlaybackThreads.valueAt(i)->setStreamVolume(stream, value);
}
} else {
thread->setStreamVolume(stream, value);
@@ -785,7 +786,7 @@
AutoMutex lock(mLock);
mStreamTypes[stream].mute = muted;
for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
- mPlaybackThreads.valueAt(i)->setStreamMute(stream, muted);
+ mPlaybackThreads.valueAt(i)->setStreamMute(stream, muted);
return NO_ERROR;
}
@@ -1160,7 +1161,7 @@
void AudioFlinger::ThreadBase::processConfigEvents()
{
mLock.lock();
- while(!mConfigEvents.isEmpty()) {
+ while (!mConfigEvents.isEmpty()) {
ALOGV("processConfigEvents() remaining events %d", mConfigEvents.size());
ConfigEvent configEvent = mConfigEvents[0];
mConfigEvents.removeAt(0);
@@ -1344,13 +1345,13 @@
mSuspendedSessions.editValueAt(index);
for (size_t i = 0; i < sessionEffects.size(); i++) {
- sp <SuspendedSessionDesc> desc = sessionEffects.valueAt(i);
+ sp<SuspendedSessionDesc> desc = sessionEffects.valueAt(i);
for (int j = 0; j < desc->mRefCount; j++) {
if (sessionEffects.keyAt(i) == EffectChain::kKeyForSuspendAll) {
chain->setEffectSuspendedAll_l(true);
} else {
ALOGV("checkSuspendOnAddEffectChain_l() suspending effects %08x",
- desc->mType.timeLow);
+ desc->mType.timeLow);
chain->setEffectSuspended_l(&desc->mType, true);
}
}
@@ -1385,7 +1386,7 @@
}
index = sessionEffects.indexOfKey(key);
- sp <SuspendedSessionDesc> desc;
+ sp<SuspendedSessionDesc> desc;
if (suspend) {
if (index >= 0) {
desc = sessionEffects.valueAt(index);
@@ -1658,14 +1659,14 @@
// createTrack() was called by the client process.
if (!mStreamTypes[streamType].valid) {
ALOGW("createTrack_l() on thread %p: invalidating track on stream %d",
- this, streamType);
+ this, streamType);
android_atomic_or(CBLK_INVALID_ON, &track->mCblk->flags);
}
}
lStatus = NO_ERROR;
Exit:
- if(status) {
+ if (status) {
*status = lStatus;
}
return track;
@@ -2644,10 +2645,10 @@
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
if (!mStandby && status == INVALID_OPERATION) {
- mOutput->stream->common.standby(&mOutput->stream->common);
- mStandby = true;
- mBytesWritten = 0;
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
+ mOutput->stream->common.standby(&mOutput->stream->common);
+ mStandby = true;
+ mBytesWritten = 0;
+ status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
}
if (status == NO_ERROR && reconfig) {
@@ -2855,7 +2856,7 @@
tracksToRemove->add(trackToRemove);
mActiveTracks.remove(trackToRemove);
if (!mEffectChains.isEmpty()) {
- ALOGV("stopping track on chain %p for session Id: %d", effectChains[0].get(),
+ ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(),
trackToRemove->sessionId());
mEffectChains[0]->decActiveTrackCnt();
}
@@ -2901,7 +2902,7 @@
size_t count = mFrameCount * mChannelCount;
uint8_t *src = (uint8_t *)mMixBuffer + count-1;
int16_t *dst = mMixBuffer + count-1;
- while(count--) {
+ while (count--) {
*dst-- = (int16_t)(*src--^0x80) << 8;
}
}
@@ -2954,7 +2955,7 @@
size_t count = mFrameCount * mChannelCount;
int16_t *src = mMixBuffer;
uint8_t *dst = (uint8_t *)mMixBuffer;
- while(count--) {
+ while (count--) {
*dst++ = (uint8_t)(((int32_t)*src++ + (1<<7)) >> 8)^0x80;
}
}
@@ -3013,10 +3014,10 @@
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
if (!mStandby && status == INVALID_OPERATION) {
- mOutput->stream->common.standby(&mOutput->stream->common);
- mStandby = true;
- mBytesWritten = 0;
- status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
+ mOutput->stream->common.standby(&mOutput->stream->common);
+ mStandby = true;
+ mBytesWritten = 0;
+ status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
}
if (status == NO_ERROR && reconfig) {
@@ -3207,7 +3208,7 @@
bool AudioFlinger::DuplicatingThread::outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks)
{
for (size_t i = 0; i < outputTracks.size(); i++) {
- sp <ThreadBase> thread = outputTracks[i]->thread().promote();
+ sp<ThreadBase> thread = outputTracks[i]->thread().promote();
if (thread == 0) {
ALOGW("DuplicatingThread::outputsReady() could not promote thread on output track %p", outputTracks[i].get());
return false;
@@ -3263,14 +3264,14 @@
ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size());
// ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize);
- size_t size = sizeof(audio_track_cblk_t);
- uint8_t channelCount = popcount(channelMask);
- size_t bufferSize = frameCount*channelCount*sizeof(int16_t);
- if (sharedBuffer == 0) {
- size += bufferSize;
- }
+ size_t size = sizeof(audio_track_cblk_t);
+ uint8_t channelCount = popcount(channelMask);
+ size_t bufferSize = frameCount*channelCount*sizeof(int16_t);
+ if (sharedBuffer == 0) {
+ size += bufferSize;
+ }
- if (client != NULL) {
+ if (client != NULL) {
mCblkMemory = client->heap()->allocate(size);
if (mCblkMemory != 0) {
mCblk = static_cast<audio_track_cblk_t *>(mCblkMemory->pointer());
@@ -3297,22 +3298,22 @@
client->heap()->dump("AudioTrack");
return;
}
- } else {
- mCblk = (audio_track_cblk_t *)(new uint8_t[size]);
- // construct the shared structure in-place.
- new(mCblk) audio_track_cblk_t();
- // clear all buffers
- mCblk->frameCount = frameCount;
- mCblk->sampleRate = sampleRate;
- mChannelCount = channelCount;
- mChannelMask = channelMask;
- mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t);
- memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t));
- // Force underrun condition to avoid false underrun callback until first data is
- // written to buffer (other flags are cleared)
- mCblk->flags = CBLK_UNDERRUN_ON;
- mBufferEnd = (uint8_t *)mBuffer + bufferSize;
- }
+ } else {
+ mCblk = (audio_track_cblk_t *)(new uint8_t[size]);
+ // construct the shared structure in-place.
+ new(mCblk) audio_track_cblk_t();
+ // clear all buffers
+ mCblk->frameCount = frameCount;
+ mCblk->sampleRate = sampleRate;
+ mChannelCount = channelCount;
+ mChannelMask = channelMask;
+ mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t);
+ memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t));
+ // Force underrun condition to avoid false underrun callback until first data is
+ // written to buffer (other flags are cleared)
+ mCblk->flags = CBLK_UNDERRUN_ON;
+ mBufferEnd = (uint8_t *)mBuffer + bufferSize;
+ }
}
AudioFlinger::ThreadBase::TrackBase::~TrackBase()
@@ -3490,22 +3491,22 @@
// AudioBufferProvider interface
status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(
- AudioBufferProvider::Buffer* buffer, int64_t pts)
+ AudioBufferProvider::Buffer* buffer, int64_t pts)
{
- audio_track_cblk_t* cblk = this->cblk();
- uint32_t framesReady;
- uint32_t framesReq = buffer->frameCount;
+ audio_track_cblk_t* cblk = this->cblk();
+ uint32_t framesReady;
+ uint32_t framesReq = buffer->frameCount;
- // Check if last stepServer failed, try to step now
- if (mStepServerFailed) {
- if (!step()) goto getNextBuffer_exit;
- ALOGV("stepServer recovered");
- mStepServerFailed = false;
- }
+ // Check if last stepServer failed, try to step now
+ if (mStepServerFailed) {
+ if (!step()) goto getNextBuffer_exit;
+ ALOGV("stepServer recovered");
+ mStepServerFailed = false;
+ }
- framesReady = cblk->framesReady();
+ framesReady = cblk->framesReady();
- if (CC_LIKELY(framesReady)) {
+ if (CC_LIKELY(framesReady)) {
uint32_t s = cblk->server;
uint32_t bufferEnd = cblk->serverBase + cblk->frameCount;
@@ -3517,21 +3518,21 @@
framesReq = bufferEnd - s;
}
- buffer->raw = getBuffer(s, framesReq);
- if (buffer->raw == NULL) goto getNextBuffer_exit;
+ buffer->raw = getBuffer(s, framesReq);
+ if (buffer->raw == NULL) goto getNextBuffer_exit;
- buffer->frameCount = framesReq;
+ buffer->frameCount = framesReq;
return NO_ERROR;
- }
+ }
getNextBuffer_exit:
- buffer->raw = NULL;
- buffer->frameCount = 0;
- ALOGV("getNextBuffer() no more data for track %d on thread %p", mName, mThread.unsafe_get());
- return NOT_ENOUGH_DATA;
+ buffer->raw = NULL;
+ buffer->frameCount = 0;
+ ALOGV("getNextBuffer() no more data for track %d on thread %p", mName, mThread.unsafe_get());
+ return NOT_ENOUGH_DATA;
}
-uint32_t AudioFlinger::PlaybackThread::Track::framesReady() const{
+uint32_t AudioFlinger::PlaybackThread::Track::framesReady() const {
return mCblk->framesReady();
}
@@ -3683,8 +3684,8 @@
status_t status = DEAD_OBJECT;
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
- PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
- status = playbackThread->attachAuxEffect(this, EffectId);
+ PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+ status = playbackThread->attachAuxEffect(this, EffectId);
}
return status;
}
@@ -3884,7 +3885,7 @@
{
Mutex::Autolock mttLock(mMediaTimeTransformLock);
- assert(mMediaTimeTransformValid);
+ ALOG_ASSERT(mMediaTimeTransformValid, "media time transform invalid");
if (mMediaTimeTransform.a_to_b_denom == 0) {
// the transform represents a pause, so yield silence
@@ -4103,14 +4104,14 @@
mOverflow(false)
{
if (mCblk != NULL) {
- ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer);
- if (format == AUDIO_FORMAT_PCM_16_BIT) {
- mCblk->frameSize = mChannelCount * sizeof(int16_t);
- } else if (format == AUDIO_FORMAT_PCM_8_BIT) {
- mCblk->frameSize = mChannelCount * sizeof(int8_t);
- } else {
- mCblk->frameSize = sizeof(int8_t);
- }
+ ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer);
+ if (format == AUDIO_FORMAT_PCM_16_BIT) {
+ mCblk->frameSize = mChannelCount * sizeof(int16_t);
+ } else if (format == AUDIO_FORMAT_PCM_8_BIT) {
+ mCblk->frameSize = mChannelCount * sizeof(int8_t);
+ } else {
+ mCblk->frameSize = sizeof(int8_t);
+ }
}
}
@@ -4129,7 +4130,7 @@
uint32_t framesAvail;
uint32_t framesReq = buffer->frameCount;
- // Check if last stepServer failed, try to step now
+ // Check if last stepServer failed, try to step now
if (mStepServerFailed) {
if (!step()) goto getNextBuffer_exit;
ALOGV("stepServer recovered");
@@ -4975,7 +4976,7 @@
status_t AudioFlinger::RecordThread::start(RecordThread::RecordTrack* recordTrack, pid_t tid)
{
ALOGV("RecordThread::start tid=%d", tid);
- sp <ThreadBase> strongMe = this;
+ sp<ThreadBase> strongMe = this;
status_t status = NO_ERROR;
{
AutoMutex lock(mLock);
@@ -5028,7 +5029,7 @@
void AudioFlinger::RecordThread::stop(RecordThread::RecordTrack* recordTrack) {
ALOGV("RecordThread::stop");
- sp <ThreadBase> strongMe = this;
+ sp<ThreadBase> strongMe = this;
{
AutoMutex lock(mLock);
if (mActiveTrack != 0 && recordTrack == mActiveTrack.get()) {
@@ -5195,8 +5196,9 @@
if (status == NO_ERROR) {
status = mInput->stream->common.set_parameters(&mInput->stream->common, keyValuePair.string());
if (status == INVALID_OPERATION) {
- mInput->stream->common.standby(&mInput->stream->common);
- status = mInput->stream->common.set_parameters(&mInput->stream->common, keyValuePair.string());
+ mInput->stream->common.standby(&mInput->stream->common);
+ status = mInput->stream->common.set_parameters(&mInput->stream->common,
+ keyValuePair.string());
}
if (reconfig) {
if (status == BAD_VALUE &&
@@ -5284,8 +5286,8 @@
if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2)
{
int channelCount;
- // optmization: if mono to mono, use the resampler in stereo to stereo mode to avoid
- // stereo to mono post process as the resampler always outputs stereo.
+ // optimization: if mono to mono, use the resampler in stereo to stereo mode to avoid
+ // stereo to mono post process as the resampler always outputs stereo.
if (mChannelCount == 1 && mReqChannelCount == 2) {
channelCount = 1;
} else {
@@ -5459,7 +5461,7 @@
{
// keep strong reference on the playback thread so that
// it is not destroyed while exit() is executed
- sp <PlaybackThread> thread;
+ sp<PlaybackThread> thread;
{
Mutex::Autolock _l(mLock);
thread = checkPlaybackThread_l(output);
@@ -5486,7 +5488,7 @@
if (thread->type() != ThreadBase::DUPLICATING) {
AudioStreamOut *out = thread->clearOutput();
- assert(out != NULL);
+ ALOG_ASSERT(out != NULL, "out shouldn't be NULL");
// from now on thread->mOutput is NULL
out->hwDev->close_output_stream(out->hwDev, out->stream);
delete out;
@@ -5612,7 +5614,7 @@
{
// keep strong reference on the record thread so that
// it is not destroyed while exit() is executed
- sp <RecordThread> thread;
+ sp<RecordThread> thread;
{
Mutex::Autolock _l(mLock);
thread = checkRecordThread_l(input);
@@ -5629,7 +5631,7 @@
// but the ThreadBase container still exists.
AudioStreamIn *in = thread->clearInput();
- assert(in != NULL);
+ ALOG_ASSERT(in != NULL, "in shouldn't be NULL");
// from now on thread->mInput is NULL
in->hwDev->close_input_stream(in->hwDev, in->stream);
delete in;
@@ -5745,7 +5747,7 @@
AudioSessionRef *ref = mAudioSessionRefs.itemAt(k);
if (ref->mSessionid == sessionid) {
ALOGV(" session %d still exists for %d with %d refs",
- sessionid, ref->mPid, ref->mCnt);
+ sessionid, ref->mPid, ref->mCnt);
found = true;
break;
}
@@ -5978,7 +5980,7 @@
// because of code checking output when entering the function.
// Note: io is never 0 when creating an effect on an input
if (io == 0) {
- // look for the thread where the specified audio session is present
+ // look for the thread where the specified audio session is present
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) {
io = mPlaybackThreads.keyAt(i);
@@ -5986,12 +5988,12 @@
}
}
if (io == 0) {
- for (size_t i = 0; i < mRecordThreads.size(); i++) {
- if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) {
- io = mRecordThreads.keyAt(i);
- break;
- }
- }
+ for (size_t i = 0; i < mRecordThreads.size(); i++) {
+ if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) {
+ io = mRecordThreads.keyAt(i);
+ break;
+ }
+ }
}
// If no output thread contains the requested session ID, default to
// first output. The effect chain will be moved to the correct output
@@ -6022,7 +6024,7 @@
}
Exit:
- if(status) {
+ if (status != NULL) {
*status = lStatus;
}
return handle;
@@ -6225,7 +6227,7 @@
handle.clear();
}
- if(status) {
+ if (status != NULL) {
*status = lStatus;
}
return handle;
@@ -6295,7 +6297,7 @@
}
void AudioFlinger::ThreadBase::lockEffectChains_l(
- Vector<sp <AudioFlinger::EffectChain> >& effectChains)
+ Vector< sp<AudioFlinger::EffectChain> >& effectChains)
{
effectChains = mEffectChains;
for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -6304,7 +6306,7 @@
}
void AudioFlinger::ThreadBase::unlockEffectChains(
- const Vector<sp <AudioFlinger::EffectChain> >& effectChains)
+ const Vector< sp<AudioFlinger::EffectChain> >& effectChains)
{
for (size_t i = 0; i < effectChains.size(); i++) {
effectChains[i]->unlock();
@@ -6480,7 +6482,7 @@
void AudioFlinger::PlaybackThread::detachAuxEffect_l(int effectId)
{
- for (size_t i = 0; i < mTracks.size(); ++i) {
+ for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
if (track->auxEffectId() == effectId) {
attachAuxEffect_l(track, 0);
@@ -7264,7 +7266,7 @@
if (mCblk != NULL) {
new(mCblk) effect_param_cblk_t();
mBuffer = (uint8_t *)mCblk + bufOffset;
- }
+ }
} else {
ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE + sizeof(effect_param_cblk_t));
return;
@@ -8031,7 +8033,7 @@
}
}
ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x",
- effect->desc().type.timeLow);
+ effect->desc().type.timeLow);
sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
// if the effect is requested to be suspended but was not yet enabled, suspend it now.
if (desc->mEffect == 0) {
@@ -8044,7 +8046,7 @@
return;
}
ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x",
- effect->desc().type.timeLow);
+ effect->desc().type.timeLow);
sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
desc->mEffect.clear();
effect->setSuspended(false);
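
Besides the indentation work, AudioFlinger.cpp swaps bare assert() for ALOG_ASSERT() so failures carry a message (and log through liblog). A portable sketch of that idea, with EXAMPLE_ASSERT as a hypothetical stand-in for the real macro:

    #include <cstdio>
    #include <cstdlib>

    // Stand-in for ALOG_ASSERT(cond, fmt, ...): unlike assert(), the failure
    // is reported with a formatted message before aborting.
    #define EXAMPLE_ASSERT(cond, ...)                                    \
        do {                                                             \
            if (!(cond)) {                                               \
                std::fprintf(stderr, "assertion failed: " __VA_ARGS__);  \
                std::fprintf(stderr, "\n");                              \
                std::abort();                                            \
            }                                                            \
        } while (0)

    struct AudioStreamOut { int fd; };

    static void closeOutput(AudioStreamOut* out) {
        // Mirrors the change from assert(out != NULL) to
        // ALOG_ASSERT(out != NULL, "out shouldn't be NULL").
        EXAMPLE_ASSERT(out != nullptr, "out shouldn't be NULL");
        // ... close the stream here ...
    }

    int main() {
        AudioStreamOut out = { 3 };
        closeOutput(&out);
        return 0;
    }
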
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index f3c8dd2..0e4b24aa 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -465,9 +465,9 @@
// ThreadBase mutex before processing the mixer and effects. This guarantees the
// integrity of the chains during the process.
// Also sets the parameter 'effectChains' to current value of mEffectChains.
- void lockEffectChains_l(Vector<sp <EffectChain> >& effectChains);
+ void lockEffectChains_l(Vector< sp<EffectChain> >& effectChains);
// unlock effect chains after process
- void unlockEffectChains(const Vector<sp<EffectChain> >& effectChains);
+ void unlockEffectChains(const Vector< sp<EffectChain> >& effectChains);
// set audio mode to all effect chains
void setMode(audio_mode_t mode);
// get effect module with corresponding ID on specified audio session
@@ -1056,7 +1056,7 @@
virtual uint32_t activeSleepTimeUs();
private:
- bool outputsReady(const SortedVector<sp<OutputTrack> > &outputTracks);
+ bool outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks);
protected:
// threadLoop snippets
virtual void threadLoop_mix();
@@ -1504,7 +1504,7 @@
uint32_t strategy() const { return mStrategy; }
void setStrategy(uint32_t strategy)
- { mStrategy = strategy; }
+ { mStrategy = strategy; }
// suspend effect of the given type
void setEffectSuspended_l(const effect_uuid_t *type,
@@ -1544,7 +1544,7 @@
wp<ThreadBase> mThread; // parent mixer thread
Mutex mLock; // mutex protecting effect list
- Vector<sp<EffectModule> > mEffects; // list of effect modules
+ Vector< sp<EffectModule> > mEffects; // list of effect modules
int mSessionId; // audio session ID
int16_t *mInBuffer; // chain input buffer
int16_t *mOutBuffer; // chain output buffer
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 2cec525..1ec238b 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "AudioMixer"
//#define LOG_NDEBUG 0
-#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
@@ -29,6 +28,7 @@
#include <cutils/bitops.h>
#include <cutils/compiler.h>
+#include <utils/Debug.h>
#include <system/audio.h>
@@ -46,7 +46,7 @@
: mTrackNames(0), mSampleRate(sampleRate)
{
// AudioMixer is not yet capable of multi-channel beyond stereo
- assert(2 == MAX_NUM_CHANNELS);
+ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS);
LocalClock lc;
@@ -124,7 +124,7 @@
void AudioMixer::deleteTrackName(int name)
{
name -= TRACK0;
- assert(uint32_t(name) < MAX_NUM_TRACKS);
+ ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
ALOGV("deleteTrackName(%d)", name);
track_t& track(mState.tracks[ name ]);
if (track.enabled) {
@@ -146,7 +146,7 @@
void AudioMixer::enable(int name)
{
name -= TRACK0;
- assert(uint32_t(name) < MAX_NUM_TRACKS);
+ ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
track_t& track = mState.tracks[name];
if (!track.enabled) {
@@ -159,7 +159,7 @@
void AudioMixer::disable(int name)
{
name -= TRACK0;
- assert(uint32_t(name) < MAX_NUM_TRACKS);
+ ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
track_t& track = mState.tracks[name];
if (track.enabled) {
@@ -172,7 +172,7 @@
void AudioMixer::setParameter(int name, int target, int param, void *value)
{
name -= TRACK0;
- assert(uint32_t(name) < MAX_NUM_TRACKS);
+ ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
track_t& track = mState.tracks[name];
int valueInt = (int)value;
@@ -185,8 +185,9 @@
case CHANNEL_MASK: {
uint32_t mask = (uint32_t)value;
if (track.channelMask != mask) {
- uint8_t channelCount = popcount(mask);
- assert((channelCount <= MAX_NUM_CHANNELS) && (channelCount));
+ uint32_t channelCount = popcount(mask);
+ ALOG_ASSERT((channelCount <= MAX_NUM_CHANNELS) && (channelCount),
+ "bad channel count %u", channelCount);
track.channelMask = mask;
track.channelCount = channelCount;
ALOGV("setParameter(TRACK, CHANNEL_MASK, %x)", mask);
@@ -208,15 +209,14 @@
}
break;
default:
- // bad param
- assert(false);
+ LOG_FATAL("bad param");
}
break;
case RESAMPLE:
switch (param) {
case SAMPLE_RATE:
- assert(valueInt > 0);
+ ALOG_ASSERT(valueInt > 0, "bad sample rate %d", valueInt);
if (track.setResampler(uint32_t(valueInt), mSampleRate)) {
ALOGV("setParameter(RESAMPLE, SAMPLE_RATE, %u)",
uint32_t(valueInt));
@@ -228,8 +228,7 @@
invalidateState(1 << name);
break;
default:
- // bad param
- assert(false);
+ LOG_FATAL("bad param");
}
break;
@@ -257,7 +256,7 @@
}
break;
case AUXLEVEL:
- //assert(0 <= valueInt && valueInt <= MAX_GAIN_INT);
+ //ALOG_ASSERT(0 <= valueInt && valueInt <= MAX_GAIN_INT, "bad aux level %d", valueInt);
if (track.auxLevel != valueInt) {
ALOGV("setParameter(VOLUME, AUXLEVEL: %04x)", valueInt);
track.prevAuxLevel = track.auxLevel << 16;
@@ -277,14 +276,12 @@
}
break;
default:
- // bad param
- assert(false);
+ LOG_FATAL("bad param");
}
break;
default:
- // bad target
- assert(false);
+ LOG_FATAL("bad target");
}
}
@@ -335,7 +332,7 @@
void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider)
{
name -= TRACK0;
- assert(uint32_t(name) < MAX_NUM_TRACKS);
+ ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name);
mState.tracks[name].bufferProvider = bufferProvider;
}
@@ -979,9 +976,9 @@
// This method is only called when state->enabledTracks has exactly
// one bit set. The asserts below would verify this, but are commented out
// since the whole point of this method is to optimize performance.
- //assert(0 != state->enabledTracks);
+ //ALOG_ASSERT(0 != state->enabledTracks, "no tracks enabled");
const int i = 31 - __builtin_clz(state->enabledTracks);
- //assert((1 << i) == state->enabledTracks);
+ //ALOG_ASSERT((1 << i) == state->enabledTracks, "more than 1 track enabled");
const track_t& t = state->tracks[i];
AudioBufferProvider::Buffer& b(t.buffer);
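
AudioMixer.cpp likewise replaces assert() with ALOG_ASSERT()/LOG_FATAL and turns the stereo-only assumption into a compile-time check via utils/Debug.h. With a C++11 toolchain the same compile-time check is expressible as static_assert; a sketch (constant and function names illustrative):

    #include <cstdint>

    static const uint32_t MAX_NUM_CHANNELS = 2;

    int mixerInit() {
        // Fails the build, not the process, if the stereo assumption ever breaks.
        static_assert(MAX_NUM_CHANNELS == 2,
                      "AudioMixer is not yet capable of multi-channel beyond stereo");
        return 0;
    }

    int main() { return mixerInit(); }
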
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index d57326b..c23eb04 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -649,7 +649,7 @@
mLock.lock();
while (!exitPending())
{
- while(!mAudioCommands.isEmpty()) {
+ while (!mAudioCommands.isEmpty()) {
nsecs_t curTime = systemTime();
// commands are sorted by increasing time stamp: execute them from index 0 and up
if (mAudioCommands[0]->mTime <= curTime) {
@@ -693,16 +693,16 @@
delete data;
}break;
case SET_PARAMETERS: {
- ParametersData *data = (ParametersData *)command->mParam;
- ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
- data->mKeyValuePairs.string(), data->mIO);
- command->mStatus = AudioSystem::setParameters(data->mIO, data->mKeyValuePairs);
- if (command->mWaitStatus) {
- command->mCond.signal();
- mWaitWorkCV.wait(mLock);
- }
- delete data;
- }break;
+ ParametersData *data = (ParametersData *)command->mParam;
+ ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
+ data->mKeyValuePairs.string(), data->mIO);
+ command->mStatus = AudioSystem::setParameters(data->mIO, data->mKeyValuePairs);
+ if (command->mWaitStatus) {
+ command->mCond.signal();
+ mWaitWorkCV.wait(mLock);
+ }
+ delete data;
+ }break;
case SET_VOICE_VOLUME: {
VoiceVolumeData *data = (VoiceVolumeData *)command->mParam;
ALOGV("AudioCommandThread() processing set voice volume volume %f",
@@ -916,19 +916,19 @@
AudioParameter param = AudioParameter(data->mKeyValuePairs);
AudioParameter param2 = AudioParameter(data2->mKeyValuePairs);
for (size_t j = 0; j < param.size(); j++) {
- String8 key;
- String8 value;
- param.getAt(j, key, value);
- for (size_t k = 0; k < param2.size(); k++) {
- String8 key2;
- String8 value2;
- param2.getAt(k, key2, value2);
- if (key2 == key) {
- param2.remove(key2);
- ALOGV("Filtering out parameter %s", key2.string());
- break;
- }
- }
+ String8 key;
+ String8 value;
+ param.getAt(j, key, value);
+ for (size_t k = 0; k < param2.size(); k++) {
+ String8 key2;
+ String8 value2;
+ param2.getAt(k, key2, value2);
+ if (key2 == key) {
+ param2.remove(key2);
+ ALOGV("Filtering out parameter %s", key2.string());
+ break;
+ }
+ }
}
// if all keys have been filtered out, remove the command.
// otherwise, update the key value pairs
@@ -1020,7 +1020,7 @@
ALOGE("startTone: illegal tone requested (%d)", tone);
if (stream != AUDIO_STREAM_VOICE_CALL)
ALOGE("startTone: illegal stream (%d) requested for tone %d", stream,
- tone);
+ tone);
mTonePlaybackThread->startToneCommand(ToneGenerator::TONE_SUP_CALL_WAITING,
AUDIO_STREAM_VOICE_CALL);
return 0;
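
The AudioPolicyService hunks reindent the command-thread loop and the parameter-filtering block that drops keys a newer SET_PARAMETERS command overrides. Roughly, with std::map standing in for AudioParameter (illustrative only, not the real insertCommand_l() code):

    #include <cstdio>
    #include <map>
    #include <string>

    using Params = std::map<std::string, std::string>;

    // Remove from 'queued' every key that the newly inserted command sets again.
    static void filterOverridden(const Params& incoming, Params& queued) {
        for (const auto& kv : incoming) {
            auto it = queued.find(kv.first);
            if (it != queued.end()) {
                std::printf("Filtering out parameter %s\n", it->first.c_str());
                queued.erase(it);
            }
        }
    }

    int main() {
        Params queued   = { {"routing", "2"}, {"screen_state", "on"} };
        Params incoming = { {"routing", "8"} };
        filterOverridden(incoming, queued);
        return (int)queued.count("routing");   // 0: the stale routing value was dropped
    }
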
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index 7119b90..9ed905d 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -311,8 +311,8 @@
mutable Mutex mLock; // prevents concurrent access to AudioPolicy manager functions changing
// device connection state or routing
- sp <AudioCommandThread> mAudioCommandThread; // audio commands thread
- sp <AudioCommandThread> mTonePlaybackThread; // tone playback thread
+ sp<AudioCommandThread> mAudioCommandThread; // audio commands thread
+ sp<AudioCommandThread> mTonePlaybackThread; // tone playback thread
struct audio_policy_device *mpAudioPolicyDev;
struct audio_policy *mpAudioPolicy;
KeyedVector< audio_source_t, InputSourceDesc* > mInputSources;
diff --git a/services/audioflinger/AudioResampler.cpp b/services/audioflinger/AudioResampler.cpp
index 398ba0b..fbb54cf 100644
--- a/services/audioflinger/AudioResampler.cpp
+++ b/services/audioflinger/AudioResampler.cpp
@@ -227,7 +227,7 @@
mX0L = mBuffer.i16[mBuffer.frameCount*2-2];
mX0R = mBuffer.i16[mBuffer.frameCount*2-1];
provider->releaseBuffer(&mBuffer);
- // mBuffer.frameCount == 0 now so we reload a new buffer
+ // mBuffer.frameCount == 0 now so we reload a new buffer
}
int16_t *in = mBuffer.i16;
diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h
index 9deb796..1610e00 100644
--- a/services/audioflinger/AudioResampler.h
+++ b/services/audioflinger/AudioResampler.h
@@ -33,7 +33,7 @@
// HIGH_QUALITY: fixed multi-tap FIR (e.g. 48KHz->44.1KHz)
// NOTE: high quality SRC will only be supported for
// certain fixed rate conversions. Sample rate cannot be
- // changed dynamically.
+ // changed dynamically.
enum src_quality {
DEFAULT=0,
LOW_QUALITY=1,
diff --git a/services/audioflinger/AudioResamplerCubic.h b/services/audioflinger/AudioResamplerCubic.h
index b72b62a..892785a 100644
--- a/services/audioflinger/AudioResamplerCubic.h
+++ b/services/audioflinger/AudioResamplerCubic.h
@@ -55,7 +55,7 @@
p->y1 = p->y2;
p->y2 = p->y3;
p->y3 = in;
- p->a = (3 * (p->y1 - p->y2) - p->y0 + p->y3) >> 1;
+ p->a = (3 * (p->y1 - p->y2) - p->y0 + p->y3) >> 1;
p->b = (p->y2 << 1) + p->y0 - (((5 * p->y1 + p->y3)) >> 1);
p->c = (p->y2 - p->y0) >> 1;
}
diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp
index d373c08..76662d8 100644
--- a/services/audioflinger/AudioResamplerSinc.cpp
+++ b/services/audioflinger/AudioResamplerSinc.cpp
@@ -222,7 +222,7 @@
} else {
read<CHANNELS>(impulse, phaseFraction, mBuffer.i16, inputIndex);
}
- }
+ }
}
int16_t *in = mBuffer.i16;
const size_t frameCount = mBuffer.frameCount;
@@ -247,7 +247,7 @@
if (inputIndex >= frameCount)
break; // need a new buffer
read<CHANNELS>(impulse, phaseFraction, in, inputIndex);
- } else if(phaseIndex == 2) { // maximum value
+ } else if (phaseIndex == 2) { // maximum value
inputIndex++;
if (inputIndex >= frameCount)
break; // 0 frame available, 2 frames needed
diff --git a/services/java/com/android/server/PowerManagerService.java b/services/java/com/android/server/PowerManagerService.java
index 63418db..52a4110 100644
--- a/services/java/com/android/server/PowerManagerService.java
+++ b/services/java/com/android/server/PowerManagerService.java
@@ -1858,6 +1858,7 @@
} else {
// Update the lights *before* taking care of turning the
// screen off, so we can initiate any animations that are desired.
+ mScreenOffReason = reason;
if (stateChanged) {
updateLightsLocked(newState, 0);
}
@@ -1876,7 +1877,6 @@
Binder.restoreCallingIdentity(identity);
}
mPowerState &= ~SCREEN_ON_BIT;
- mScreenOffReason = reason;
if (!mScreenBrightnessAnimator.isAnimating()) {
err = screenOffFinishedAnimatingLocked(reason);
} else {
diff --git a/tests/RenderScriptTests/SampleTest/src/com/android/rs/sample/sample.rs b/tests/RenderScriptTests/SampleTest/src/com/android/rs/sample/sample.rs
index 0f3c0a7..e2bf43d 100644
--- a/tests/RenderScriptTests/SampleTest/src/com/android/rs/sample/sample.rs
+++ b/tests/RenderScriptTests/SampleTest/src/com/android/rs/sample/sample.rs
@@ -28,176 +28,6 @@
allocSampler = sampler;
}
-static int32_t wrapI(rs_sampler_value wrap, int32_t coord, int32_t size) {
- if (wrap == RS_SAMPLER_WRAP) {
- coord = coord % size;
- if (coord < 0) {
- coord += size;
- }
- }
- return max(0, min(coord, size - 1));
-}
-
-#define convert_float(v) (float)v
-#define SAMPLE_1D_FUNC(vecsize) \
- static float##vecsize get1DSample##vecsize(rs_allocation a, float2 weights, \
- int iPixel, int next) { \
- uchar##vecsize *p0c = (uchar##vecsize*)rsGetElementAt(a, iPixel); \
- uchar##vecsize *p1c = (uchar##vecsize*)rsGetElementAt(a, next); \
- float##vecsize p0 = convert_float##vecsize(*p0c); \
- float##vecsize p1 = convert_float##vecsize(*p1c); \
- return p0 * weights.x + p1 * weights.y; \
- }
-#define SAMPLE_2D_FUNC(vecsize) \
- static float##vecsize get2DSample##vecsize(rs_allocation a, float4 weights, \
- int2 iPixel, int nextX, int nextY) { \
- uchar##vecsize *p0c = (uchar##vecsize*)rsGetElementAt(a, iPixel.x, iPixel.y); \
- uchar##vecsize *p1c = (uchar##vecsize*)rsGetElementAt(a, nextX, iPixel.y); \
- uchar##vecsize *p2c = (uchar##vecsize*)rsGetElementAt(a, iPixel.x, nextY); \
- uchar##vecsize *p3c = (uchar##vecsize*)rsGetElementAt(a, nextX, nextY); \
- float##vecsize p0 = convert_float##vecsize(*p0c); \
- float##vecsize p1 = convert_float##vecsize(*p1c); \
- float##vecsize p2 = convert_float##vecsize(*p2c); \
- float##vecsize p3 = convert_float##vecsize(*p3c); \
- return p0 * weights.x + p1 * weights.y + p2 * weights.z + p3 * weights.w; \
- }
-
-SAMPLE_1D_FUNC()
-SAMPLE_1D_FUNC(2)
-SAMPLE_1D_FUNC(3)
-SAMPLE_1D_FUNC(4)
-
-SAMPLE_2D_FUNC()
-SAMPLE_2D_FUNC(2)
-SAMPLE_2D_FUNC(3)
-SAMPLE_2D_FUNC(4)
-
-static float4 getBilinearSample565(rs_allocation a, float4 weights,
- int2 iPixel, int nextX, int nextY) {
- float4 zero = {0.0f, 0.0f, 0.0f, 0.0f};
- return zero;
-}
-
-static float4 getBilinearSample(rs_allocation a, float4 weights,
- int2 iPixel, int nextX, int nextY,
- uint32_t vecSize, rs_data_type dt) {
- if (dt == RS_TYPE_UNSIGNED_5_6_5) {
- return getBilinearSample565(a, weights, iPixel, nextX, nextY);
- }
-
- float4 result;
- switch(vecSize) {
- case 1:
- result.x = get2DSample(a, weights, iPixel, nextX, nextY);
- result.yzw = 0.0f;
- break;
- case 2:
- result.xy = get2DSample2(a, weights, iPixel, nextX, nextY);
- result.zw = 0.0f;
- break;
- case 3:
- result.xyz = get2DSample3(a, weights, iPixel, nextX, nextY);
- result.w = 0.0f;
- break;
- case 4:
- result = get2DSample4(a, weights, iPixel, nextX, nextY);
- break;
- }
-
- return result;
-}
-
-static float4 getNearestSample(rs_allocation a, int2 iPixel, uint32_t vecSize, rs_data_type dt) {
- if (dt == RS_TYPE_UNSIGNED_5_6_5) {
- float4 zero = {0.0f, 0.0f, 0.0f, 0.0f};
- return zero;
- }
-
- float4 result;
- switch(vecSize) {
- case 1:
- result.x = convert_float(*((uchar*)rsGetElementAt(a, iPixel.x, iPixel.y)));
- result.yzw = 0.0f;
- case 2:
- result.xy = convert_float2(*((uchar2*)rsGetElementAt(a, iPixel.x, iPixel.y)));
- result.zw = 0.0f;
- case 3:
- result.xyz = convert_float3(*((uchar3*)rsGetElementAt(a, iPixel.x, iPixel.y)));
- result.w = 0.0f;
- case 4:
- result = convert_float4(*((uchar4*)rsGetElementAt(a, iPixel.x, iPixel.y)));
- }
-
- return result;
-}
-
-
-// Naive implementation of texture filtering for prototyping purposes
-static float4 sample(rs_allocation a, rs_sampler s, float2 uv) {
-
- // Find out what kind of input data we are sampling
- rs_element elem = rsAllocationGetElement(a);
- uint32_t vecSize = rsElementGetVectorSize(elem);
- rs_data_kind dk = rsElementGetDataKind(elem);
- rs_data_type dt = rsElementGetDataType(elem);
-
- if (dk == RS_KIND_USER || (dt != RS_TYPE_UNSIGNED_8 && dt != RS_TYPE_UNSIGNED_5_6_5)) {
- float4 zero = {0.0f, 0.0f, 0.0f, 0.0f};
- return zero;
- }
- //rsDebug("*****************************************", 0);
- rs_sampler_value wrapS = rsgSamplerGetWrapS(s);
- rs_sampler_value wrapT = rsgSamplerGetWrapT(s);
-
- rs_sampler_value sampleMin = rsgSamplerGetMinification(s);
- rs_sampler_value sampleMag = rsgSamplerGetMagnification(s);
-
- int32_t sourceW = rsAllocationGetDimX(a);
- int32_t sourceH = rsAllocationGetDimY(a);
-
- float2 dimF;
- dimF.x = (float)(sourceW);
- dimF.y = (float)(sourceH);
- float2 pixelUV = uv * dimF;
- int2 iPixel = convert_int2(pixelUV);
-
- if (sampleMin == RS_SAMPLER_NEAREST ||
- sampleMag == RS_SAMPLER_NEAREST) {
- iPixel.x = wrapI(wrapS, iPixel.x, sourceW);
- iPixel.y = wrapI(wrapT, iPixel.y, sourceH);
- return getNearestSample(a, iPixel, vecSize, dt);
- }
-
- float2 frac = pixelUV - convert_float2(iPixel);
-
- if (frac.x < 0.5f) {
- iPixel.x -= 1;
- frac.x += 0.5f;
- } else {
- frac.x -= 0.5f;
- }
- if (frac.y < 0.5f) {
- iPixel.y -= 1;
- frac.y += 0.5f;
- } else {
- frac.y -= 0.5f;
- }
- float2 oneMinusFrac = 1.0f - frac;
-
- float4 weights;
- weights.x = oneMinusFrac.x * oneMinusFrac.y;
- weights.y = frac.x * oneMinusFrac.y;
- weights.z = oneMinusFrac.x * frac.y;
- weights.w = frac.x * frac.y;
-
- int32_t nextX = wrapI(wrapS, iPixel.x + 1, sourceW);
- int32_t nextY = wrapI(wrapT, iPixel.y + 1, sourceH);
- iPixel.x = wrapI(wrapS, iPixel.x, sourceW);
- iPixel.y = wrapI(wrapT, iPixel.y, sourceH);
-
- return getBilinearSample(a, weights, iPixel, nextX, nextY, vecSize, dt);
-}
-
void root(uchar4 *out, uint32_t x, uint32_t y) {
float destX = (float)rsAllocationGetDimX(destAlloc) - 1.0f;
@@ -207,7 +37,7 @@
uv.x = (float)x / destX;
uv.y = (float)y / destY;
- out->xyz = convert_uchar3(sample(sourceAlloc, allocSampler, uv*2.0f).xyz);
+ out->xyz = convert_uchar3(rsSample(sourceAlloc, allocSampler, uv*2.0f).xyz);
out->w = 0xff;
}
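
The deleted block above was a hand-rolled nearest/bilinear sampler kept only for prototyping; the script now calls the built-in rsSample() instead. The bilinear weighting the removed code computed is the standard one: with fractional offsets fx, fy inside the texel, the four corner weights are (1-fx)(1-fy), fx(1-fy), (1-fx)fy and fx*fy. A minimal Java sketch of that weighting is shown below; it operates on a plain float[] luminance image rather than an rs_allocation and is illustrative only, not the rsSample() implementation.

// Illustrative bilinear sampling over a single-channel float image.
// Mirrors the weight math from the removed get2DSample/getBilinearSample helpers.
public final class BilinearDemo {

    /** Clamp a coordinate to [0, size-1], like the removed wrapI() without RS_SAMPLER_WRAP. */
    private static int clamp(int coord, int size) {
        return Math.max(0, Math.min(coord, size - 1));
    }

    /** Sample image (width x height, row-major) at normalized uv coordinates in [0, 1]. */
    static float sample(float[] image, int width, int height, float u, float v) {
        float px = u * width - 0.5f;   // shift so texel centers sit on integer coordinates
        float py = v * height - 0.5f;
        int x0 = (int) Math.floor(px);
        int y0 = (int) Math.floor(py);
        float fx = px - x0;
        float fy = py - y0;

        int x1 = clamp(x0 + 1, width);
        int y1 = clamp(y0 + 1, height);
        x0 = clamp(x0, width);
        y0 = clamp(y0, height);

        // The same four weights the removed code stored in weights.xyzw.
        float w00 = (1 - fx) * (1 - fy);
        float w10 = fx * (1 - fy);
        float w01 = (1 - fx) * fy;
        float w11 = fx * fy;

        return image[y0 * width + x0] * w00 + image[y0 * width + x1] * w10
             + image[y1 * width + x0] * w01 + image[y1 * width + x1] * w11;
    }

    public static void main(String[] args) {
        float[] img = {0f, 1f, 2f, 3f};                    // 2x2 image
        System.out.println(sample(img, 2, 2, 0.5f, 0.5f)); // 1.5: average of all four texels
    }
}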
diff --git a/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java b/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
index 5c9bef9..f31ee68 100644
--- a/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
+++ b/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
@@ -113,11 +113,14 @@
private static final int RSSI_MONITOR_COUNT = 5;
private int mRssiMonitorCount = 0;
- /* Avoid flapping */
- private static final int MIN_INTERVAL_AVOID_BSSID_MS = 60 * 1000;
- private String mLastAvoidedBssid;
- /* a -ve interval to allow avoidance at boot */
- private long mLastBssidAvoidedTime = -MIN_INTERVAL_AVOID_BSSID_MS;
+ /* Avoid flapping. The avoidance interval grows each time we avoid a BSSID within the
+ * current interval; once we go longer than the max interval without avoiding, it resets. */
+ private static final int MIN_INTERVAL_AVOID_BSSID_MS[] = {0, 30 * 1000, 60 * 1000,
+ 5 * 60 * 1000, 30 * 60 * 1000};
+ /* Index into the interval array MIN_INTERVAL_AVOID_BSSID_MS */
+ private int mMinIntervalArrayIndex = 0;
+
+ private long mLastBssidAvoidedTime;
private int mCurrentSignalLevel;
@@ -744,11 +747,13 @@
mCurrentSignalLevel = calculateSignalLevel(msg.arg1);
//Ready to avoid bssid again ?
long time = android.os.SystemClock.elapsedRealtime();
- if (time - mLastBssidAvoidedTime > MIN_INTERVAL_AVOID_BSSID_MS) {
+ if (time - mLastBssidAvoidedTime > MIN_INTERVAL_AVOID_BSSID_MS[
+ mMinIntervalArrayIndex]) {
handleRssiChange();
} else {
if (DBG) log("Early to avoid " + mWifiInfo + " time: " + time +
- " last avoided: " + mLastBssidAvoidedTime);
+ " last avoided: " + mLastBssidAvoidedTime +
+ " mMinIntervalArrayIndex: " + mMinIntervalArrayIndex);
}
break;
default:
@@ -892,7 +897,20 @@
private void sendPoorLinkDetected() {
if (DBG) log("send POOR_LINK_DETECTED " + mWifiInfo);
mWsmChannel.sendMessage(POOR_LINK_DETECTED);
- mLastAvoidedBssid = mWifiInfo.getBSSID();
+
+ long time = android.os.SystemClock.elapsedRealtime();
+ if (time - mLastBssidAvoidedTime > MIN_INTERVAL_AVOID_BSSID_MS[
+ MIN_INTERVAL_AVOID_BSSID_MS.length - 1]) {
+ mMinIntervalArrayIndex = 1;
+ if (DBG) log("set mMinIntervalArrayIndex to 1");
+ } else {
+
+ if (mMinIntervalArrayIndex < MIN_INTERVAL_AVOID_BSSID_MS.length - 1) {
+ mMinIntervalArrayIndex++;
+ }
+ if (DBG) log("mMinIntervalArrayIndex: " + mMinIntervalArrayIndex);
+ }
+
mLastBssidAvoidedTime = android.os.SystemClock.elapsedRealtime();
}
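
The change above replaces the fixed 60-second avoidance interval with a backoff schedule: each consecutive avoidance moves to the next (longer) entry in MIN_INTERVAL_AVOID_BSSID_MS, and once no avoidance has happened for longer than the largest interval, the index drops back to 1. A minimal standalone sketch of that selection logic follows; BssidAvoidanceBackoff is a hypothetical class, and a clock is injected in place of SystemClock.elapsedRealtime() so the behaviour can be exercised directly.

import java.util.function.LongSupplier;

// Illustrative sketch of the backoff scheme added to WifiWatchdogStateMachine;
// names other than MIN_INTERVAL_AVOID_BSSID_MS are hypothetical.
public final class BssidAvoidanceBackoff {
    // Same schedule as the patch: 0 s, 30 s, 1 min, 5 min, 30 min.
    private static final long[] MIN_INTERVAL_AVOID_BSSID_MS =
            {0, 30_000, 60_000, 5 * 60_000, 30 * 60_000};

    private final LongSupplier mClock;      // stands in for SystemClock.elapsedRealtime()
    private int mMinIntervalArrayIndex = 0;
    private long mLastBssidAvoidedTime = 0;

    BssidAvoidanceBackoff(LongSupplier clock) {
        mClock = clock;
    }

    /** Mirrors the RSSI-change guard: only act once the current interval has elapsed. */
    boolean readyToAvoid() {
        return mClock.getAsLong() - mLastBssidAvoidedTime
                > MIN_INTERVAL_AVOID_BSSID_MS[mMinIntervalArrayIndex];
    }

    /** Mirrors sendPoorLinkDetected(): reset or grow the interval, then record the time. */
    void onPoorLinkDetected() {
        long now = mClock.getAsLong();
        if (now - mLastBssidAvoidedTime
                > MIN_INTERVAL_AVOID_BSSID_MS[MIN_INTERVAL_AVOID_BSSID_MS.length - 1]) {
            mMinIntervalArrayIndex = 1;     // quiet for longer than 30 min: restart near the bottom
        } else if (mMinIntervalArrayIndex < MIN_INTERVAL_AVOID_BSSID_MS.length - 1) {
            mMinIntervalArrayIndex++;       // repeated avoidance: back off further
        }
        mLastBssidAvoidedTime = now;
    }

    public static void main(String[] args) {
        long[] fakeNow = {0};
        BssidAvoidanceBackoff backoff = new BssidAvoidanceBackoff(() -> fakeNow[0]);
        fakeNow[0] = 1;                               // just after boot: index 0, interval 0 ms
        System.out.println(backoff.readyToAvoid());   // true
        backoff.onPoorLinkDetected();                 // index moves from 0 to 1, time recorded
        System.out.println(backoff.readyToAvoid());   // false: 30 s interval not yet elapsed
    }
}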