Reconcile with ics-factoryrom-release
Change-Id: Id586eed2642eaf5d2e3c21d063c9d13acf83f551
diff --git a/apps/Fallback/res/values-af/strings.xml b/apps/Fallback/res/values-af/strings.xml
deleted file mode 100644
index 8cf16a7..0000000
--- a/apps/Fallback/res/values-af/strings.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Copyright (C) 2007 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"Terugval"</string>
- <string name="title" msgid="8156274565006125136">"Nie-ondersteunde handeling"</string>
- <string name="error" msgid="6539615832923362301">"Hierdie handeling word tans nie ondersteun nie."</string>
-</resources>
diff --git a/apps/Fallback/res/values-am/strings.xml b/apps/Fallback/res/values-am/strings.xml
deleted file mode 100644
index 0c89122..0000000
--- a/apps/Fallback/res/values-am/strings.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Copyright (C) 2007 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"Fallback"</string>
- <string name="title" msgid="8156274565006125136">"የማይደገፍ ድርጊት"</string>
- <string name="error" msgid="6539615832923362301">"ያድርጊት በአሁኑ ጊዜ የማይደገፍ ነው።"</string>
-</resources>
diff --git a/apps/Fallback/res/values-iw/strings.xml b/apps/Fallback/res/values-iw/strings.xml
index cbf35ef..671919b 100644
--- a/apps/Fallback/res/values-iw/strings.xml
+++ b/apps/Fallback/res/values-iw/strings.xml
@@ -16,7 +16,7 @@
<resources xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"חלופי"</string>
+ <string name="appTitle" msgid="161410001913116606">"החזרה"</string>
<string name="title" msgid="8156274565006125136">"פעולה לא נתמכת"</string>
<string name="error" msgid="6539615832923362301">"הפעולה אינה נתמכת בשלב זה."</string>
</resources>
diff --git a/apps/Fallback/res/values-ms/strings.xml b/apps/Fallback/res/values-ms/strings.xml
deleted file mode 100644
index 930fe79..0000000
--- a/apps/Fallback/res/values-ms/strings.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Copyright (C) 2007 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"Jatuh balik"</string>
- <string name="title" msgid="8156274565006125136">"Tindakan tidak disokong"</string>
- <string name="error" msgid="6539615832923362301">"Tindakan tidak disokong pada masa ini."</string>
-</resources>
diff --git a/apps/Fallback/res/values-sw/strings.xml b/apps/Fallback/res/values-sw/strings.xml
deleted file mode 100644
index f322c20..0000000
--- a/apps/Fallback/res/values-sw/strings.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Copyright (C) 2007 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"Rudi nyuma"</string>
- <string name="title" msgid="8156274565006125136">"Kitendo kinachohimiliwa"</string>
- <string name="error" msgid="6539615832923362301">"Kitendo hakijahimiliwa ipasavyo"</string>
-</resources>
diff --git a/apps/Fallback/res/values-zu/strings.xml b/apps/Fallback/res/values-zu/strings.xml
deleted file mode 100644
index 2c85d28..0000000
--- a/apps/Fallback/res/values-zu/strings.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- Copyright (C) 2007 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-
-<resources xmlns:android="http://schemas.android.com/apk/res/android"
- xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <string name="appTitle" msgid="161410001913116606">"Buyela emuva"</string>
- <string name="title" msgid="8156274565006125136">"Isenzo esingasekelwe"</string>
- <string name="error" msgid="6539615832923362301">"Leso senzo okwamanje asisekelwe."</string>
-</resources>
diff --git a/build/sdk-android-armeabi-v7a.atree b/build/sdk-android-armeabi-v7a.atree
index 8a79994..8867bf7 100644
--- a/build/sdk-android-armeabi-v7a.atree
+++ b/build/sdk-android-armeabi-v7a.atree
@@ -15,4 +15,7 @@
#
# Copy the ARMv7 specific kernel image to .../kernel-qemu
-prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu-armv7 platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/kernel-qemu
+prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu-armv7 system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
+
+# version files for the SDK updater, from development.git
+development/sdk/images_armeabi-v7a_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk-android-armeabi.atree b/build/sdk-android-armeabi.atree
index d90b069..a31eca6 100644
--- a/build/sdk-android-armeabi.atree
+++ b/build/sdk-android-armeabi.atree
@@ -14,4 +14,7 @@
# limitations under the License.
#
-prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/kernel-qemu
+prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
+
+# version files for the SDK updater, from development.git
+development/sdk/images_armeabi_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk-android-x86.atree b/build/sdk-android-x86.atree
index d90b069..748478a 100644
--- a/build/sdk-android-x86.atree
+++ b/build/sdk-android-x86.atree
@@ -14,4 +14,7 @@
# limitations under the License.
#
-prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/kernel-qemu
+prebuilt/android-${TARGET_ARCH}/kernel/kernel-qemu system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/kernel-qemu
+
+# version files for the SDK updater, from development.git
+development/sdk/images_x86_source.properties system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/source.properties
diff --git a/build/sdk.atree b/build/sdk.atree
index 7e0c2a5..ad1d19f 100644
--- a/build/sdk.atree
+++ b/build/sdk.atree
@@ -54,7 +54,7 @@
# Platform Component
##############################################################################
-# version files for the SDK updater, from sdk.git
+# version files for the SDK updater, from development.git
development/sdk/platform_source.properties platforms/${PLATFORM_NAME}/source.properties
# copy build prop from out/.../sdk/
@@ -71,14 +71,6 @@
external/clang/lib/Headers platforms/${PLATFORM_NAME}/renderscript/clang-include
external/clang/LICENSE.TXT platforms/${PLATFORM_NAME}/renderscript/clang-include/LICENSE.TXT
-# System images + Kernel
-system.img platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/system.img
-ramdisk.img platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/ramdisk.img
-userdata.img platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/userdata.img
-system/build.prop platforms/${PLATFORM_NAME}/images/${TARGET_CPU_ABI}/build.prop
-
-# Note: the kernel image is handled by sdk-android-<abi>.atree now.
-
# emulator skins from sdk.git
development/tools/emulator/skins/QVGA platforms/${PLATFORM_NAME}/skins/QVGA
development/tools/emulator/skins/WQVGA432 platforms/${PLATFORM_NAME}/skins/WQVGA432
@@ -122,6 +114,18 @@
sdk/files/sdk_files_NOTICE.txt platforms/${PLATFORM_NAME}/skins/NOTICE.txt
##############################################################################
+# System image Component
+##############################################################################
+
+# System images + Kernel
+system.img system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/system.img
+ramdisk.img system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/ramdisk.img
+userdata.img system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/userdata.img
+system/build.prop system-images/${PLATFORM_NAME}/${TARGET_CPU_ABI}/build.prop
+
+# Note: the kernel image is handled by sdk-android-<abi>.atree now.
+
+##############################################################################
# Docs Component
##############################################################################
diff --git a/samples/ApiDemos/src/com/example/android/apis/view/LabelView.java b/samples/ApiDemos/src/com/example/android/apis/view/LabelView.java
index b98a5b5..b0a1f96 100644
--- a/samples/ApiDemos/src/com/example/android/apis/view/LabelView.java
+++ b/samples/ApiDemos/src/com/example/android/apis/view/LabelView.java
@@ -84,7 +84,8 @@
private final void initLabelView() {
mTextPaint = new Paint();
mTextPaint.setAntiAlias(true);
- mTextPaint.setTextSize(16);
+ // Must manually scale the desired text size to match screen density
+ mTextPaint.setTextSize(16 * getResources().getDisplayMetrics().density);
mTextPaint.setColor(0xFF000000);
setPadding(3, 3, 3, 3);
}
@@ -104,6 +105,7 @@
* @param size Font size
*/
public void setTextSize(int size) {
+ // This text size has been pre-scaled by the getDimensionPixelOffset method
mTextPaint.setTextSize(size);
requestLayout();
invalidate();
diff --git a/samples/HoneycombGallery/AndroidManifest.xml b/samples/HoneycombGallery/AndroidManifest.xml
index 704a1c6..2a99856 100644
--- a/samples/HoneycombGallery/AndroidManifest.xml
+++ b/samples/HoneycombGallery/AndroidManifest.xml
@@ -16,9 +16,9 @@
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.android.hcgallery" android:versionCode="2"
- android:versionName="1.2">
+ android:versionName="1.5">
- <uses-sdk android:minSdkVersion="11" android:targetSdkVersion="13" />
+ <uses-sdk android:minSdkVersion="11" android:targetSdkVersion="14" />
<!-- Declares that the app supports devices w/o touch, such as a mouse-driven device
or a device that provides only a d-pad for navigation -->
diff --git a/samples/HoneycombGallery/res/values-port/dimens.xml b/samples/HoneycombGallery/res/values-large-port/dimens.xml
similarity index 100%
rename from samples/HoneycombGallery/res/values-port/dimens.xml
rename to samples/HoneycombGallery/res/values-large-port/dimens.xml
diff --git a/sdk/images_armeabi-v7a_source.properties b/sdk/images_armeabi-v7a_source.properties
new file mode 100644
index 0000000..50d4054
--- /dev/null
+++ b/sdk/images_armeabi-v7a_source.properties
@@ -0,0 +1,7 @@
+Pkg.Desc=Android SDK Platform 4.0
+Pkg.UserSrc=false
+Platform.Version=4.0
+Pkg.Revision=1
+AndroidVersion.ApiLevel=14
+SystemImage.Abi=armeabi-v7a
+#AndroidVersion.CodeName=
diff --git a/sdk/images_armeabi_source.properties b/sdk/images_armeabi_source.properties
new file mode 100644
index 0000000..06cf392
--- /dev/null
+++ b/sdk/images_armeabi_source.properties
@@ -0,0 +1,7 @@
+Pkg.Desc=Android SDK Platform 4.0
+Pkg.UserSrc=false
+Platform.Version=4.0
+Pkg.Revision=1
+AndroidVersion.ApiLevel=14
+SystemImage.Abi=armeabi
+#AndroidVersion.CodeName=
diff --git a/sdk/images_x86_source.properties b/sdk/images_x86_source.properties
new file mode 100644
index 0000000..199f5c5
--- /dev/null
+++ b/sdk/images_x86_source.properties
@@ -0,0 +1,7 @@
+Pkg.Desc=Android SDK Platform 4.0
+Pkg.UserSrc=false
+Platform.Version=4.0
+Pkg.Revision=1
+AndroidVersion.ApiLevel=14
+SystemImage.Abi=x86
+#AndroidVersion.CodeName=
diff --git a/tools/emulator/system/camera/Android.mk b/tools/emulator/system/camera/Android.mk
index 47f425a..2e5c19c 100755
--- a/tools/emulator/system/camera/Android.mk
+++ b/tools/emulator/system/camera/Android.mk
@@ -28,6 +28,16 @@
libcamera_client \
libui \
+# JPEG conversion libraries and includes.
+LOCAL_SHARED_LIBRARIES += \
+ libjpeg \
+ libskia \
+ libandroid_runtime \
+
+LOCAL_C_INCLUDES += external/jpeg \
+ external/skia/include/core/ \
+ frameworks/base/core/jni/android/graphics
+
LOCAL_SRC_FILES := \
EmulatedCameraHal.cpp \
EmulatedCameraFactory.cpp \
@@ -40,7 +50,8 @@
Converters.cpp \
PreviewWindow.cpp \
CallbackNotifier.cpp \
- QemuClient.cpp
+ QemuClient.cpp \
+ JpegCompressor.cpp
ifeq ($(TARGET_PRODUCT),vbox_x86)
LOCAL_MODULE := camera.vbox_x86
diff --git a/tools/emulator/system/camera/CallbackNotifier.cpp b/tools/emulator/system/camera/CallbackNotifier.cpp
index 188bf3a..c2a84e2 100755
--- a/tools/emulator/system/camera/CallbackNotifier.cpp
+++ b/tools/emulator/system/camera/CallbackNotifier.cpp
@@ -25,6 +25,7 @@
#include <media/stagefright/MetadataBufferType.h>
#include "EmulatedCameraDevice.h"
#include "CallbackNotifier.h"
+#include "JpegCompressor.h"
namespace android {
@@ -92,7 +93,9 @@
mLastFrameTimestamp(0),
mFrameRefreshFreq(0),
mMessageEnabler(0),
- mVideoRecEnabled(false)
+ mJpegQuality(90),
+ mVideoRecEnabled(false),
+ mTakingPicture(false)
{
}
@@ -194,7 +197,9 @@
mCBOpaque = NULL;
mLastFrameTimestamp = 0;
mFrameRefreshFreq = 0;
+ mJpegQuality = 90;
mVideoRecEnabled = false;
+ mTakingPicture = false;
}
void CallbackNotifier::onNextFrameAvailable(const void* frame,
@@ -213,6 +218,43 @@
LOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
}
}
+
+ if (mTakingPicture) {
+ /* This happens just once. */
+ mTakingPicture = false;
+ /* The sequence of callbacks during picture taking is:
+ * - CAMERA_MSG_SHUTTER
+ * - CAMERA_MSG_RAW_IMAGE_NOTIFY
+ * - CAMERA_MSG_COMPRESSED_IMAGE
+ */
+ if (isMessageEnabled(CAMERA_MSG_SHUTTER)) {
+ mNotifyCB(CAMERA_MSG_SHUTTER, 0, 0, mCBOpaque);
+ }
+ if (isMessageEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
+ mNotifyCB(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCBOpaque);
+ }
+ if (isMessageEnabled(CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* Compress the frame to JPEG. TODO: Make sure that frame is NV21! */
+ NV21JpegCompressor compressor;
+ status_t res =
+ compressor.compressRawImage(frame, camera_dev->getFrameWidth(),
+ camera_dev->getFrameHeight(),
+ mJpegQuality);
+ if (res == NO_ERROR) {
+ camera_memory_t* jpeg_buff =
+ mGetMemoryCB(-1, compressor.getCompressedSize(), 1, NULL);
+ if (NULL != jpeg_buff && NULL != jpeg_buff->data) {
+ compressor.getCompressedImage(jpeg_buff->data);
+ mDataCB(CAMERA_MSG_COMPRESSED_IMAGE, jpeg_buff, 0, NULL, mCBOpaque);
+ jpeg_buff->release(jpeg_buff);
+ } else {
+ LOGE("%s: Memory failure in CAMERA_MSG_COMPRESSED_IMAGE", __FUNCTION__);
+ }
+ } else {
+ LOGE("%s: Compression failure in CAMERA_MSG_COMPRESSED_IMAGE", __FUNCTION__);
+ }
+ }
+ }
}
/****************************************************************************
diff --git a/tools/emulator/system/camera/CallbackNotifier.h b/tools/emulator/system/camera/CallbackNotifier.h
index 0a595ef..3722d21 100755
--- a/tools/emulator/system/camera/CallbackNotifier.h
+++ b/tools/emulator/system/camera/CallbackNotifier.h
@@ -114,7 +114,7 @@
*/
inline int isMessageEnabled(uint msg_type)
{
- return mMessageEnabler & ~msg_type;
+ return mMessageEnabler & msg_type;
}
/* Checks if video recording is enabled.
@@ -159,6 +159,21 @@
nsecs_t timestamp,
EmulatedCameraDevice* camera_dev);
+ /* Sets or resets the taking-picture state.
+ * This state controls whether or not to notify the framework about compressed
+ * image, shutter, and other picture related events.
+ */
+ void setTakingPicture(bool taking)
+ {
+ mTakingPicture = taking;
+ }
+
+ /* Sets JPEG quality used to compress frame during picture taking. */
+ void setJpegQuality(int jpeg_quality)
+ {
+ mJpegQuality = jpeg_quality;
+ }
+
/****************************************************************************
* Private API
***************************************************************************/
@@ -197,8 +212,14 @@
/* Message enabler. */
uint32_t mMessageEnabler;
+ /* JPEG quality used to compress frame during picture taking. */
+ int mJpegQuality;
+
/* Video recording status. */
bool mVideoRecEnabled;
+
+ /* Picture taking status. */
+ bool mTakingPicture;
};
}; /* namespace android */
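
A note on the isMessageEnabled() fix in this header: the old expression masked with the complement of the queried bit, so it effectively answered "is anything else enabled?" rather than "is this message enabled?". A minimal standalone C++ sketch (the bit values below are made up for illustration and are not the real CAMERA_MSG_* constants) shows the difference:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t enabler = 0x04;  // hypothetical: only the "video frame" bit is set
        const uint32_t queried = 0x02;  // hypothetical: asking about the "shutter" bit

        // Old check: 0x04 & ~0x02 == 0x04 (non-zero), so a disabled message
        // is reported as enabled whenever any other bit happens to be set.
        std::printf("old: %u\n", static_cast<unsigned>(enabler & ~queried));

        // New check: 0x04 & 0x02 == 0, correctly reporting "disabled".
        std::printf("new: %u\n", static_cast<unsigned>(enabler & queried));
        return 0;
    }
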
diff --git a/tools/emulator/system/camera/Converters.cpp b/tools/emulator/system/camera/Converters.cpp
index 797d652..f63f67f 100755
--- a/tools/emulator/system/camera/Converters.cpp
+++ b/tools/emulator/system/camera/Converters.cpp
@@ -25,24 +25,25 @@
namespace android {
-void YV12ToRGB565(const void* yv12, void* rgb, int width, int height)
+static void _YUV420SToRGB565(const uint8_t* Y,
+ const uint8_t* U,
+ const uint8_t* V,
+ int dUV,
+ uint16_t* rgb,
+ int width,
+ int height)
{
- const int pix_total = width * height;
- uint16_t* rgb_buf = reinterpret_cast<uint16_t*>(rgb);
- const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
- const uint8_t* U_pos = Y + pix_total;
- const uint8_t* V_pos = U_pos + pix_total / 4;
- const uint8_t* U = U_pos;
- const uint8_t* V = V_pos;
+ const uint8_t* U_pos = U;
+ const uint8_t* V_pos = V;
for (int y = 0; y < height; y++) {
- for (int x = 0; x < width; x += 2) {
- const uint8_t nU = *U; U++;
- const uint8_t nV = *V; V++;
- *rgb_buf = YUVToRGB565(*Y, nU, nV);
- Y++; rgb_buf++;
- *rgb_buf = YUVToRGB565(*Y, nU, nV);
- Y++; rgb_buf++;
+ for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
+ const uint8_t nU = *U;
+ const uint8_t nV = *V;
+ *rgb = YUVToRGB565(*Y, nU, nV);
+ Y++; rgb++;
+ *rgb = YUVToRGB565(*Y, nU, nV);
+ Y++; rgb++;
}
if (y & 0x1) {
U_pos = U;
@@ -54,24 +55,25 @@
}
}
-void YV12ToRGB32(const void* yv12, void* rgb, int width, int height)
+static void _YUV420SToRGB32(const uint8_t* Y,
+ const uint8_t* U,
+ const uint8_t* V,
+ int dUV,
+ uint32_t* rgb,
+ int width,
+ int height)
{
- const int pix_total = width * height;
- uint32_t* rgb_buf = reinterpret_cast<uint32_t*>(rgb);
- const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
- const uint8_t* U_pos = Y + pix_total;
- const uint8_t* V_pos = U_pos + pix_total / 4;
- const uint8_t* U = U_pos;
- const uint8_t* V = V_pos;
+ const uint8_t* U_pos = U;
+ const uint8_t* V_pos = V;
for (int y = 0; y < height; y++) {
- for (int x = 0; x < width; x += 2) {
- const uint8_t nU = *U; U++;
- const uint8_t nV = *V; V++;
- *rgb_buf = YUVToRGB32(*Y, nU, nV);
- Y++; rgb_buf++;
- *rgb_buf = YUVToRGB32(*Y, nU, nV);
- Y++; rgb_buf++;
+ for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
+ const uint8_t nU = *U;
+ const uint8_t nV = *V;
+ *rgb = YUVToRGB32(*Y, nU, nV);
+ Y++; rgb++;
+ *rgb = YUVToRGB32(*Y, nU, nV);
+ Y++; rgb++;
}
if (y & 0x1) {
U_pos = U;
@@ -83,4 +85,89 @@
}
}
+void YV12ToRGB565(const void* yv12, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
+ const uint8_t* V = Y + pix_total;
+ const uint8_t* U = V + pix_total / 4;
+ _YUV420SToRGB565(Y, U, V, 1, reinterpret_cast<uint16_t*>(rgb), width, height);
+}
+
+void YV12ToRGB32(const void* yv12, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
+ const uint8_t* V = Y + pix_total;
+ const uint8_t* U = V + pix_total / 4;
+ _YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height);
+}
+
+void YU12ToRGB32(const void* yu12, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* Y = reinterpret_cast<const uint8_t*>(yu12);
+ const uint8_t* U = Y + pix_total;
+ const uint8_t* V = U + pix_total / 4;
+ _YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height);
+}
+
+/* Common converter for YUV 4:2:0 interleaved to RGB565.
+ * y, u, and v point to the Y, U, and V planes, where U and V values are interleaved.
+ */
+static void _NVXXToRGB565(const uint8_t* Y,
+ const uint8_t* U,
+ const uint8_t* V,
+ uint16_t* rgb,
+ int width,
+ int height)
+{
+ _YUV420SToRGB565(Y, U, V, 2, rgb, width, height);
+}
+
+/* Common converter for YUV 4:2:0 interleaved to RGB32.
+ * y, u, and v point to the Y, U, and V planes, where U and V values are interleaved.
+ */
+static void _NVXXToRGB32(const uint8_t* Y,
+ const uint8_t* U,
+ const uint8_t* V,
+ uint32_t* rgb,
+ int width,
+ int height)
+{
+ _YUV420SToRGB32(Y, U, V, 2, rgb, width, height);
+}
+
+void NV12ToRGB565(const void* nv12, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
+ _NVXXToRGB565(y, y + pix_total, y + pix_total + 1,
+ reinterpret_cast<uint16_t*>(rgb), width, height);
+}
+
+void NV12ToRGB32(const void* nv12, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
+ _NVXXToRGB32(y, y + pix_total, y + pix_total + 1,
+ reinterpret_cast<uint32_t*>(rgb), width, height);
+}
+
+void NV21ToRGB565(const void* nv21, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
+ _NVXXToRGB565(y, y + pix_total + 1, y + pix_total,
+ reinterpret_cast<uint16_t*>(rgb), width, height);
+}
+
+void NV21ToRGB32(const void* nv21, void* rgb, int width, int height)
+{
+ const int pix_total = width * height;
+ const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
+ _NVXXToRGB32(y, y + pix_total + 1, y + pix_total,
+ reinterpret_cast<uint32_t*>(rgb), width, height);
+}
+
}; /* namespace android */
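
For reference, the U/V offsets and the dUV step that the new wrappers derive follow directly from how each 4:2:0 layout packs its chroma: the planar formats (YV12, YU12) keep separate U and V planes after the Y plane and step chroma by 1, while the semi-planar formats (NV12, NV21) keep a single interleaved chroma plane and step by 2. A small self-contained sketch (hypothetical 4x4 frame; offsets only, no real pixel data) summarizing what the wrappers above compute:

    #include <cstdio>

    int main() {
        const int width = 4, height = 4;        // hypothetical frame size
        const int pix_total = width * height;

        // Planar: Y plane, then one chroma plane, then the other; dUV = 1.
        std::printf("YV12: V at %d, U at %d, dUV=1\n", pix_total, pix_total + pix_total / 4);
        std::printf("YU12: U at %d, V at %d, dUV=1\n", pix_total, pix_total + pix_total / 4);

        // Semi-planar: Y plane, then interleaved chroma pairs; dUV = 2.
        std::printf("NV12: U at %d, V at %d, dUV=2\n", pix_total, pix_total + 1);
        std::printf("NV21: V at %d, U at %d, dUV=2\n", pix_total, pix_total + 1);
        return 0;
    }
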
diff --git a/tools/emulator/system/camera/Converters.h b/tools/emulator/system/camera/Converters.h
index 5cdcea2..ab00711 100755
--- a/tools/emulator/system/camera/Converters.h
+++ b/tools/emulator/system/camera/Converters.h
@@ -269,6 +269,46 @@
*/
void YV12ToRGB32(const void* yv12, void* rgb, int width, int height);
+/* Converts a YU12 framebuffer to RGB32 framebuffer.
+ * Param:
+ * yu12 - YU12 framebuffer.
+ * rgb - RGB32 framebuffer.
+ * width, height - Dimensions for both framebuffers.
+ */
+void YU12ToRGB32(const void* yu12, void* rgb, int width, int height);
+
+/* Converts an NV12 framebuffer to RGB565 framebuffer.
+ * Param:
+ * nv12 - NV12 framebuffer.
+ * rgb - RGB565 framebuffer.
+ * width, height - Dimensions for both framebuffers.
+ */
+void NV12ToRGB565(const void* nv12, void* rgb, int width, int height);
+
+/* Converts an NV12 framebuffer to RGB32 framebuffer.
+ * Param:
+ * nv12 - NV12 framebuffer.
+ * rgb - RGB32 framebuffer.
+ * width, height - Dimensions for both framebuffers.
+ */
+void NV12ToRGB32(const void* nv12, void* rgb, int width, int height);
+
+/* Converts an NV21 framebuffer to RGB565 framebuffer.
+ * Param:
+ * nv21 - NV21 framebuffer.
+ * rgb - RGB565 framebuffer.
+ * width, height - Dimensions for both framebuffers.
+ */
+void NV21ToRGB565(const void* nv21, void* rgb, int width, int height);
+
+/* Converts an NV21 framebuffer to RGB32 framebuffer.
+ * Param:
+ * nv21 - NV21 framebuffer.
+ * rgb - RGB32 framebuffer.
+ * width, height - Dimensions for both framebuffers.
+ */
+void NV21ToRGB32(const void* nv21, void* rgb, int width, int height);
+
}; /* namespace android */
#endif /* HW_EMULATOR_CAMERA_CONVERTERS_H */
diff --git a/tools/emulator/system/camera/EmulatedCamera.cpp b/tools/emulator/system/camera/EmulatedCamera.cpp
index 12b9792..80e7239 100755
--- a/tools/emulator/system/camera/EmulatedCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera.cpp
@@ -125,8 +125,8 @@
mParameters.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT,
CameraParameters::PIXEL_FORMAT_YUV420P);
mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
- CameraParameters::PIXEL_FORMAT_YUV420P);
- mParameters.setPictureFormat(CameraParameters::PIXEL_FORMAT_YUV420P);
+ CameraParameters::PIXEL_FORMAT_JPEG);
+ mParameters.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
/*
* Not supported features
@@ -302,22 +302,77 @@
{
LOGV("%s", __FUNCTION__);
+ status_t res;
+ int width, height;
+ uint32_t org_fmt;
+
+ /* Collect frame info for the picture. */
+ mParameters.getPictureSize(&width, &height);
+ const char* pix_fmt = mParameters.getPictureFormat();
+ if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ org_fmt = V4L2_PIX_FMT_YUV420;
+ } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_RGBA8888) == 0) {
+ org_fmt = V4L2_PIX_FMT_RGB32;
+ } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ org_fmt = V4L2_PIX_FMT_NV21;
+ } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
+ /* We only have JPEG converted for NV21 format. */
+ org_fmt = V4L2_PIX_FMT_NV21;
+ } else {
+ LOGE("%s: Unsupported pixel format %s", __FUNCTION__, pix_fmt);
+ return EINVAL;
+ }
+ /* Get JPEG quality. */
+ int jpeg_quality = mParameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ if (jpeg_quality <= 0) {
+ jpeg_quality = 90; /* Fall back to default. */
+ }
+
/*
- * Before taking picture, pause the camera (pause worker thread), and pause
- * the preview.
+ * Make sure preview is not running, and device is stopped before taking
+ * picture.
*/
+ const bool preview_on = mPreviewWindow.isPreviewEnabled();
+ if (preview_on) {
+ doStopPreview();
+ }
+
+ /* Camera device should have been stopped when the shutter message has been
+ * enabled. */
+ EmulatedCameraDevice* const camera_dev = getCameraDevice();
+ if (camera_dev->isStarted()) {
+ LOGW("%s: Camera device is started", __FUNCTION__);
+ camera_dev->stopDeliveringFrames();
+ camera_dev->stopDevice();
+ }
+
/*
* Take the picture now.
*/
- /*
- * After picture has been taken, resume the preview, and the camera (if any
- * has been paused.
- */
+ /* Start camera device for the picture frame. */
+ LOGD("Starting camera for picture: %.4s(%s)[%dx%d]",
+ reinterpret_cast<const char*>(&org_fmt), pix_fmt, width, height);
+ res = camera_dev->startDevice(width, height, org_fmt);
+ if (res != NO_ERROR) {
+ if (preview_on) {
+ doStartPreview();
+ }
+ return res;
+ }
-
- return NO_ERROR;
+ /* Deliver one frame only. */
+ mCallbackNotifier.setJpegQuality(jpeg_quality);
+ mCallbackNotifier.setTakingPicture(true);
+ res = camera_dev->startDeliveringFrames(true);
+ if (res != NO_ERROR) {
+ mCallbackNotifier.setTakingPicture(false);
+ if (preview_on) {
+ doStartPreview();
+ }
+ }
+ return res;
}
status_t EmulatedCamera::cancelPicture()
@@ -454,17 +509,69 @@
{
LOGV("%s", __FUNCTION__);
- status_t res = mPreviewWindow.startPreview();
+ EmulatedCameraDevice* camera_dev = getCameraDevice();
+ if (camera_dev->isStarted()) {
+ camera_dev->stopDeliveringFrames();
+ camera_dev->stopDevice();
+ }
- /* Start the camera. */
- if (res == NO_ERROR && !getCameraDevice()->isCapturing()) {
- res = startCamera();
+ status_t res = mPreviewWindow.startPreview();
+ if (res != NO_ERROR) {
+ return res;
+ }
+
+ /* Make sure camera device is connected. */
+ if (!camera_dev->isConnected()) {
+ res = camera_dev->connectDevice();
if (res != NO_ERROR) {
- /* If camera didn't start, disable the preview window. */
mPreviewWindow.stopPreview();
+ return res;
}
}
+ int width, height;
+ /* Let's see what we should use for the frame width and height. */
+ if (mParameters.get(CameraParameters::KEY_VIDEO_SIZE) != NULL) {
+ mParameters.getVideoSize(&width, &height);
+ } else {
+ mParameters.getPreviewSize(&width, &height);
+ }
+ /* Let's see what we should use for the frame pixel format. */
+ const char* pix_fmt =
+ mParameters.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
+ if (pix_fmt == NULL) {
+ pix_fmt = mParameters.getPreviewFormat();
+ }
+ if (pix_fmt == NULL) {
+ LOGE("%s: Unable to obtain video format", __FUNCTION__);
+ mPreviewWindow.stopPreview();
+ return EINVAL;
+ }
+ uint32_t org_fmt;
+ if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ org_fmt = V4L2_PIX_FMT_YUV420;
+ } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_RGBA8888) == 0) {
+ org_fmt = V4L2_PIX_FMT_RGB32;
+ } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ org_fmt = V4L2_PIX_FMT_NV21;
+ } else {
+ LOGE("%s: Unsupported pixel format %s", __FUNCTION__, pix_fmt);
+ mPreviewWindow.stopPreview();
+ return EINVAL;
+ }
+ LOGD("Starting camera: %dx%d -> %.4s", width, height, pix_fmt);
+ res = camera_dev->startDevice(width, height, org_fmt);
+ if (res != NO_ERROR) {
+ mPreviewWindow.stopPreview();
+ return res;
+ }
+
+ res = camera_dev->startDeliveringFrames(false);
+ if (res != NO_ERROR) {
+ camera_dev->stopDevice();
+ mPreviewWindow.stopPreview();
+ }
+
return res;
}
@@ -473,89 +580,22 @@
LOGV("%s", __FUNCTION__);
status_t res = NO_ERROR;
- /* Stop the camera. */
- if (getCameraDevice()->isCapturing()) {
- res = stopCamera();
- }
+ if (mPreviewWindow.isPreviewEnabled()) {
+ /* Stop the camera. */
+ if (getCameraDevice()->isStarted()) {
+ getCameraDevice()->stopDeliveringFrames();
+ res = getCameraDevice()->stopDevice();
+ }
- if (res == NO_ERROR) {
- /* Disable preview as well. */
- mPreviewWindow.stopPreview();
+ if (res == NO_ERROR) {
+ /* Disable preview as well. */
+ mPreviewWindow.stopPreview();
+ }
}
return NO_ERROR;
}
-status_t EmulatedCamera::startCamera()
-{
- LOGV("%s", __FUNCTION__);
-
- status_t res = EINVAL;
- EmulatedCameraDevice* camera_dev = getCameraDevice();
- if (camera_dev != NULL) {
- if (!camera_dev->isConnected()) {
- res = camera_dev->connectDevice();
- if (res != NO_ERROR) {
- return res;
- }
- }
- if (!camera_dev->isCapturing()) {
- int width, height;
- /* Lets see what should we use for frame width, and height. */
- if (mParameters.get(CameraParameters::KEY_VIDEO_SIZE) != NULL) {
- mParameters.getVideoSize(&width, &height);
- } else {
- mParameters.getPreviewSize(&width, &height);
- }
- /* Lets see what should we use for the frame pixel format. */
- const char* pix_fmt =
- mParameters.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
- if (pix_fmt == NULL) {
- pix_fmt = mParameters.getPreviewFormat();
- }
- if (pix_fmt == NULL) {
- LOGE("%s: Unable to obtain video format", __FUNCTION__);
- return EINVAL;
- }
- uint32_t org_fmt;
- if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
- org_fmt = V4L2_PIX_FMT_YVU420;
- } else if (strcmp(pix_fmt, CameraParameters::PIXEL_FORMAT_RGBA8888) == 0) {
- org_fmt = V4L2_PIX_FMT_RGB32;
- } else {
- LOGE("%s: Unsupported pixel format %s", __FUNCTION__, pix_fmt);
- return EINVAL;
- }
- LOGD("Starting camera: %dx%d -> %s", width, height, pix_fmt);
- res = camera_dev->startCapturing(width, height, org_fmt);
- if (res != NO_ERROR) {
- return res;
- }
- }
- }
-
- return res;
-}
-
-status_t EmulatedCamera::stopCamera()
-{
- LOGV("%s", __FUNCTION__);
-
- status_t res = NO_ERROR;
- EmulatedCameraDevice* const camera_dev = getCameraDevice();
- if (camera_dev != NULL) {
- if (camera_dev->isCapturing()) {
- res = camera_dev->stopCapturing();
- if (res != NO_ERROR) {
- return res;
- }
- }
- }
-
- return res;
-}
-
-
/****************************************************************************
* Private API.
***************************************************************************/
@@ -573,8 +613,9 @@
/* Stop and disconnect the camera device. */
EmulatedCameraDevice* const camera_dev = getCameraDevice();
if (camera_dev != NULL) {
- if (camera_dev->isCapturing()) {
- res = camera_dev->stopCapturing();
+ if (camera_dev->isStarted()) {
+ camera_dev->stopDeliveringFrames();
+ res = camera_dev->stopDevice();
if (res != NO_ERROR) {
return -res;
}
diff --git a/tools/emulator/system/camera/EmulatedCamera.h b/tools/emulator/system/camera/EmulatedCamera.h
index 77d16c9..8b04de2 100755
--- a/tools/emulator/system/camera/EmulatedCamera.h
+++ b/tools/emulator/system/camera/EmulatedCamera.h
@@ -290,18 +290,6 @@
*/
virtual status_t doStopPreview();
- /* Starts capturing frames
- * Return:
- * NO_ERROR on success, or an appropriate error status on failure.
- */
- virtual status_t startCamera();
-
- /* Stops capturing frames.
- * Return:
- * NO_ERROR on success, or an appropriate error status on failure.
- */
- virtual status_t stopCamera();
-
/****************************************************************************
* Private API.
***************************************************************************/
diff --git a/tools/emulator/system/camera/EmulatedCameraDevice.cpp b/tools/emulator/system/camera/EmulatedCameraDevice.cpp
index fb9c1da..e09bead 100755
--- a/tools/emulator/system/camera/EmulatedCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraDevice.cpp
@@ -72,16 +72,82 @@
return NO_ERROR;
}
-status_t EmulatedCameraDevice::startCapturing(int width,
- int height,
- uint32_t pix_fmt)
+status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
{
LOGV("%s", __FUNCTION__);
- Mutex::Autolock locker(&mObjectLock);
+ if (!isStarted()) {
+ LOGE("%s: Device is not started", __FUNCTION__);
+ return EINVAL;
+ }
+
+ /* Frames will be delivered from the thread routine. */
+ const status_t res = startWorkerThread(one_burst);
+ LOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
+ return res;
+}
+
+status_t EmulatedCameraDevice::stopDeliveringFrames()
+{
+ LOGV("%s", __FUNCTION__);
+
+ if (!isStarted()) {
+ LOGW("%s: Device is not started", __FUNCTION__);
+ return NO_ERROR;
+ }
+
+ const status_t res = stopWorkerThread();
+ LOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
+ return res;
+}
+
+status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
+{
+ if (!isStarted()) {
+ LOGE("%s: Device is not started", __FUNCTION__);
+ return EINVAL;
+ }
+ if (mCurrentFrame == NULL || buffer == NULL) {
+ LOGE("%s: No framebuffer", __FUNCTION__);
+ return EINVAL;
+ }
+
+ /* In emulation the framebuffer is never RGB. */
+ switch (mPixelFormat) {
+ case V4L2_PIX_FMT_YVU420:
+ YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
+ return NO_ERROR;
+ case V4L2_PIX_FMT_YUV420:
+ YU12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
+ return NO_ERROR;
+ case V4L2_PIX_FMT_NV21:
+ NV21ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
+ return NO_ERROR;
+ case V4L2_PIX_FMT_NV12:
+ NV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
+ return NO_ERROR;
+
+ default:
+ LOGE("%s: Unknown pixel format %.4s",
+ __FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
+ return EINVAL;
+ }
+}
+
+/****************************************************************************
+ * Emulated camera device private API
+ ***************************************************************************/
+
+status_t EmulatedCameraDevice::commonStartDevice(int width,
+ int height,
+ uint32_t pix_fmt)
+{
/* Validate pixel format, and calculate framebuffer size at the same time. */
switch (pix_fmt) {
case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_NV21:
+ case V4L2_PIX_FMT_NV12:
mFrameBufferSize = (width * height * 12) / 8;
break;
@@ -103,78 +169,20 @@
LOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
return ENOMEM;
}
- /* Calculate U/V panes inside the framebuffer. */
- mFrameU = mCurrentFrame + mTotalPixels;
- mFrameV = mFrameU + mTotalPixels / 4;
-
- /* Start the camera. */
- const status_t res = startDevice();
- if (res == NO_ERROR) {
- LOGD("Camera device is started:\n"
- " Framebuffer dimensions: %dx%d.\n"
- " Pixel format: %.4s",
- mFrameWidth, mFrameHeight,
- reinterpret_cast<const char*>(&mPixelFormat));
- } else {
- delete[] mCurrentFrame;
- mCurrentFrame = NULL;
- }
-
- return res;
-}
-
-status_t EmulatedCameraDevice::stopCapturing()
-{
- LOGV("%s", __FUNCTION__);
-
- Mutex::Autolock locker(&mObjectLock);
- /* Stop the camera. */
- const status_t res = stopDevice();
- if (res == NO_ERROR) {
- /* Release resources allocated for capturing. */
- if (mCurrentFrame != NULL) {
- delete[] mCurrentFrame;
- mCurrentFrame = NULL;
- }
- }
-
- return res;
-}
-
-status_t EmulatedCameraDevice::getCurrentFrame(void* buffer)
-{
- Mutex::Autolock locker(&mObjectLock);
-
- if (!isCapturing() || mCurrentFrame == NULL) {
- LOGE("%s is called on a device that is not in the capturing state",
- __FUNCTION__);
- return EINVAL;
- }
-
- memcpy(buffer, mCurrentFrame, mFrameBufferSize);
-
+ LOGV("%s: Allocated %p %d bytes for %d pixels in %.4s[%dx%d] frame",
+ __FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
+ reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
return NO_ERROR;
}
-status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
+void EmulatedCameraDevice::commonStopDevice()
{
- Mutex::Autolock locker(&mObjectLock);
+ mFrameWidth = mFrameHeight = mTotalPixels = 0;
+ mPixelFormat = 0;
- if (!isCapturing() || mCurrentFrame == NULL) {
- LOGE("%s is called on a device that is not in the capturing state",
- __FUNCTION__);
- return EINVAL;
- }
-
- /* In emulation the framebuffer is never RGB. */
- switch (mPixelFormat) {
- case V4L2_PIX_FMT_YVU420:
- YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
- return NO_ERROR;
-
- default:
- LOGE("%s: Unknown pixel format %d", __FUNCTION__, mPixelFormat);
- return EINVAL;
+ if (mCurrentFrame != NULL) {
+ delete[] mCurrentFrame;
+ mCurrentFrame = NULL;
}
}
@@ -182,7 +190,7 @@
* Worker thread management.
***************************************************************************/
-status_t EmulatedCameraDevice::startWorkerThread()
+status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
{
LOGV("%s", __FUNCTION__);
@@ -191,11 +199,9 @@
return EINVAL;
}
- const status_t ret = getWorkerThread()->startThread();
- LOGE_IF(ret != NO_ERROR, "%s: Unable to start worker thread: %d -> %s",
- __FUNCTION__, ret, strerror(ret));
-
- return ret;
+ const status_t res = getWorkerThread()->startThread(one_burst);
+ LOGE_IF(res != NO_ERROR, "%s: Unable to start worker thread", __FUNCTION__);
+ return res;
}
status_t EmulatedCameraDevice::stopWorkerThread()
@@ -207,14 +213,15 @@
return EINVAL;
}
- getWorkerThread()->stopThread();
-
- return NO_ERROR;
+ const status_t res = getWorkerThread()->stopThread();
+ LOGE_IF(res != NO_ERROR, "%s: Unable to stop worker thread", __FUNCTION__);
+ return res;
}
bool EmulatedCameraDevice::inWorkerThread()
{
- /* This will end the thread loop, and will terminate the thread. */
+ /* This will end the thread loop, and will terminate the thread. Derived
+ * classes must override this method. */
return false;
}
@@ -268,10 +275,10 @@
LOGV("Emulated camera device's worker thread has been stopped.");
} else {
LOGE("%s: requestExitAndWait failed: %d -> %s",
- __FUNCTION__, res, strerror(res));
+ __FUNCTION__, res, strerror(-res));
}
} else {
- LOGE("%s: Unable to send THREAD_STOP: %d -> %s",
+ LOGE("%s: Unable to send THREAD_STOP message: %d -> %s",
__FUNCTION__, errno, strerror(errno));
res = errno ? errno : EINVAL;
}
diff --git a/tools/emulator/system/camera/EmulatedCameraDevice.h b/tools/emulator/system/camera/EmulatedCameraDevice.h
index b73b863..88e2dd2 100755
--- a/tools/emulator/system/camera/EmulatedCameraDevice.h
+++ b/tools/emulator/system/camera/EmulatedCameraDevice.h
@@ -69,28 +69,30 @@
* NO_ERROR on success, or an appropriate error status. If this method is
* called for already disconnected, or uninitialized instance of this class,
* a successful status must be returned from this method. If this method is
- * called for an instance that is in "capturing" state, this method must
+ * called for an instance that is in the "started" state, this method must
* return a failure.
*/
virtual status_t disconnectDevice() = 0;
-protected:
- /* Starts capturing frames from the camera device.
- *
- * Typically, this method initializes the camera device with the settings
- * requested by the framework through the camera HAL, and starts a worker
- * thread that will listen to the physical device for available frames. When
- * new frame becomes available, it will be cached in current_framebuffer_,
- * and the containing emulated camera object will be notified via call to
- * its onNextFrameAvailable method. This method must be called on a
- * connected instance of this class. If it is called on a disconnected
- * instance, this method must return a failure.
+ /* Starts the camera device.
+ * This method tells the camera device to start capturing frames of the given
+ * dimensions for the given pixel format. Note that this method doesn't start
+ * the delivery of the captured frames to the emulated camera. Call
+ * startDeliveringFrames method to start delivering frames. This method must
+ * be called on a connected instance of this class. If it is called on a
+ * disconnected instance, this method must return a failure.
+ * Param:
+ * width, height - Frame dimensions to use when capturing video frames.
+ * pix_fmt - Pixel format to use when capturing video frames.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
- virtual status_t startDevice() = 0;
+ virtual status_t startDevice(int width, int height, uint32_t pix_fmt) = 0;
- /* Stops capturing frames from the camera device.
+ /* Stops the camera device.
+ * This method tells the camera device to stop capturing frames. Note that
+ * this method doesn't stop delivering frames to the emulated camera. Always
+ * call stopDeliveringFrames prior to calling this method.
* Return:
* NO_ERROR on success, or an appropriate error status. If this method is
* called for an object that is not capturing frames, or is disconnected,
@@ -114,79 +116,95 @@
*/
virtual status_t Initialize();
- /* Starts capturing frames from the camera device.
- *
- * Typically, this method caches desired frame parameters, and calls
- * startDevice method to start capturing video frames from the camera
- * device. This method must be called on a connected instance of this class.
- * If it is called on a disconnected instance, this method must return a
- * failure.
+ /* Starts delivering frames captured from the camera device.
+ * This method will start the worker thread that would be pulling frames from
+ * the camera device, and will deliver the pulled frames back to the emulated
+ * camera via onNextFrameAvailable callback. This method must be called on a
+ * connected instance of this class with a started camera device. If it is
+ * called on a disconnected instance, or camera device has not been started,
+ * this method must return a failure.
+ * Param:
+ * one_burst - Controls how many frames should be delivered. If this
+ * parameter is 'true', only one captured frame will be delivered to the
+ * emulated camera. If this parameter is 'false', frames will keep
+ * coming until stopDeliveringFrames method is called. Typically, this
+ * parameter is set to 'true' only in order to obtain a single frame
+ * that will be used as a "picture" in takePicture method of the
+ * emulated camera.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
- virtual status_t startCapturing(int width, int height, uint32_t pix_fmt);
+ virtual status_t startDeliveringFrames(bool one_burst);
- /* Stops capturing frames from the camera device.
- *
- * Typically, this method calls stopDevice method of this class, and
- * uninitializes frame properties, saved in StartCapturing method of this
- * class.
- * This method must be called on a connected instance of this class. If it
- * is called on a disconnected instance, this method must return a failure.
+ /* Stops delivering frames captured from the camera device.
+ * This method will stop the worker thread started by startDeliveringFrames.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
- virtual status_t stopCapturing();
+ virtual status_t stopDeliveringFrames();
- /* Gets current fame into provided buffer.
- * Typically, this method is called by the emulated camera (HAL) in response
- * to a callback from the emulated camera device that gets invoked when new
- * captured frame is available.
- * This method must be called on an instance that is capturing frames from
- * the physical device. If this method is called on an instance that is not
- * capturing frames from the physical device, it must return a failure.
+ /* Gets current framebuffer, converted into preview frame format.
+ * This method must be called on a connected instance of this class with a
+ * started camera device. If it is called on a disconnected instance, or
+ * camera device has not been started, this method must return a failure.
+ * Note that this method should be called only after at least one frame has
+ * been captured and delivered. Otherwise it will return garbage in the
+ * preview frame buffer. Typically, this method should be called from
+ * onNextFrameAvailable callback.
* Param:
- * buffer - A buffer where to return the frame. Note that the buffer must be
- * large enough to contain the entire frame, as defined by frame's width,
- * height, and pixel format that are current for the camera device.
- */
- virtual status_t getCurrentFrame(void* buffer);
-
- /* Gets current preview fame into provided buffer.
- * Param:
- * buffer - A buffer where to return the preview frame. Note that the buffer
- * must be large enough to contain the entire preview frame, as defined
- * by frame's width, height, and preview pixel format. Note also, that
- * due to the the limitations of the camera framework in emulator, the
- * preview frame is always formatted with RGBA8888.
+ * buffer - Buffer, large enough to contain the entire preview frame.
+ * Return:
+ * NO_ERROR on success, or an appropriate error status.
*/
virtual status_t getCurrentPreviewFrame(void* buffer);
- /* Gets width of the frame obtained from the physical device. */
+ /* Gets width of the frame obtained from the physical device.
+ * Return:
+ * Width of the frame obtained from the physical device. Note that value
+ * returned from this method is valid only in case if camera device has been
+ * started.
+ */
inline int getFrameWidth() const
{
+ LOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameWidth;
}
- /* Gets height of the frame obtained from the physical device. */
+ /* Gets height of the frame obtained from the physical device.
+ * Return:
+ * Height of the frame obtained from the physical device. Note that value
+ * returned from this method is valid only in case if camera device has been
+ * started.
+ */
inline int getFrameHeight() const
{
+ LOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameHeight;
}
- /* Gets byte size of the current frame buffer. */
+ /* Gets byte size of the current frame buffer.
+ * Return:
+ * Byte size of the frame buffer. Note that value returned from this method
+ * is valid only in case if camera device has been started.
+ */
inline size_t getFrameBufferSize() const
{
+ LOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mFrameBufferSize;
}
- /* Gets number of pixels in the current frame buffer. */
+ /* Gets number of pixels in the current frame buffer.
+ * Return:
+ * Number of pixels in the frame buffer. Note that value returned from this
+ * method is valid only in case if camera device has been started.
+ */
inline int getPixelNum() const
{
+ LOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mTotalPixels;
}
- /* Gets pixel format of the frame that physical device streams.
+ /* Gets pixel format of the frame that camera device streams to this class.
* Throughout camera framework, there are three different forms of pixel
* format representation:
* - Original format, as reported by the actual camera device. Values for
@@ -198,17 +216,17 @@
* pixel format in the original form. And that's the pixel format
* representation that will be returned from this method. HAL components will
* need to translate value returned from this method to the appropriate form.
- * This method must be called only on connected instance of this class, since
- * it's applicable only when physical device is ready to stream frames. If
- * this method is called on an instance that is not connected, it must return
- * a failure.
+ * This method must be called only on started instance of this class, since
+ * it's applicable only when camera device is ready to stream frames.
* Param:
* pix_fmt - Upon success contains the original pixel format.
* Return:
- * Current framebuffer's pixel format.
+ * Current framebuffer's pixel format. Note that value returned from this
+ * method is valid only in case if camera device has been started.
*/
inline uint32_t getOriginalPixelFormat() const
{
+ LOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
return mPixelFormat;
}
@@ -222,16 +240,32 @@
return mWorkerThread.get() != NULL && mState != ECDS_CONSTRUCTED;
}
inline bool isConnected() const {
- /* Instance is connected when it is initialized and its status is either
- * "connected", or "capturing". */
- return isInitialized() &&
- (mState == ECDS_CONNECTED || mState == ECDS_CAPTURING);
+ /* Instance is connected when its status is either "connected" or
+ * "started". */
+ return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
}
- inline bool isCapturing() const {
- return isInitialized() && mState == ECDS_CAPTURING;
+ inline bool isStarted() const {
+ return mState == ECDS_STARTED;
}
/****************************************************************************
+ * Emulated camera device private API
+ ***************************************************************************/
+protected:
+ /* Performs common validation and calculation of startDevice parameters.
+ * Param:
+ * width, height, pix_fmt - Parameters passed to the startDevice method.
+ * Return:
+ * NO_ERROR on success, or an appropriate error status.
+ */
+ virtual status_t commonStartDevice(int width, int height, uint32_t pix_fmt);
+
+ /* Performs common cleanup on stopDevice.
+ * This method will undo what commonStartDevice had done.
+ */
+ virtual void commonStopDevice();
+
+ /****************************************************************************
* Worker thread management.
* Typicaly when emulated camera device starts capturing frames from the
* actual device, it does that in a worker thread created in StartCapturing,
@@ -242,15 +276,22 @@
protected:
/* Starts the worker thread.
- * Typically, worker thread is started from StartCamera method of this
- * class.
+ * Typically, worker thread is started from startDeliveringFrames method of
+ * this class.
+ * Param:
+ * one_burst - Controls how many times thread loop should run. If this
+ * parameter is 'true', thread routine will run only once If this
+ * parameter is 'true', thread routine will run only once. If this
+ * method is called. See startDeliveringFrames for more info.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
- virtual status_t startWorkerThread();
+ virtual status_t startWorkerThread(bool one_burst);
/* Stops the worker thread.
* Note that this method will always wait for the worker thread to terminate.
+ * Typically, the worker thread is stopped from the stopDeliveringFrames method of
+ * this class.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
@@ -260,7 +301,7 @@
* In the default implementation of the worker thread routine we simply
* return 'false' forcing the thread loop to exit, and the thread to
* terminate. Derived class should override that method to provide there the
- * actual frame capturing functionality.
+ * actual frame delivery.
* Return:
* true To continue thread loop (this method will be called again), or false
* to exit the thread loop and to terminate the thread.
@@ -298,9 +339,19 @@
}
}
- /* Starts the thread */
- inline status_t startThread()
+ /* Starts the thread
+ * Param:
+ * one_burst - Controls how many times thread loop should run. If
+ * this parameter is 'true', thread routine will run only once.
+ * If this parameter is 'false', thread routine will run until
+ * stopThread method is called. See startWorkerThread for more
+ * info.
+ * Return:
+ * NO_ERROR on success, or an appropriate error status.
+ */
+ inline status_t startThread(bool one_burst)
{
+ mOneBurst = one_burst;
return run(NULL, ANDROID_PRIORITY_URGENT_DISPLAY, 0);
}
@@ -343,10 +394,15 @@
private:
/* Implements abstract method of the base Thread class. */
- inline bool threadLoop()
+ bool threadLoop()
{
/* Simply dispatch the call to the containing camera device. */
- return mCameraDevice->inWorkerThread();
+ if (mCameraDevice->inWorkerThread()) {
+ /* Respect "one burst" parameter (see startThread). */
+ return !mOneBurst;
+ } else {
+ return false;
+ }
}
/* Containing camera device object. */
@@ -358,6 +414,10 @@
/* FD that thread uses to receive control messages. */
int mControlFD;
+ /* Controls number of times the thread loop runs.
+ * See startThread for more information. */
+ bool mOneBurst;
+
/* Enumerates control messages that can be sent into the thread. */
enum ControlMessage {
/* Stop the thread. */
@@ -391,12 +451,6 @@
/* Framebuffer containing the current frame. */
uint8_t* mCurrentFrame;
- /* U panel inside the framebuffer. */
- uint8_t* mFrameU;
-
- /* V panel inside the framebuffer. */
- uint8_t* mFrameV;
-
/*
* Framebuffer properties.
*/
@@ -426,8 +480,8 @@
ECDS_INITIALIZED,
/* Object has been connected to the physical device. */
ECDS_CONNECTED,
- /* Frames are being captured. */
- ECDS_CAPTURING,
+ /* Camera device has been started. */
+ ECDS_STARTED,
};
/* Object state. */
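
The net effect of this header change is that the single startCapturing/stopCapturing pair is split into device start/stop plus a separate frame-delivery start/stop, with ECDS_STARTED replacing ECDS_CAPTURING. A condensed sketch of the call order the emulated camera follows against this interface (method names are taken from the diff; error handling is trimmed and the helper itself is hypothetical, not part of the HAL):

    // Sketch only; assumes 'dev' is a concrete, accessible EmulatedCameraDevice.
    status_t runOneSession(EmulatedCameraDevice* dev,
                           int width, int height, uint32_t pix_fmt,
                           bool single_shot) {
        if (!dev->isConnected()) {
            const status_t conn = dev->connectDevice();            // -> ECDS_CONNECTED
            if (conn != NO_ERROR) return conn;
        }
        status_t res = dev->startDevice(width, height, pix_fmt);   // -> ECDS_STARTED
        if (res != NO_ERROR) return res;

        // 'true' delivers a single frame (takePicture); 'false' streams (preview).
        res = dev->startDeliveringFrames(single_shot);
        if (res != NO_ERROR) {
            dev->stopDevice();
            return res;
        }

        /* ... frames arrive via onNextFrameAvailable() callbacks ... */

        dev->stopDeliveringFrames();      // always before stopDevice()
        return dev->stopDevice();
    }
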
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.cpp b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
index d6ba9b4..5c5c5de 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.cpp
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.cpp
@@ -22,6 +22,7 @@
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Factory"
#include <cutils/log.h>
+#include <cutils/properties.h>
#include "EmulatedQemuCamera.h"
#include "EmulatedFakeCamera.h"
#include "EmulatedCameraFactory.h"
@@ -43,9 +44,8 @@
mConstructedOK(false)
{
- /* If qemu camera emulation is on, try to connect to the factory service in
- * the emulator. */
- if (isQemuCameraEmulationOn() && mQemuClient.connectClient(NULL) == NO_ERROR) {
+ /* Connect to the factory service in the emulator, and create Qemu cameras. */
+ if (mQemuClient.connectClient(NULL) == NO_ERROR) {
/* Connection has succeeded. Create emulated cameras for each camera
* device, reported by the service. */
createQemuCameras();
@@ -82,6 +82,8 @@
mFakeCameraID = -1;
LOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
}
+ } else {
+ LOGD("Fake camera emulation is disabled.");
}
LOGV("%d cameras are being emulated. Fake camera ID is %d",
@@ -121,7 +123,7 @@
return -EINVAL;
}
- if (camera_id >= getEmulatedCameraNum()) {
+ if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
LOGE("%s: Camera id %d is out of bounds (%d)",
__FUNCTION__, camera_id, getEmulatedCameraNum());
return -EINVAL;
@@ -139,7 +141,7 @@
return -EINVAL;
}
- if (camera_id >= getEmulatedCameraNum()) {
+ if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
LOGE("%s: Camera id %d is out of bounds (%d)",
__FUNCTION__, camera_id, getEmulatedCameraNum());
return -EINVAL;
@@ -197,6 +199,8 @@
static const char lListNameToken[] = "name=";
/* Frame dimensions token. */
static const char lListDimsToken[] = "framedims=";
+/* Facing direction token. */
+static const char lListDirToken[] = "dir=";
void EmulatedCameraFactory::createQemuCameras()
{
@@ -252,13 +256,15 @@
next_entry++; // Start of the next entry.
}
- /* Find 'name', and 'framedims' tokens that are required here. */
+ /* Find 'name', 'framedims', and 'dir' tokens that are required here. */
char* name_start = strstr(cur_entry, lListNameToken);
char* dim_start = strstr(cur_entry, lListDimsToken);
- if (name_start != NULL && dim_start != NULL) {
+ char* dir_start = strstr(cur_entry, lListDirToken);
+ if (name_start != NULL && dim_start != NULL && dir_start != NULL) {
/* Advance to the token values. */
name_start += strlen(lListNameToken);
dim_start += strlen(lListDimsToken);
+ dir_start += strlen(lListDirToken);
/* Terminate token values with zero. */
char* s = strchr(name_start, ' ');
@@ -269,12 +275,16 @@
if (s != NULL) {
*s = '\0';
}
+ s = strchr(dir_start, ' ');
+ if (s != NULL) {
+ *s = '\0';
+ }
/* Create and initialize qemu camera. */
EmulatedQemuCamera* qemu_cam =
new EmulatedQemuCamera(index, &HAL_MODULE_INFO_SYM.common);
if (NULL != qemu_cam) {
- res = qemu_cam->Initialize(name_start, dim_start);
+ res = qemu_cam->Initialize(name_start, dim_start, dir_start);
if (res == NO_ERROR) {
mEmulatedCameras[index] = qemu_cam;
index++;
@@ -295,16 +305,17 @@
mEmulatedCameraNum = index;
}
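
For reference, a standalone sketch of how one entry of the factory 'list' reply is tokenized; the entry string and device name below are hypothetical, but the token names match the parser above:

    #include <cstdio>
    #include <cstring>

    int main() {
        /* Hypothetical 'list' reply entry; 'webcam0' and the dimensions are made up. */
        char entry[] = "name=webcam0 channel=0 framedims=640x480,352x288 dir=back";
        static const char kDirToken[] = "dir=";
        char* dir = strstr(entry, kDirToken);
        if (dir != NULL) {
            dir += strlen(kDirToken);
            char* end = strchr(dir, ' ');
            if (end != NULL) {
                *end = '\0';             /* Terminate the token value, as createQemuCameras does. */
            }
            printf("facing: %s\n", dir); /* Prints "facing: back". */
        }
        return 0;
    }
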
-bool EmulatedCameraFactory::isQemuCameraEmulationOn()
-{
- /* TODO: Have a boot property that controls that! */
- return true;
-}
-
bool EmulatedCameraFactory::isFakeCameraEmulationOn()
{
- /* TODO: Have a boot property that controls that! */
- return true;
+ /* Defined by the 'qemu.sf.fake_camera' boot property: if the property is present
+ * and contains 'off', fake camera emulation is disabled. */
+ char prop[PROPERTY_VALUE_MAX];
+ if (property_get("qemu.sf.fake_camera", prop, NULL) <= 0 ||
+ strcmp(prop, "off")) {
+ return true;
+ } else {
+ return false;
+ }
}
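
A minimal sketch of the decision rule implemented above, assuming the property value has already been read; note that the same property also carries the fake camera facing (see EmulatedFakeCamera.cpp below), so any value other than 'off' keeps emulation enabled:

    #include <cstring>

    /* Hypothetical helper: returns true when fake camera emulation should be on.
     * 'prop_value' is NULL or empty when the boot property is not set. */
    static bool isFakeCameraEnabled(const char* prop_value) {
        return prop_value == NULL || *prop_value == '\0' ||
               strcmp(prop_value, "off") != 0;
    }
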
/********************************************************************************
diff --git a/tools/emulator/system/camera/EmulatedCameraFactory.h b/tools/emulator/system/camera/EmulatedCameraFactory.h
index 1e40d82..19745a3 100755
--- a/tools/emulator/system/camera/EmulatedCameraFactory.h
+++ b/tools/emulator/system/camera/EmulatedCameraFactory.h
@@ -94,11 +94,6 @@
***************************************************************************/
public:
- /* Gets fake camera facing. */
- int getFakeCameraFacing() {
- /* TODO: Have a boot property that controls that. */
- return CAMERA_FACING_BACK;
- }
/* Gets fake camera orientation. */
int getFakeCameraOrientation() {
@@ -106,12 +101,6 @@
return 90;
}
- /* Gets qemu camera facing. */
- int getQemuCameraFacing() {
- /* TODO: Have a boot property that controls that. */
- return CAMERA_FACING_FRONT;
- }
-
/* Gets qemu camera orientation. */
int getQemuCameraOrientation() {
/* TODO: Have a boot property that controls that. */
@@ -142,9 +131,6 @@
*/
void createQemuCameras();
- /* Checks if qemu camera emulation is on. */
- bool isQemuCameraEmulationOn();
-
/* Checks if fake camera emulation is on. */
bool isFakeCameraEmulationOn();
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera.cpp b/tools/emulator/system/camera/EmulatedFakeCamera.cpp
index 84828cf..d82fd78 100755
--- a/tools/emulator/system/camera/EmulatedFakeCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera.cpp
@@ -22,6 +22,7 @@
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeCamera"
#include <cutils/log.h>
+#include <cutils/properties.h>
#include "EmulatedFakeCamera.h"
#include "EmulatedCameraFactory.h"
@@ -48,11 +49,13 @@
return res;
}
- const char* facing = EmulatedCamera::FACING_BACK;
- if (gEmulatedCameraFactory.getFakeCameraOrientation() == CAMERA_FACING_FRONT) {
- facing = EmulatedCamera::FACING_FRONT;
- }
+ /* Fake camera facing is defined by the qemu.sf.fake_camera boot property. */
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("qemu.sf.fake_camera", prop, EmulatedCamera::FACING_BACK);
+ const char* facing = prop;
+
mParameters.set(EmulatedCamera::FACING_KEY, facing);
+ LOGD("%s: Fake camera is facing %s", __FUNCTION__, facing);
mParameters.set(EmulatedCamera::ORIENTATION_KEY,
gEmulatedCameraFactory.getFakeCameraOrientation());
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera.h b/tools/emulator/system/camera/EmulatedFakeCamera.h
index f8a8099..3debe9e 100755
--- a/tools/emulator/system/camera/EmulatedFakeCamera.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera.h
@@ -45,11 +45,7 @@
***************************************************************************/
public:
- /* Initializes EmulatedFakeCamera instance.
- * The contained EmulatedFakeCameraDevice will be initialized in this method.
- * Return:
- * NO_ERROR on success, or an appropriate error statsu on failure.
- */
+ /* Initializes EmulatedFakeCamera instance. */
status_t Initialize();
/****************************************************************************
diff --git a/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp b/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
index a6c97c6..53a5b1b 100755
--- a/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCameraDevice.cpp
@@ -34,9 +34,15 @@
mRedYUV(kRed8),
mGreenYUV(kGreen8),
mBlueYUV(kBlue8),
+ mLastRedrawn(0),
mCheckX(0),
mCheckY(0),
mCcounter(0)
+#if EFCD_ROTATE_FRAME
+ , mLastRotatedAt(0),
+ mCurrentFrameType(0),
+ mCurrentColor(&mWhiteYUV)
+#endif // EFCD_ROTATE_FRAME
{
}
@@ -62,6 +68,7 @@
return NO_ERROR;
}
+ /* There is no device to connect to. */
mState = ECDS_CONNECTED;
return NO_ERROR;
@@ -76,57 +83,99 @@
LOGW("%s: Fake camera device is already disconnected.", __FUNCTION__);
return NO_ERROR;
}
- if (isCapturing()) {
- LOGE("%s: Cannot disconnect while in the capturing state.", __FUNCTION__);
+ if (isStarted()) {
+ LOGE("%s: Cannot disconnect from the started device.", __FUNCTION__);
return EINVAL;
}
+ /* There is no device to disconnect from. */
mState = ECDS_INITIALIZED;
return NO_ERROR;
}
-status_t EmulatedFakeCameraDevice::startDevice()
+status_t EmulatedFakeCameraDevice::startDevice(int width,
+ int height,
+ uint32_t pix_fmt)
{
LOGV("%s", __FUNCTION__);
+ Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
LOGE("%s: Fake camera device is not connected.", __FUNCTION__);
return EINVAL;
}
- if (isCapturing()) {
- LOGW("%s: Fake camera device is already capturing.", __FUNCTION__);
- return NO_ERROR;
+ if (isStarted()) {
+ LOGE("%s: Fake camera device is already started.", __FUNCTION__);
+ return EINVAL;
}
- /* Used in calculating U/V position when drawing the square. */
- mHalfWidth = mFrameWidth / 2;
+ /* Initialize the base class. */
+ const status_t res =
+ EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
+ if (res == NO_ERROR) {
+ /* Calculate U/V panes inside the framebuffer. */
+ switch (mPixelFormat) {
+ case V4L2_PIX_FMT_YVU420:
+ mFrameV = mCurrentFrame + mTotalPixels;
+ mFrameU = mFrameV + mTotalPixels / 4;
+ mUVStep = 1;
+ mUVTotalNum = mTotalPixels / 4;
+ break;
- /* Just start the worker thread: there is no real device to deal with. */
- const status_t ret = startWorkerThread();
- if (ret == NO_ERROR) {
- mState = ECDS_CAPTURING;
+ case V4L2_PIX_FMT_YUV420:
+ mFrameU = mCurrentFrame + mTotalPixels;
+ mFrameV = mFrameU + mTotalPixels / 4;
+ mUVStep = 1;
+ mUVTotalNum = mTotalPixels / 4;
+ break;
+
+ case V4L2_PIX_FMT_NV21:
+ /* Interleaved UV pane, V first. */
+ mFrameV = mCurrentFrame + mTotalPixels;
+ mFrameU = mFrameV + 1;
+ mUVStep = 2;
+ mUVTotalNum = mTotalPixels / 4;
+ break;
+
+ case V4L2_PIX_FMT_NV12:
+ /* Interleaved UV pane, U first. */
+ mFrameU = mCurrentFrame + mTotalPixels;
+ mFrameV = mFrameU + 1;
+ mUVStep = 2;
+ mUVTotalNum = mTotalPixels / 4;
+ break;
+
+ default:
+ LOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
+ reinterpret_cast<const char*>(&mPixelFormat));
+ return EINVAL;
+ }
+ /* Number of items in a single row inside U/V panes. */
+ mUVInRow = (width / 2) * mUVStep;
+ mState = ECDS_STARTED;
+ } else {
+ LOGE("%s: commonStartDevice failed", __FUNCTION__);
}
- return ret;
+ return res;
}
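
As a worked example of the pane setup above, take a hypothetical 640x480 frame in NV21: the Y pane occupies 640*480 = 307200 bytes, the interleaved VU pane follows it, and the U/V bookkeeping comes out as follows:

    // Illustrative arithmetic only (not part of the device code).
    const int width = 640, height = 480;
    const int totalPixels = width * height;      // 307200 luma bytes
    const int vOffset = totalPixels;             // V pane starts right after Y (NV21: V first)
    const int uOffset = vOffset + 1;             // U values interleaved with V
    const int uvStep = 2;                        // byte distance between adjacent U (or V) values
    const int uvTotalNum = totalPixels / 4;      // 76800 U values (and as many V values)
    const int uvInRow = (width / 2) * uvStep;    // 640 interleaved U/V bytes per chroma row
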
status_t EmulatedFakeCameraDevice::stopDevice()
{
LOGV("%s", __FUNCTION__);
- if (!isCapturing()) {
- LOGW("%s: Fake camera device is not capturing.", __FUNCTION__);
+ Mutex::Autolock locker(&mObjectLock);
+ if (!isStarted()) {
+ LOGW("%s: Fake camera device is not started.", __FUNCTION__);
return NO_ERROR;
}
- /* Just stop the worker thread: there is no real device to deal with. */
- const status_t ret = stopWorkerThread();
- if (ret == NO_ERROR) {
- mState = ECDS_CONNECTED;
- }
+ mFrameU = mFrameV = NULL;
+ EmulatedCameraDevice::commonStopDevice();
+ mState = ECDS_CONNECTED;
- return ret;
+ return NO_ERROR;
}
/****************************************************************************
@@ -144,23 +193,31 @@
}
/* Lets see if we need to generate a new frame. */
- if ((systemTime(SYSTEM_TIME_MONOTONIC) - mCurFrameTimestamp) >= mRedrawAfter) {
+ if ((systemTime(SYSTEM_TIME_MONOTONIC) - mLastRedrawn) >= mRedrawAfter) {
/*
* Time to generate a new frame.
*/
+#if EFCD_ROTATE_FRAME
+ const int frame_type = rotateFrame();
+ switch (frame_type) {
+ case 0:
+ drawCheckerboard();
+ break;
+ case 1:
+ drawStripes();
+ break;
+ case 2:
+ drawSolid(mCurrentColor);
+ break;
+ }
+#else
/* Draw the checker board. */
drawCheckerboard();
- /* Run the square. */
- int x = ((mCcounter * 3) & 255);
- if(x > 128) x = 255 - x;
- int y = ((mCcounter * 5) & 255);
- if(y > 128) y = 255 - y;
- const int size = mFrameWidth / 10;
- drawSquare(x * size / 32, y * size / 32, (size * 5) >> 1,
- (mCcounter & 0x100) ? &mRedYUV : &mGreenYUV);
- mCcounter++;
+#endif // EFCD_ROTATE_FRAME
+
+ mLastRedrawn = systemTime(SYSTEM_TIME_MONOTONIC);
}
/* Timestamp the current frame, and notify the camera HAL about new frame. */
@@ -202,7 +259,7 @@
mWhiteYUV.get(Y, U, V);
}
Y[1] = *Y;
- Y += 2; U++; V++;
+ Y += 2; U += mUVStep; V += mUVStep;
countx += 2;
if(countx >= size) {
countx = 0;
@@ -223,6 +280,16 @@
}
mCheckX += 3;
mCheckY++;
+
+ /* Run the square. */
+ int sqx = ((mCcounter * 3) & 255);
+ if(sqx > 128) sqx = 255 - sqx;
+ int sqy = ((mCcounter * 5) & 255);
+ if(sqy > 128) sqy = 255 - sqy;
+ const int sqsize = mFrameWidth / 10;
+ drawSquare(sqx * sqsize / 32, sqy * sqsize / 32, (sqsize * 5) >> 1,
+ (mCcounter & 0x100) ? &mRedYUV : &mGreenYUV);
+ mCcounter++;
}
void EmulatedFakeCameraDevice::drawSquare(int x,
@@ -230,24 +297,115 @@
int size,
const YUVPixel* color)
{
- const int half_x = x / 2;
- const int square_xstop = min(mFrameWidth, x+size);
- const int square_ystop = min(mFrameHeight, y+size);
+ const int square_xstop = min(mFrameWidth, x + size);
+ const int square_ystop = min(mFrameHeight, y + size);
uint8_t* Y_pos = mCurrentFrame + y * mFrameWidth + x;
// Draw the square.
for (; y < square_ystop; y++) {
- const int iUV = (y / 2) * mHalfWidth + half_x;
+ const int iUV = (y / 2) * mUVInRow + (x / 2) * mUVStep;
uint8_t* sqU = mFrameU + iUV;
uint8_t* sqV = mFrameV + iUV;
uint8_t* sqY = Y_pos;
for (int i = x; i < square_xstop; i += 2) {
color->get(sqY, sqU, sqV);
sqY[1] = *sqY;
- sqY += 2; sqU++; sqV++;
+ sqY += 2; sqU += mUVStep; sqV += mUVStep;
}
Y_pos += mFrameWidth;
}
}
+#if EFCD_ROTATE_FRAME
+
+void EmulatedFakeCameraDevice::drawSolid(YUVPixel* color)
+{
+ /* All Ys are the same. */
+ memset(mCurrentFrame, color->Y, mTotalPixels);
+
+ /* Fill U, and V panes. */
+ uint8_t* U = mFrameU;
+ uint8_t* V = mFrameV;
+ for (int k = 0; k < mUVTotalNum; k++, U += mUVStep, V += mUVStep) {
+ *U = color->U;
+ *V = color->V;
+ }
+}
+
+void EmulatedFakeCameraDevice::drawStripes()
+{
+ /* Divide frame into 4 stripes. */
+ const int change_color_at = mFrameHeight / 4;
+ const int each_in_row = mUVInRow / mUVStep;
+ uint8_t* pY = mCurrentFrame;
+ for (int y = 0; y < mFrameHeight; y++, pY += mFrameWidth) {
+ /* Select the color. */
+ YUVPixel* color;
+ const int color_index = y / change_color_at;
+ if (color_index == 0) {
+ /* White stripe on top. */
+ color = &mWhiteYUV;
+ } else if (color_index == 1) {
+ /* Then the red stripe. */
+ color = &mRedYUV;
+ } else if (color_index == 2) {
+ /* Then the green stripe. */
+ color = &mGreenYUV;
+ } else {
+ /* And the blue stripe at the bottom. */
+ color = &mBlueYUV;
+ }
+
+ /* All Ys at the row are the same. */
+ memset(pY, color->Y, mFrameWidth);
+
+ /* Offset of the current row inside U/V panes. */
+ const int uv_off = (y / 2) * mUVInRow;
+ /* Fill U, and V panes. */
+ uint8_t* U = mFrameU + uv_off;
+ uint8_t* V = mFrameV + uv_off;
+ for (int k = 0; k < each_in_row; k++, U += mUVStep, V += mUVStep) {
+ *U = color->U;
+ *V = color->V;
+ }
+ }
+}
+
+int EmulatedFakeCameraDevice::rotateFrame()
+{
+ if ((systemTime(SYSTEM_TIME_MONOTONIC) - mLastRotatedAt) >= mRotateFreq) {
+ mLastRotatedAt = systemTime(SYSTEM_TIME_MONOTONIC);
+ mCurrentFrameType++;
+ if (mCurrentFrameType > 2) {
+ mCurrentFrameType = 0;
+ }
+ if (mCurrentFrameType == 2) {
+ LOGD("********** Rotated to the SOLID COLOR frame **********");
+ /* Solid color: lets rotate color too. */
+ if (mCurrentColor == &mWhiteYUV) {
+ LOGD("----- Painting a solid RED frame -----");
+ mCurrentColor = &mRedYUV;
+ } else if (mCurrentColor == &mRedYUV) {
+ LOGD("----- Painting a solid GREEN frame -----");
+ mCurrentColor = &mGreenYUV;
+ } else if (mCurrentColor == &mGreenYUV) {
+ LOGD("----- Painting a solid BLUE frame -----");
+ mCurrentColor = &mBlueYUV;
+ } else {
+ /* Back to white. */
+ LOGD("----- Painting a solid WHITE frame -----");
+ mCurrentColor = &mWhiteYUV;
+ }
+ } else if (mCurrentFrameType == 0) {
+ LOGD("********** Rotated to the CHECKERBOARD frame **********");
+ } else {
+ LOGD("********** Rotated to the STRIPED frame **********");
+ }
+ }
+
+ return mCurrentFrameType;
+}
+
+#endif // EFCD_ROTATE_FRAME
+
}; /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCameraDevice.h b/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
index c9f13ea..f54127e 100755
--- a/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
+++ b/tools/emulator/system/camera/EmulatedFakeCameraDevice.h
@@ -25,6 +25,15 @@
#include "Converters.h"
#include "EmulatedCameraDevice.h"
+/* This is used for debugging format / conversion issues. If EFCD_ROTATE_FRAME is
+ * set to 0, the frame content will always be the "checkerboard". Otherwise, if
+ * EFCD_ROTATE_FRAME is set to a non-zero value, the frame content will "rotate"
+ * from a "checkerboard" frame to a "white/red/green/blue stripes" frame, to a
+ * "white/red/green/blue" frame. Frame content rotation helps finding bugs in
+ * format conversions.
+ */
+#define EFCD_ROTATE_FRAME 1
+
namespace android {
class EmulatedFakeCamera;
@@ -62,19 +71,15 @@
*/
status_t disconnectDevice();
-protected:
- /* Starts capturing frames from the camera device.
- * Since there is no real device to control, this method simply starts the
- * worker thread, and changes the state.
- */
- status_t startDevice();
+ /* Starts the camera device. */
+ status_t startDevice(int width, int height, uint32_t pix_fmt);
- /* Stops capturing frames from the camera device.
- * Since there is no real device to control, this method simply stops the
- * worker thread, and changes the state.
- */
+ /* Stops the camera device. */
status_t stopDevice();
+ /* Gets the current preview frame into the provided buffer. */
+ status_t getPreviewFrame(void* buffer);
+
/***************************************************************************
* Worker thread management overrides.
* See declarations of these methods in EmulatedCameraDevice class for
@@ -83,8 +88,8 @@
protected:
/* Implementation of the worker thread routine.
- * This method simply sleeps for a period of time defined by FPS property of
- * the fake camera (simulating frame frequency), and then calls emulated
+ * This method simply sleeps for a period of time defined by the FPS property
+ * of the fake camera (simulating frame frequency), and then calls emulated
* camera's onNextFrameAvailable method.
*/
bool inWorkerThread();
@@ -105,6 +110,12 @@
*/
void drawSquare(int x, int y, int size, const YUVPixel* color);
+#if EFCD_ROTATE_FRAME
+ void drawSolid(YUVPixel* color);
+ void drawStripes();
+ int rotateFrame();
+#endif // EFCD_ROTATE_FRAME
+
/****************************************************************************
* Fake camera device data members
***************************************************************************/
@@ -120,14 +131,37 @@
YUVPixel mGreenYUV;
YUVPixel mBlueYUV;
+ /* Last time the frame has been redrawn. */
+ nsecs_t mLastRedrawn;
+
/*
- * Drawing related stuff
+ * Precalculated values related to U/V panes.
+ */
+
+ /* U pane inside the framebuffer. */
+ uint8_t* mFrameU;
+
+ /* V pane inside the framebuffer. */
+ uint8_t* mFrameV;
+
+ /* Defines byte distance between adjacent U, and V values. */
+ int mUVStep;
+
+ /* Defines number of Us and Vs in a row inside the U/V panes.
+ * Note that if U/V panes are interleaved, this value reflects the total
+ * number of both Us and Vs in a single row of the interleaved UV pane. */
+ int mUVInRow;
+
+ /* Total number of U (and V) elements in the framebuffer. */
+ int mUVTotalNum;
+
+ /*
+ * Checkerboard drawing related stuff
*/
int mCheckX;
int mCheckY;
int mCcounter;
- int mHalfWidth;
/* Emulated FPS (frames per second).
* We will emulate 50 FPS. */
@@ -136,6 +170,25 @@
/* Defines time (in nanoseconds) between redrawing the checker board.
* We will redraw the checker board every 15 milliseconds. */
static const nsecs_t mRedrawAfter = 15000000LL;
+
+#if EFCD_ROTATE_FRAME
+ /* Frame rotation frequency in nanoseconds (currently 3 seconds). */
+ static const nsecs_t mRotateFreq = 3000000000LL;
+
+ /* Last time the frame has rotated. */
+ nsecs_t mLastRotatedAt;
+
+ /* Type of the frame to display in the current rotation:
+ * 0 - Checkerboard.
+ * 1 - White/Red/Green/Blue horizontal stripes.
+ * 2 - Solid color. */
+ int mCurrentFrameType;
+
+ /* Color to use to paint the solid color frame. Colors will rotate between
+ * white, red, green, and blue each time rotation comes to the solid color
+ * frame. */
+ YUVPixel* mCurrentColor;
+#endif // EFCD_ROTATE_FRAME
};
}; /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedQemuCamera.cpp b/tools/emulator/system/camera/EmulatedQemuCamera.cpp
index 5c98ae6..611b6b5 100755
--- a/tools/emulator/system/camera/EmulatedQemuCamera.cpp
+++ b/tools/emulator/system/camera/EmulatedQemuCamera.cpp
@@ -42,8 +42,11 @@
***************************************************************************/
status_t EmulatedQemuCamera::Initialize(const char* device_name,
- const char* frame_dims)
+ const char* frame_dims,
+ const char* facing_dir)
{
+ LOGV("%s:\n Name=%s\n Facing '%s'\n Dimensions=%s",
+ __FUNCTION__, device_name, facing_dir, frame_dims);
/* Save dimensions. */
mFrameDims = frame_dims;
@@ -63,11 +66,7 @@
* Set customizable parameters.
*/
- const char* facing = EmulatedCamera::FACING_FRONT;
- if (gEmulatedCameraFactory.getQemuCameraOrientation() == CAMERA_FACING_BACK) {
- facing = EmulatedCamera::FACING_BACK;
- }
- mParameters.set(EmulatedCamera::FACING_KEY, facing);
+ mParameters.set(EmulatedCamera::FACING_KEY, facing_dir);
mParameters.set(EmulatedCamera::ORIENTATION_KEY,
gEmulatedCameraFactory.getQemuCameraOrientation());
mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, frame_dims);
@@ -84,7 +83,7 @@
if (c == NULL) {
strncpy(first_dim, frame_dims, sizeof(first_dim));
first_dim[sizeof(first_dim)-1] = '\0';
- } else if ((c - frame_dims) < sizeof(first_dim)) {
+ } else if (static_cast<size_t>(c - frame_dims) < sizeof(first_dim)) {
memcpy(first_dim, frame_dims, c - frame_dims);
first_dim[c - frame_dims] = '\0';
} else {
diff --git a/tools/emulator/system/camera/EmulatedQemuCamera.h b/tools/emulator/system/camera/EmulatedQemuCamera.h
index f00076b..1b826c7 100755
--- a/tools/emulator/system/camera/EmulatedQemuCamera.h
+++ b/tools/emulator/system/camera/EmulatedQemuCamera.h
@@ -42,12 +42,10 @@
**************************************************************************/
public:
- /* Initializes EmulatedQemuCamera instance.
- * The contained EmulatedQemuCameraDevice will be initialized in this method.
- * Return:
- * NO_ERROR on success, or an appropriate error status.
- */
- status_t Initialize(const char* device_name, const char* frame_dims);
+ /* Initializes EmulatedQemuCamera instance. */
+ status_t Initialize(const char* device_name,
+ const char* frame_dims,
+ const char* facing_dir);
/***************************************************************************
* EmulatedCamera abstract API implementation.
diff --git a/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp b/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
index 1342b95..5117a84 100755
--- a/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
+++ b/tools/emulator/system/camera/EmulatedQemuCameraDevice.cpp
@@ -82,16 +82,20 @@
return EINVAL;
}
if (isConnected()) {
- LOGW("%s: Qemu camera device is already connected.", __FUNCTION__);
+ LOGW("%s: Qemu camera device '%s' is already connected.",
+ __FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
+ /* Connect to the camera device via emulator. */
const status_t res = mQemuClient.queryConnect();
if (res == NO_ERROR) {
- LOGV("%s: Connected", __FUNCTION__);
+ LOGV("%s: Connected to device '%s'",
+ __FUNCTION__, (const char*)mDeviceName);
mState = ECDS_CONNECTED;
} else {
- LOGE("%s: Connection failed", __FUNCTION__);
+ LOGE("%s: Connection to device '%s' failed",
+ __FUNCTION__, (const char*)mDeviceName);
}
return res;
@@ -103,62 +107,76 @@
Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
- LOGW("%s: Qemu camera device is already disconnected.", __FUNCTION__);
+ LOGW("%s: Qemu camera device '%s' is already disconnected.",
+ __FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
- if (isCapturing()) {
- LOGE("%s: Cannot disconnect while in the capturing state.", __FUNCTION__);
+ if (isStarted()) {
+ LOGE("%s: Cannot disconnect from the started device '%s.",
+ __FUNCTION__, (const char*)mDeviceName);
return EINVAL;
}
+ /* Disconnect from the camera device via emulator. */
const status_t res = mQemuClient.queryDisconnect();
if (res == NO_ERROR) {
- LOGV("%s: Disonnected", __FUNCTION__);
+ LOGV("%s: Disonnected from device '%s'",
+ __FUNCTION__, (const char*)mDeviceName);
mState = ECDS_INITIALIZED;
} else {
- LOGE("%s: Disconnection failed", __FUNCTION__);
+ LOGE("%s: Disconnection from device '%s' failed",
+ __FUNCTION__, (const char*)mDeviceName);
}
return res;
}
-status_t EmulatedQemuCameraDevice::startDevice()
+status_t EmulatedQemuCameraDevice::startDevice(int width,
+ int height,
+ uint32_t pix_fmt)
{
LOGV("%s", __FUNCTION__);
+ Mutex::Autolock locker(&mObjectLock);
if (!isConnected()) {
- LOGE("%s: Qemu camera device is not connected.", __FUNCTION__);
+ LOGE("%s: Qemu camera device '%s' is not connected.",
+ __FUNCTION__, (const char*)mDeviceName);
return EINVAL;
}
- if (isCapturing()) {
- LOGW("%s: Qemu camera device is already capturing.", __FUNCTION__);
+ if (isStarted()) {
+ LOGW("%s: Qemu camera device '%s' is already started.",
+ __FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
+ status_t res = EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
+ if (res != NO_ERROR) {
+ LOGE("%s: commonStartDevice failed", __FUNCTION__);
+ return res;
+ }
+
/* Allocate preview frame buffer. */
/* TODO: Watch out for preview format changes! At this point we implement
* RGB32 only.*/
- mPreviewFrame = new uint16_t[mTotalPixels * 4];
+ mPreviewFrame = new uint32_t[mTotalPixels];
if (mPreviewFrame == NULL) {
LOGE("%s: Unable to allocate %d bytes for preview frame",
- __FUNCTION__, mTotalPixels * 4);
+ __FUNCTION__, mTotalPixels);
return ENOMEM;
}
- memset(mPreviewFrame, 0, mTotalPixels * 4);
/* Start the actual camera device. */
- status_t res =
- mQemuClient.queryStart(mPixelFormat, mFrameWidth, mFrameHeight);
+ res = mQemuClient.queryStart(mPixelFormat, mFrameWidth, mFrameHeight);
if (res == NO_ERROR) {
- /* Start the worker thread. */
- res = startWorkerThread();
- if (res == NO_ERROR) {
- mState = ECDS_CAPTURING;
- } else {
- mQemuClient.queryStop();
- }
+ LOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+ __FUNCTION__, (const char*)mDeviceName,
+ reinterpret_cast<const char*>(&mPixelFormat),
+ mFrameWidth, mFrameHeight);
+ mState = ECDS_STARTED;
} else {
- LOGE("%s: Start failed", __FUNCTION__);
+ LOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+ __FUNCTION__, (const char*)mDeviceName,
+ reinterpret_cast<const char*>(&pix_fmt), width, height);
}
return res;
@@ -168,28 +186,27 @@
{
LOGV("%s", __FUNCTION__);
- if (!isCapturing()) {
- LOGW("%s: Qemu camera device is not capturing.", __FUNCTION__);
+ Mutex::Autolock locker(&mObjectLock);
+ if (!isStarted()) {
+ LOGW("%s: Qemu camera device '%s' is not started.",
+ __FUNCTION__, (const char*)mDeviceName);
return NO_ERROR;
}
- /* Stop the worker thread first. */
- status_t res = stopWorkerThread();
+ /* Stop the actual camera device. */
+ status_t res = mQemuClient.queryStop();
if (res == NO_ERROR) {
- /* Stop the actual camera device. */
- res = mQemuClient.queryStop();
- if (res == NO_ERROR) {
- if (mPreviewFrame == NULL) {
- delete[] mPreviewFrame;
- mPreviewFrame = NULL;
- }
- mState = ECDS_CONNECTED;
- LOGV("%s: Stopped", __FUNCTION__);
- } else {
- LOGE("%s: Stop failed", __FUNCTION__);
+ if (mPreviewFrame != NULL) {
+ delete[] mPreviewFrame;
+ mPreviewFrame = NULL;
}
+ EmulatedCameraDevice::commonStopDevice();
+ mState = ECDS_CONNECTED;
+ LOGV("%s: Qemu camera device '%s' is stopped",
+ __FUNCTION__, (const char*)mDeviceName);
} else {
- LOGE("%s: Unable to stop worker thread", __FUNCTION__);
+ LOGE("%s: Unable to stop device '%s'",
+ __FUNCTION__, (const char*)mDeviceName);
}
return res;
diff --git a/tools/emulator/system/camera/EmulatedQemuCameraDevice.h b/tools/emulator/system/camera/EmulatedQemuCameraDevice.h
index 2030869..8ef562b 100755
--- a/tools/emulator/system/camera/EmulatedQemuCameraDevice.h
+++ b/tools/emulator/system/camera/EmulatedQemuCameraDevice.h
@@ -67,15 +67,16 @@
/* Disconnects from the camera device. */
status_t disconnectDevice();
-protected:
/* Starts capturing frames from the camera device. */
- status_t startDevice();
+ status_t startDevice(int width, int height, uint32_t pix_fmt);
/* Stops capturing frames from the camera device. */
status_t stopDevice();
/***************************************************************************
* EmulatedCameraDevice virtual overrides
+ * See declarations of these methods in EmulatedCameraDevice class for
+ * information on each of these methods.
**************************************************************************/
public:
@@ -108,7 +109,7 @@
String8 mDeviceName;
/* Current preview framebuffer. */
- uint16_t* mPreviewFrame;
+ uint32_t* mPreviewFrame;
/* Emulated FPS (frames per second).
* We will emulate 50 FPS. */
diff --git a/tools/emulator/system/camera/JpegCompressor.cpp b/tools/emulator/system/camera/JpegCompressor.cpp
new file mode 100644
index 0000000..0e538a1
--- /dev/null
+++ b/tools/emulator/system/camera/JpegCompressor.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains implementation of a class NV21JpegCompressor that encapsulates a
+ * converter between NV21, and JPEG formats.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "EmulatedCamera_JPEG"
+#include <cutils/log.h>
+#include "JpegCompressor.h"
+
+namespace android {
+
+NV21JpegCompressor::NV21JpegCompressor()
+ : Yuv420SpToJpegEncoder(mStrides)
+{
+}
+
+NV21JpegCompressor::~NV21JpegCompressor()
+{
+}
+
+/****************************************************************************
+ * Public API
+ ***************************************************************************/
+
+status_t NV21JpegCompressor::compressRawImage(const void* image,
+ int width,
+ int height,
+ int quality)
+{
+ LOGV("%s: %p[%dx%d]", __FUNCTION__, image, width, height);
+ void* pY = const_cast<void*>(image);
+ int offsets[2];
+ offsets[0] = 0;
+ offsets[1] = width * height;
+ mStrides[0] = width;
+ mStrides[1] = width;
+ if (encode(&mStream, pY, width, height, offsets, quality)) {
+ LOGV("%s: Compressed JPEG: %d[%dx%d] -> %d bytes",
+ __FUNCTION__, (width * height * 12) / 8, width, height, mStream.getOffset());
+ return NO_ERROR;
+ } else {
+ LOGE("%s: JPEG compression failed", __FUNCTION__);
+ return errno ? errno : EINVAL;
+ }
+}
+
+}; /* namespace android */
diff --git a/tools/emulator/system/camera/JpegCompressor.h b/tools/emulator/system/camera/JpegCompressor.h
new file mode 100644
index 0000000..1f97ae4
--- /dev/null
+++ b/tools/emulator/system/camera/JpegCompressor.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H
+#define HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H
+
+/*
+ * Contains declaration of a class NV21JpegCompressor that encapsulates a
+ * converter between NV21, and JPEG formats.
+ */
+
+#include <YuvToJpegEncoder.h>
+
+namespace android {
+
+/* Encapsulates a converter between NV21, and JPEG formats.
+ */
+class NV21JpegCompressor : protected Yuv420SpToJpegEncoder
+{
+public:
+ /* Constructs JpegCompressor instance. */
+ NV21JpegCompressor();
+ /* Destructs JpegCompressor instance. */
+ ~NV21JpegCompressor();
+
+ /****************************************************************************
+ * Public API
+ ***************************************************************************/
+
+public:
+ /* Compresses raw NV21 image into a JPEG.
+ * The compressed image will be saved in mStream member of this class. Use
+ * getCompressedSize method to obtain buffer size of the compressed image,
+ * and getCompressedImage to copy out the compressed image.
+ * Param:
+ * image - Raw NV21 image.
+ * width, height - Image dimensions.
+ * quality - JPEG quality.
+ * Return:
+ * NO_ERROR on success, or an appropriate error status.
+ *
+ */
+ status_t compressRawImage(const void* image,
+ int width,
+ int height,
+ int quality);
+
+ /* Get size of the compressed JPEG buffer.
+ * This method must be called only after a successful completion of
+ * compressRawImage call.
+ * Return:
+ * Size of the compressed JPEG buffer.
+ */
+ size_t getCompressedSize() const
+ {
+ return mStream.getOffset();
+ }
+
+ /* Copies out compressed JPEG buffer.
+ * This method must be called only after a successful completion of
+ * compressRawImage call.
+ * Param:
+ * buff - Buffer where to copy the JPEG. Must be large enough to contain the
+ * entire image.
+ */
+ void getCompressedImage(void* buff) const
+ {
+ mStream.copyTo(buff);
+ }
+
+ /****************************************************************************
+ * Class data
+ ***************************************************************************/
+
+protected:
+ /* Memory stream where converted JPEG is saved. */
+ SkDynamicMemoryWStream mStream;
+ /* Strides for Y (the first element), and UV (the second one) panes. */
+ int mStrides[2];
+};
+
+}; /* namespace android */
+
+#endif /* HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H */
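
A hypothetical usage sketch of the class declared above; 'nv21_frame', 'width', and 'height' are assumed to describe a valid NV21 buffer (width * height * 3 / 2 bytes):

    // Sketch only: compress one NV21 frame and copy the resulting JPEG out.
    NV21JpegCompressor compressor;
    if (compressor.compressRawImage(nv21_frame, width, height, 90) == NO_ERROR) {
        const size_t jpeg_size = compressor.getCompressedSize();
        void* jpeg_buf = malloc(jpeg_size);
        if (jpeg_buf != NULL) {
            compressor.getCompressedImage(jpeg_buf);
            /* ... pass jpeg_buf / jpeg_size on to the caller ... */
            free(jpeg_buf);
        }
    }
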
diff --git a/tools/emulator/system/camera/PreviewWindow.cpp b/tools/emulator/system/camera/PreviewWindow.cpp
index c96a807..c4f6f80 100755
--- a/tools/emulator/system/camera/PreviewWindow.cpp
+++ b/tools/emulator/system/camera/PreviewWindow.cpp
@@ -180,6 +180,10 @@
grbuffer_mapper.unlock(*buffer);
}
+/***************************************************************************
+ * Private API
+ **************************************************************************/
+
bool PreviewWindow::adjustPreviewDimensions(EmulatedCameraDevice* camera_dev)
{
/* Match the cached frame dimensions against the actual ones. */
diff --git a/tools/emulator/system/camera/QemuClient.cpp b/tools/emulator/system/camera/QemuClient.cpp
index 49307bb..fd49585 100755
--- a/tools/emulator/system/camera/QemuClient.cpp
+++ b/tools/emulator/system/camera/QemuClient.cpp
@@ -19,7 +19,7 @@
* services in the emulator via qemu pipe.
*/
-#define LOG_NDEBUG 0
+#define LOG_NDEBUG 1
#define LOG_TAG "EmulatedCamera_QemuClient"
#include <cutils/log.h>
#include "EmulatedCamera.h"
@@ -40,7 +40,7 @@
QemuQuery::QemuQuery()
: mQuery(mQueryPrealloc),
- mQueryStatus(NO_ERROR),
+ mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
@@ -52,26 +52,26 @@
QemuQuery::QemuQuery(const char* query_string)
: mQuery(mQueryPrealloc),
- mQueryStatus(NO_ERROR),
+ mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
mReplyDataSize(0),
mReplyStatus(0)
{
- mQueryStatus = QemuQuery::createQuery(query_string, NULL);
+ mQueryDeliveryStatus = QemuQuery::createQuery(query_string, NULL);
}
QemuQuery::QemuQuery(const char* query_name, const char* query_param)
: mQuery(mQueryPrealloc),
- mQueryStatus(NO_ERROR),
+ mQueryDeliveryStatus(NO_ERROR),
mReplyBuffer(NULL),
mReplyData(NULL),
mReplySize(0),
mReplyDataSize(0),
mReplyStatus(0)
{
- mQueryStatus = QemuQuery::createQuery(query_name, query_param);
+ mQueryDeliveryStatus = QemuQuery::createQuery(query_name, query_param);
}
QemuQuery::~QemuQuery()
@@ -88,6 +88,7 @@
if (name == NULL || *name == '\0') {
LOGE("%s: NULL or an empty string is passed as query name.",
__FUNCTION__);
+ mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
@@ -101,7 +102,7 @@
if (mQuery == NULL) {
LOGE("%s: Unable to allocate %d bytes for query buffer",
__FUNCTION__, required);
- mQueryStatus = ENOMEM;
+ mQueryDeliveryStatus = ENOMEM;
return ENOMEM;
}
}
@@ -119,9 +120,9 @@
status_t QemuQuery::completeQuery(status_t status)
{
/* Save query completion status. */
- mQueryStatus = status;
- if (mQueryStatus != NO_ERROR) {
- return mQueryStatus;
+ mQueryDeliveryStatus = status;
+ if (mQueryDeliveryStatus != NO_ERROR) {
+ return mQueryDeliveryStatus;
}
/* Make sure reply buffer contains at least 'ok', or 'ko'.
@@ -131,7 +132,7 @@
* zero-terminated, and the terminator will be included in the reply. */
if (mReplyBuffer == NULL || mReplySize < 3) {
LOGE("%s: Invalid reply to the query", __FUNCTION__);
- mQueryStatus = EINVAL;
+ mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
@@ -142,7 +143,7 @@
mReplyStatus = 0;
} else {
LOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
- mQueryStatus = EINVAL;
+ mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
@@ -152,7 +153,7 @@
* with a ':' */
if (mReplyBuffer[2] != ':') {
LOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
- mQueryStatus = EINVAL;
+ mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
mReplyData = mReplyBuffer + 3;
@@ -162,7 +163,7 @@
* zero-terminator. */
if (mReplyBuffer[2] != '\0') {
LOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
- mQueryStatus = EINVAL;
+ mQueryDeliveryStatus = EINVAL;
return EINVAL;
}
}
@@ -176,14 +177,13 @@
delete[] mQuery;
}
mQuery = mQueryPrealloc;
- mQueryStatus = NO_ERROR;
+ mQueryDeliveryStatus = NO_ERROR;
if (mReplyBuffer != NULL) {
free(mReplyBuffer);
mReplyBuffer = NULL;
}
mReplyData = NULL;
- mReplySize = 0;
- mReplyDataSize = 0;
+ mReplySize = mReplyDataSize = 0;
mReplyStatus = 0;
}
@@ -270,9 +270,9 @@
if (written == data_size) {
return NO_ERROR;
} else {
- LOGE("%s: Error sending data via qemu pipe: %s",
+ LOGE("%s: Error sending data via qemu pipe: '%s'",
__FUNCTION__, strerror(errno));
- return errno != NO_ERROR ? errno : EIO;
+ return errno ? errno : EIO;
}
}
@@ -331,9 +331,9 @@
status_t QemuClient::doQuery(QemuQuery* query)
{
/* Make sure that the query has been successfully constructed. */
- if (query->mQueryStatus != NO_ERROR) {
+ if (query->mQueryDeliveryStatus != NO_ERROR) {
LOGE("%s: Query is invalid", __FUNCTION__);
- return query->mQueryStatus;
+ return query->mQueryDeliveryStatus;
}
LOGQ("Send query '%s'", query->mQuery);
@@ -357,7 +357,11 @@
}
/* Complete the query, and return its completion handling status. */
- return query->completeQuery(res);
+ const status_t res1 = query->completeQuery(res);
+ LOGE_IF(res1 != NO_ERROR && res1 != res,
+ "%s: Error %d in query '%s' completion",
+ __FUNCTION__, res1, query->mQuery);
+ return res1;
}
/****************************************************************************
@@ -385,8 +389,9 @@
LOGV("%s", __FUNCTION__);
QemuQuery query(mQueryList);
- doQuery(&query);
- if (!query.isQuerySucceeded()) {
+ if (doQuery(&query) || !query.isQuerySucceeded()) {
+ LOGE("%s: List cameras query failed: %s", __FUNCTION__,
+ query.mReplyData ? query.mReplyData : "No error message");
return query.getCompletionStatus();
}
@@ -445,9 +450,9 @@
QemuQuery query(mQueryConnect);
doQuery(&query);
const status_t res = query.getCompletionStatus();
- LOGE_IF(res != NO_ERROR, "%s failed: %s",
+ LOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
- "No error message");
+ "No error message");
return res;
}
@@ -458,9 +463,9 @@
QemuQuery query(mQueryDisconnect);
doQuery(&query);
const status_t res = query.getCompletionStatus();
- LOGE_IF(res != NO_ERROR, "%s failed: %s",
+ LOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
- "No error message");
+ "No error message");
return res;
}
@@ -476,9 +481,9 @@
QemuQuery query(query_str);
doQuery(&query);
const status_t res = query.getCompletionStatus();
- LOGE_IF(res != NO_ERROR, "%s failed: %s",
+ LOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
- "No error message");
+ "No error message");
return res;
}
@@ -489,9 +494,9 @@
QemuQuery query(mQueryStop);
doQuery(&query);
const status_t res = query.getCompletionStatus();
- LOGE_IF(res != NO_ERROR, "%s failed: %s",
+ LOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
__FUNCTION__, query.mReplyData ? query.mReplyData :
- "No error message");
+ "No error message");
return res;
}
@@ -500,6 +505,8 @@
size_t vframe_size,
size_t pframe_size)
{
+ LOGV("%s", __FUNCTION__);
+
char query_str[256];
snprintf(query_str, sizeof(query_str), "%s video=%d preview=%d",
mQueryFrame, (vframe && vframe_size) ? vframe_size : 0,
@@ -507,39 +514,41 @@
QemuQuery query(query_str);
doQuery(&query);
const status_t res = query.getCompletionStatus();
- LOGE_IF(res != NO_ERROR, "%s failed: %s",
- __FUNCTION__, query.mReplyData ? query.mReplyData :
+ if (res != NO_ERROR) {
+ LOGE("%s: Query failed: %s",
+ __FUNCTION__, query.mReplyData ? query.mReplyData :
"No error message");
- if (res == NO_ERROR) {
- /* Copy requested frames. */
- size_t cur_offset = 0;
- const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);
- /* Video frame is always first. */
- if (vframe != NULL && vframe_size != 0) {
- /* Make sure that video frame is in. */
- if ((query.mReplyDataSize - cur_offset) >= vframe_size) {
- memcpy(vframe, frame, vframe_size);
- cur_offset += vframe_size;
- } else {
- LOGE("%s: Reply (%d bytes) is to small to contain video frame (%d bytes)",
- __FUNCTION__, query.mReplyDataSize - cur_offset, vframe_size);
- return EINVAL;
- }
+ return res;
+ }
+
+ /* Copy requested frames. */
+ size_t cur_offset = 0;
+ const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);
+ /* Video frame is always first. */
+ if (vframe != NULL && vframe_size != 0) {
+ /* Make sure that video frame is in. */
+ if ((query.mReplyDataSize - cur_offset) >= vframe_size) {
+ memcpy(vframe, frame, vframe_size);
+ cur_offset += vframe_size;
+ } else {
+ LOGE("%s: Reply %d bytes is to small to contain %d bytes video frame",
+ __FUNCTION__, query.mReplyDataSize - cur_offset, vframe_size);
+ return EINVAL;
}
- if (pframe != NULL && pframe_size != 0) {
- /* Make sure that preview frame is in. */
- if ((query.mReplyDataSize - cur_offset) >= pframe_size) {
- memcpy(pframe, frame + cur_offset, pframe_size);
- cur_offset += pframe_size;
- } else {
- LOGE("%s: Reply (%d bytes) is to small to contain preview frame (%d bytes)",
- __FUNCTION__, query.mReplyDataSize - cur_offset, pframe_size);
- return EINVAL;
- }
+ }
+ if (pframe != NULL && pframe_size != 0) {
+ /* Make sure that preview frame is in. */
+ if ((query.mReplyDataSize - cur_offset) >= pframe_size) {
+ memcpy(pframe, frame + cur_offset, pframe_size);
+ cur_offset += pframe_size;
+ } else {
+ LOGE("%s: Reply %d bytes is to small to contain %d bytes preview frame",
+ __FUNCTION__, query.mReplyDataSize - cur_offset, pframe_size);
+ return EINVAL;
}
}
- return res;
+ return NO_ERROR;
}
}; /* namespace android */
diff --git a/tools/emulator/system/camera/QemuClient.h b/tools/emulator/system/camera/QemuClient.h
index 9614a4d..c0b8e61 100755
--- a/tools/emulator/system/camera/QemuClient.h
+++ b/tools/emulator/system/camera/QemuClient.h
@@ -57,7 +57,9 @@
* - '=' are allowed only to divide parameter names from parameter values.
*
* Emulator replies to each query in two chunks:
- * - 4 bytes encoding the payload size
+ * - 8 bytes encoding the payload size as a string containing hexadecimal
+ * representation of the payload size value. This is done in order to simplify
+ * dealing with different endianness on the host, and on the guest.
* - Payload, whose size is defined by the first chunk.
*
* Every payload always begins with two characters, encoding the result of the
@@ -66,7 +68,9 @@
* - 'ko' Encoding a failure.
* After that payload may have optional data. If payload has more data following
* the query result, there is a ':' character separating them. If payload carries
- * only the result, it always ends with a zero-terminator.
+ * only the result, it always ends with a zero-terminator. So, payload 'ok'/'ko'
+ * prefix is always 3 bytes long: it either includes a zero-terminator, if there
+ * is no data, or a ':' separator.
*/
class QemuQuery {
public:
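
A minimal sketch of the reply framing described in the comment above; 'readFully' is a hypothetical helper that loops over read() until the requested byte count arrives, and includes plus error handling are omitted:

    char size_str[9] = { 0 };
    readFully(fd, size_str, 8);                        /* 8 hex characters encode the payload size. */
    const size_t payload_size = strtoul(size_str, NULL, 16);
    char* payload = (char*)malloc(payload_size);
    readFully(fd, payload, payload_size);
    const bool succeeded = (memcmp(payload, "ok", 2) == 0);        /* "ko" signals failure.        */
    const char* data = (payload[2] == ':') ? payload + 3 : NULL;   /* Optional data follows ':'.   */
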
@@ -81,7 +85,7 @@
*/
explicit QemuQuery(const char* query_string);
- /* Constructs and initializes QemuQuery instance for a query.
+ /* Constructs and initializes QemuQuery instance for a query with parameters.
* Param:
* query_name - Query name.
* query_param - Query parameters. Can be NULL.
@@ -96,7 +100,8 @@
***************************************************************************/
/* Creates new query.
- * This method will reset this instance prior to creating a new query.
+ * Note: this method will reset this instance prior to creating a new query
+ * in order to discard possible "leftovers" from the previous query.
* Param:
* query_name - Query name.
* query_param - Query parameters. Can be NULL.
@@ -108,19 +113,19 @@
/* Completes the query after a reply from the emulator.
* This method will parse the reply buffer, and calculate the final query
* status, which depends not only on the transport success / failure, but
- * also on 'ok' / 'ko' in the query reply.
+ * also on 'ok' / 'ko' in the reply buffer.
* Param:
* status - Query delivery status. This status doesn't necessarily reflects
- * the final query status (which is defined by 'ok'/'ko' in the reply buffer).
- * This status simply states whether or not the query has been sent, and a
- * reply has been received successfuly. However, if status indicates a
- * failure, the entire query has failed. If status indicates a success, the
- * reply will be checked here to calculate the final query status.
+ * the final query status (which is defined by 'ok'/'ko' prefix in the
+ * reply buffer). This status simply states whether or not the query has
+ * been sent, and a reply has been received successfully. However, if
+ * this status indicates a failure, it means that the entire query has
+ * failed.
* Return:
* NO_ERROR on success, or an appropriate error status on failure. Note that
* status returned here just signals whether or not the method has succeeded.
- * Use isQuerySucceeded() / getCompletionStatus() methods to check the final
- * query status.
+ * Use isQuerySucceeded() / getCompletionStatus() methods of this class to
+ * check the final query status.
*/
status_t completeQuery(status_t status);
@@ -132,19 +137,26 @@
* class has been executed.
*/
inline bool isQuerySucceeded() const {
- return mQueryStatus == NO_ERROR && mReplyStatus != 0;
+ return mQueryDeliveryStatus == NO_ERROR && mReplyStatus != 0;
}
/* Gets final completion status of the query.
* Note that this method must be called after completeQuery() method of this
* class has been executed.
- * NO_ERROR on success, or an appropriate error status on failure.
+ * Return:
+ * NO_ERROR if query has succeeded, or an appropriate error status on query
+ * failure.
*/
inline status_t getCompletionStatus() const {
- if (isQuerySucceeded()) {
- return NO_ERROR;
+ if (mQueryDeliveryStatus == NO_ERROR) {
+ if (mReplyStatus) {
+ return NO_ERROR;
+ } else {
+ return EINVAL;
+ }
+ } else {
+ return mQueryDeliveryStatus;
}
- return (mQueryStatus != NO_ERROR) ? mQueryStatus : EINVAL;
}
/****************************************************************************
@@ -154,8 +166,8 @@
public:
/* Query string. */
char* mQuery;
- /* Query status. */
- status_t mQueryStatus;
+ /* Query delivery status. */
+ status_t mQueryDeliveryStatus;
/* Reply buffer */
char* mReplyBuffer;
/* Reply data (past 'ok'/'ko'). If NULL, there were no data in reply. */
@@ -208,7 +220,7 @@
* the 'factory' service, while connection with parameters means
* connection to an 'emulated camera' service, where camera is identified
* by one of the connection parameters. So, passing NULL, or an empty
- * string to this method will establish connection with a 'factory'
+ * string to this method will establish a connection with the 'factory'
* service, while not empty string passed here will establish connection
* with an 'emulated camera' service. Parameters defining the emulated
* camera must be formatted as such:
@@ -216,10 +228,10 @@
* "name=<device name> [inp_channel=<input channel #>]",
*
* where 'device name' is a required parameter defining name of the
- * camera device, 'input channel' is an optional parameter (positive
- * integer), defining input channel to use on the camera device. Note
- * that device name passed here must have been previously obtained from
- * the factory service.
+ * camera device, and 'input channel' is an optional parameter (positive
+ * integer), defining the input channel to use on the camera device.
+ * Note that device name passed here must have been previously obtained
+ * from the factory service using 'list' query.
* Return:
* NO_ERROR on success, or an appropriate error status.
*/
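
For illustration, a connection parameter string built for a hypothetical device named 'webcam0' on input channel 1 (passing NULL instead targets the factory service):

    char param[64];
    snprintf(param, sizeof(param), "name=%s inp_channel=%d", "webcam0", 1);
    qemu_client.connectClient(param);   /* 'qemu_client' is an assumed QemuClient instance. */
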
@@ -259,11 +271,11 @@
* Return:
* NO_ERROR on success, or an appropriate error status on failure. Note that
* status returned here is not the final query status. Use isQuerySucceeded(),
- * or getCompletionStatus() method on the query to see if it has succeeded.
- * However, if this method returns a failure, it means that the query has
- * failed, and there is no guarantee that its data members are properly
- * initialized (except for the 'mQueryStatus', which is always in the
- * proper state).
+ * or getCompletionStatus() method on the query object to see if it has
+ * succeeded. However, if this method returns a failure, it means that the
+ * query has failed, and there is no guarantee that its data members are
+ * properly initialized (except for the 'mQueryDeliveryStatus', which is
+ * always in the proper state).
*/
virtual status_t doQuery(QemuQuery* query);
@@ -300,25 +312,26 @@
public:
/* Lists camera devices connected to the host.
* Param:
- * list - Upon success contains list of cameras connected to the host. The
+ * list - Upon success contains a list of cameras connected to the host. The
* list returned here is represented as a string, containing multiple
- * lines, separated with '\n', where each line represents a camera. Each
+ * lines separated with '\n', where each line represents a camera. Each
* camera line is formatted as such:
*
* "name=<device name> channel=<num> pix=<num> framedims=<dimensions>\n"
*
* Where:
- * - 'name' is the name of camera device attached to the host. This name
- * must be used for subsequent connection to the 'emulated camera'
+ * - 'name' is the name of the camera device attached to the host. This
+ * name must be used for subsequent connection to the 'emulated camera'
* service for that camera.
* - 'channel' - input channel number (positive int) to use to communicate
* with the camera.
- * - 'pix' - pixel format (a "fourcc" int), chosen for the video frames.
+ * - 'pix' - pixel format (a "fourcc" uint), chosen for the video frames
+ * by the camera service.
* - 'framedims' contains a list of frame dimensions supported by the
- * camera. Each etry in the list is in form '<width>x<height>', where
- * 'width' and 'height' are numeric values for width and height of a
- * supported frame dimension. Entries in this list are separated with
- * ','.
+ * camera for the chosen pixel format. Each entry in the list is in the form
+ * '<width>x<height>', where 'width' and 'height' are numeric values
+ * for width and height of a supported frame dimension. Entries in
+ * this list are separated with ',' with no spaces between the entries.
* Return:
* NO_ERROR on success, or an appropriate error status on failure.
*/
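
A standalone sketch of splitting a 'framedims' value from such a list line; the dimension list below is hypothetical:

    #include <cstdio>

    int main() {
        const char* framedims = "640x480,352x288,176x144";  /* hypothetical value */
        int w = 0, h = 0, consumed = 0;
        while (sscanf(framedims, "%dx%d%n", &w, &h, &consumed) == 2) {
            printf("supported: %dx%d\n", w, h);
            framedims += consumed;
            if (*framedims != ',') {
                break;                  /* No more entries. */
            }
            framedims++;                /* Skip the ',' separator. */
        }
        return 0;
    }
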
diff --git a/tools/emulator/system/camera/media_profiles.xml b/tools/emulator/system/camera/media_profiles.xml
new file mode 100644
index 0000000..ae1ce88
--- /dev/null
+++ b/tools/emulator/system/camera/media_profiles.xml
@@ -0,0 +1,400 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2010 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!DOCTYPE MediaSettings [
+<!ELEMENT MediaSettings (CamcorderProfiles,
+ EncoderOutputFileFormat+,
+ VideoEncoderCap+,
+ AudioEncoderCap+,
+ VideoDecoderCap,
+ AudioDecoderCap)>
+<!ELEMENT CamcorderProfiles (EncoderProfile+, ImageEncoding+, ImageDecoding, Camera)>
+<!ELEMENT EncoderProfile (Video, Audio)>
+<!ATTLIST EncoderProfile quality (high|low) #REQUIRED>
+<!ATTLIST EncoderProfile fileFormat (mp4|3gp) #REQUIRED>
+<!ATTLIST EncoderProfile duration (30|60) #REQUIRED>
+<!ATTLIST EncoderProfile cameraId (0|1) #REQUIRED>
+<!ELEMENT Video EMPTY>
+<!ATTLIST Video codec (h264|h263|m4v) #REQUIRED>
+<!ATTLIST Video bitRate CDATA #REQUIRED>
+<!ATTLIST Video width CDATA #REQUIRED>
+<!ATTLIST Video height CDATA #REQUIRED>
+<!ATTLIST Video frameRate CDATA #REQUIRED>
+<!ELEMENT Audio EMPTY>
+<!ATTLIST Audio codec (amrnb|amrwb|aac) #REQUIRED>
+<!ATTLIST Audio bitRate CDATA #REQUIRED>
+<!ATTLIST Audio sampleRate CDATA #REQUIRED>
+<!ATTLIST Audio channels (1|2) #REQUIRED>
+<!ELEMENT ImageEncoding EMPTY>
+<!ATTLIST ImageEncoding quality (90|80|70|60|50|40) #REQUIRED>
+<!ELEMENT ImageDecoding EMPTY>
+<!ATTLIST ImageDecoding memCap CDATA #REQUIRED>
+<!ELEMENT Camera EMPTY>
+<!ELEMENT EncoderOutputFileFormat EMPTY>
+<!ATTLIST EncoderOutputFileFormat name (mp4|3gp) #REQUIRED>
+<!ELEMENT VideoEncoderCap EMPTY>
+<!ATTLIST VideoEncoderCap name (h264|h263|m4v|wmv) #REQUIRED>
+<!ATTLIST VideoEncoderCap enabled (true|false) #REQUIRED>
+<!ATTLIST VideoEncoderCap minBitRate CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap maxBitRate CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap minFrameWidth CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap maxFrameWidth CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap minFrameHeight CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap maxFrameHeight CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap minFrameRate CDATA #REQUIRED>
+<!ATTLIST VideoEncoderCap maxFrameRate CDATA #REQUIRED>
+<!ELEMENT AudioEncoderCap EMPTY>
+<!ATTLIST AudioEncoderCap name (amrnb|amrwb|aac|wma) #REQUIRED>
+<!ATTLIST AudioEncoderCap enabled (true|false) #REQUIRED>
+<!ATTLIST AudioEncoderCap minBitRate CDATA #REQUIRED>
+<!ATTLIST AudioEncoderCap maxBitRate CDATA #REQUIRED>
+<!ATTLIST AudioEncoderCap minSampleRate CDATA #REQUIRED>
+<!ATTLIST AudioEncoderCap maxSampleRate CDATA #REQUIRED>
+<!ATTLIST AudioEncoderCap minChannels (1|2) #REQUIRED>
+<!ATTLIST AudioEncoderCap maxChannels (1|2) #REQUIRED>
+<!ELEMENT VideoDecoderCap EMPTY>
+<!ATTLIST VideoDecoderCap name (wmv) #REQUIRED>
+<!ATTLIST VideoDecoderCap enabled (true|false) #REQUIRED>
+<!ELEMENT AudioDecoderCap EMPTY>
+<!ATTLIST AudioDecoderCap name (wma) #REQUIRED>
+<!ATTLIST AudioDecoderCap enabled (true|false) #REQUIRED>
+<!ELEMENT VideoEditorCap EMPTY>
+<!ATTLIST VideoEditorCap maxInputFrameWidth CDATA #REQUIRED>
+<!ATTLIST VideoEditorCap maxInputFrameHeight CDATA #REQUIRED>
+<!ATTLIST VideoEditorCap maxOutputFrameWidth CDATA #REQUIRED>
+<!ATTLIST VideoEditorCap maxOutputFrameHeight CDATA #REQUIRED>
+<!ELEMENT ExportVideoProfile EMPTY>
+<!ATTLIST ExportVideoProfile name (h264|h263|m4v) #REQUIRED>
+<!ATTLIST ExportVideoProfile profile CDATA #REQUIRED>
+<!ATTLIST ExportVideoProfile level CDATA #REQUIRED>
+]>
+<!--
+ This file is used to declare the multimedia profiles and capabilities
+ on an android-powered device.
+-->
+<MediaSettings>
+ <!-- Each camcorder profile defines a set of predefined configuration parameters -->
+ <CamcorderProfiles cameraId="0">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="1">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="2">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="3">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="4">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="5">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <CamcorderProfiles cameraId="6">
+
+ <EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
+ <Video codec="m4v"
+ bitRate="128000"
+ width="320"
+ height="240"
+ frameRate="15" />
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
+ <Video codec="h264"
+ bitRate="192000"
+ width="176"
+ height="144"
+ frameRate="30" />
+ <!-- audio setting is ignored -->
+ <Audio codec="amrnb"
+ bitRate="12200"
+ sampleRate="8000"
+ channels="1" />
+ </EncoderProfile>
+
+ <ImageEncoding quality="95" />
+ <ImageEncoding quality="80" />
+ <ImageEncoding quality="70" />
+ <ImageDecoding memCap="20000000" />
+
+ </CamcorderProfiles>
+
+ <EncoderOutputFileFormat name="3gp" />
+ <EncoderOutputFileFormat name="mp4" />
+
+    <!--
+        If a codec is not enabled, it is invisible to applications.
+        In other words, applications cannot use the codec, or query
+        its capabilities at all, while it is disabled.
+    -->
+ <VideoEncoderCap name="h264" enabled="true"
+ minBitRate="64000" maxBitRate="192000"
+ minFrameWidth="176" maxFrameWidth="320"
+ minFrameHeight="144" maxFrameHeight="240"
+ minFrameRate="15" maxFrameRate="30" />
+
+ <VideoEncoderCap name="h263" enabled="true"
+ minBitRate="64000" maxBitRate="192000"
+ minFrameWidth="176" maxFrameWidth="320"
+ minFrameHeight="144" maxFrameHeight="240"
+ minFrameRate="15" maxFrameRate="30" />
+
+ <VideoEncoderCap name="m4v" enabled="true"
+ minBitRate="64000" maxBitRate="192000"
+ minFrameWidth="176" maxFrameWidth="320"
+ minFrameHeight="144" maxFrameHeight="240"
+ minFrameRate="15" maxFrameRate="30" />
+
+ <AudioEncoderCap name="aac" enabled="true"
+ minBitRate="8000" maxBitRate="96000"
+ minSampleRate="8000" maxSampleRate="48000"
+ minChannels="1" maxChannels="1" />
+
+ <AudioEncoderCap name="amrwb" enabled="true"
+ minBitRate="6600" maxBitRate="23050"
+ minSampleRate="16000" maxSampleRate="16000"
+ minChannels="1" maxChannels="1" />
+
+ <AudioEncoderCap name="amrnb" enabled="true"
+ minBitRate="5525" maxBitRate="12200"
+ minSampleRate="8000" maxSampleRate="8000"
+ minChannels="1" maxChannels="1" />
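+
+    <!--
+        Editor's note, an illustrative sketch (not part of the original device file):
+        a recording application is expected to stay inside the encoder ranges
+        advertised above. For example, keeping within the h264 VideoEncoderCap:
+
+            // hypothetical MediaRecorder setup constrained by the h264 caps above
+            MediaRecorder recorder = new MediaRecorder();
+            recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
+            recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+            recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
+            recorder.setVideoSize(320, 240);            // within min/maxFrameWidth/Height
+            recorder.setVideoFrameRate(30);             // within min/maxFrameRate
+            recorder.setVideoEncodingBitRate(192000);   // within min/maxBitRate
+            // ... then setOutputFile(), prepare() and start() as usual
+    -->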
+
+    <!--
+        FIXME:
+        Decoder capabilities are not checked at present. The only check
+        performed is whether Windows Media is visible to TEST applications;
+        for all other applications, no checks are performed at all.
+    -->
+ <VideoDecoderCap name="wmv" enabled="false"/>
+ <AudioDecoderCap name="wma" enabled="false"/>
+ <VideoEditorCap maxInputFrameWidth="320"
+ maxInputFrameHeight="240" maxOutputFrameWidth="320"
+ maxOutputFrameHeight="240"/>
+    <!--
+        The VideoEditor Export codec profile and level values
+        correspond to the constants defined in OMX_Video.h.
+        E.g. for h264, profile value 1 means OMX_VIDEO_AVCProfileBaseline
+        and level value 4096 means OMX_VIDEO_AVCLevel41.
+        Note that the values are given in decimal.
+        These values apply to the video encoder.
+    -->
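+    <!--
+        Editor's reference note (derived from OMX_Video.h, values in decimal):
+        AVC  : Baseline profile = 1, level 3.1 = 512, level 4.1 = 4096
+        H.263: Baseline profile = 1, level 10  = 1
+        MPEG4: Simple   profile = 1, level 3   = 16
+    -->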
+    <!--
+      Codec = h.264, Baseline profile, level 3.1
+    -->
+    <ExportVideoProfile name="h264" profile="1" level="512"/>
+    <!--
+      Codec = h.263, Baseline profile, level 10
+    -->
+    <ExportVideoProfile name="h263" profile="1" level="1"/>
+    <!--
+      Codec = mpeg4, Simple profile, level 3
+    -->
+    <ExportVideoProfile name="m4v" profile="1" level="16"/>
+</MediaSettings>