Merge "MiterLimit represent a ratio itself, no need to scale" into lmp-dev
diff --git a/api/current.txt b/api/current.txt
index ea842d5..c6f7613 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -16256,6 +16256,7 @@
method public void disconnect();
method public android.os.Bundle getExtras();
method public android.net.Uri getRoot();
+ method public android.content.ComponentName getServiceComponent();
method public android.media.session.MediaSession.Token getSessionToken();
method public boolean isConnected();
method public void loadIcon(android.net.Uri, int, int, android.media.browse.MediaBrowser.IconCallback);
@@ -16656,12 +16657,14 @@
method public void adjustVolume(int, int);
method public android.media.routing.MediaRouter.Delegate createMediaRouterDelegate();
method public boolean dispatchMediaButtonEvent(android.view.KeyEvent);
+ method public android.os.Bundle getExtras();
method public long getFlags();
method public android.app.PendingIntent getLaunchActivity();
method public android.media.MediaMetadata getMetadata();
method public java.lang.String getPackageName();
method public android.media.session.PlaybackState getPlaybackState();
method public java.util.List<android.media.session.MediaSession.Track> getQueue();
+ method public java.lang.CharSequence getQueueTitle();
method public int getRatingType();
method public android.media.session.MediaSession.Token getSessionToken();
method public android.media.session.MediaController.TransportControls getTransportControls();
@@ -28801,16 +28804,16 @@
method public void changeCameraCapabilities(android.telecomm.CallCameraCapabilities);
method public void changePeerDimensions(int, int);
method public void handleCallSessionEvent(int);
- method public abstract void onRequestCallDataUsage();
- method public abstract void onRequestCameraCapabilities();
- method public abstract void onSendSessionModifyRequest(android.telecomm.VideoCallProfile);
- method public abstract void onSendSessionModifyResponse(android.telecomm.VideoCallProfile);
- method public abstract void onSetCamera(java.lang.String);
- method public abstract void onSetDeviceOrientation(int);
- method public abstract void onSetDisplaySurface(android.view.Surface);
- method public abstract void onSetPauseImage(java.lang.String);
- method public abstract void onSetPreviewSurface(android.view.Surface);
- method public abstract void onSetZoom(float);
+ method public void onRequestCallDataUsage();
+ method public void onRequestCameraCapabilities();
+ method public void onSendSessionModifyRequest(android.telecomm.VideoCallProfile);
+ method public void onSendSessionModifyResponse(android.telecomm.VideoCallProfile);
+ method public void onSetCamera(java.lang.String);
+ method public void onSetDeviceOrientation(int);
+ method public void onSetDisplaySurface(android.view.Surface);
+ method public void onSetPauseImage(java.lang.String);
+ method public void onSetPreviewSurface(android.view.Surface);
+ method public void onSetZoom(float);
method public void receiveSessionModifyRequest(android.telecomm.VideoCallProfile);
method public void receiveSessionModifyResponse(int, android.telecomm.VideoCallProfile, android.telecomm.VideoCallProfile);
}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
index 986f9a8..711edf4 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyMetadataMapper.java
@@ -94,7 +94,7 @@
static final boolean LIE_ABOUT_AF = false;
static final boolean LIE_ABOUT_AF_MAX_REGIONS = false;
static final boolean LIE_ABOUT_AWB_STATE = false;
- static final boolean LIE_ABOUT_AWB = true;
+ static final boolean LIE_ABOUT_AWB = false;
/**
* Create characteristics for a legacy device by mapping the {@code parameters}
@@ -436,8 +436,52 @@
}
private static void mapControlAwb(CameraMetadataNative m, Camera.Parameters p) {
- if (!LIE_ABOUT_AWB) {
- throw new AssertionError("Not implemented yet");
+ /*
+ * control.awbAvailableModes
+ */
+
+ {
+ List<String> wbModes = p.getSupportedWhiteBalance();
+
+ String[] wbModeStrings = new String[] {
+ Camera.Parameters.WHITE_BALANCE_AUTO ,
+ Camera.Parameters.WHITE_BALANCE_INCANDESCENT ,
+ Camera.Parameters.WHITE_BALANCE_FLUORESCENT ,
+ Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT ,
+ Camera.Parameters.WHITE_BALANCE_DAYLIGHT ,
+ Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT ,
+ Camera.Parameters.WHITE_BALANCE_TWILIGHT ,
+ Camera.Parameters.WHITE_BALANCE_SHADE ,
+ };
+
+ int[] wbModeInts = new int[] {
+ CONTROL_AWB_MODE_AUTO,
+ CONTROL_AWB_MODE_INCANDESCENT ,
+ CONTROL_AWB_MODE_FLUORESCENT ,
+ CONTROL_AWB_MODE_WARM_FLUORESCENT ,
+ CONTROL_AWB_MODE_DAYLIGHT ,
+ CONTROL_AWB_MODE_CLOUDY_DAYLIGHT ,
+ CONTROL_AWB_MODE_TWILIGHT ,
+ CONTROL_AWB_MODE_SHADE ,
+ // Note that CONTROL_AWB_MODE_OFF is unsupported
+ };
+
+ List<Integer> awbAvail = ArrayUtils.convertStringListToIntList(
+ wbModes, wbModeStrings, wbModeInts);
+
+ // No AWB modes supported? That's unpossible!
+ if (awbAvail == null || awbAvail.size() == 0) {
+ Log.w(TAG, "No AWB modes supported (HAL bug); defaulting to AWB_MODE_AUTO only");
+ awbAvail = new ArrayList<Integer>(/*capacity*/1);
+ awbAvail.add(CONTROL_AWB_MODE_AUTO);
+ }
+
+ m.set(CONTROL_AWB_AVAILABLE_MODES, ArrayUtils.toIntArray(awbAvail));
+
+ if (VERBOSE) {
+ Log.v(TAG, "mapControlAwb - control.awbAvailableModes set to " +
+ ListUtils.listToString(awbAvail));
+ }
}
}
@@ -650,6 +694,11 @@
m.set(REQUEST_MAX_NUM_INPUT_STREAMS, REQUEST_MAX_NUM_INPUT_STREAMS_COUNT);
/*
+ * request.partialResultCount
+ */
+ m.set(REQUEST_PARTIAL_RESULT_COUNT, 1); // No partial results supported
+
+ /*
* request.pipelineMaxDepth
*/
m.set(REQUEST_PIPELINE_MAX_DEPTH,
@@ -680,6 +729,14 @@
}
/*
+ * sensor.availableTestPatternModes
+ */
+ {
+ // Only "OFF" test pattern mode is available
+ m.set(SENSOR_AVAILABLE_TEST_PATTERN_MODES, new int[] { SENSOR_TEST_PATTERN_MODE_OFF });
+ }
+
+ /*
* sensor.info.pixelArraySize
*/
m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize);
@@ -921,11 +978,9 @@
* control.*
*/
- if (LIE_ABOUT_AWB) {
- m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
- } else {
- throw new AssertionError("Valid control.awbMode not implemented yet");
- }
+ // control.awbMode
+ m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
+ // AWB is always available in API1 devices
// control.aeAntibandingMode
m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
diff --git a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
index dfec9008..a6fe035c 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyRequestMapper.java
@@ -216,6 +216,25 @@
}
}
+ // control.awbMode
+ {
+ Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE,
+ /*defaultValue*/CONTROL_AWB_MODE_AUTO,
+ params.getSupportedWhiteBalance() != null,
+ /*allowedValue*/CONTROL_AWB_MODE_AUTO);
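+
+ // getIfSupported returns null when the camera1 API reports no supported white
+ // balance modes (see the support check above); in that case the legacy
+ // parameters are left untouched.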
+
+ String whiteBalanceMode = null;
+ if (awbMode != null) { // null iff AWB is not supported by camera1 api
+ whiteBalanceMode = convertAwbModeToLegacy(awbMode);
+ params.setWhiteBalance(whiteBalanceMode);
+ }
+
+ if (VERBOSE) {
+ Log.v(TAG, "convertRequestToMetadata - control.awbMode "
+ + awbMode + " mapped to " + whiteBalanceMode);
+ }
+ }
+
// control.awbLock
{
Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/false,
@@ -294,6 +313,20 @@
}
}
}
+
+ /*
+ * sensor
+ */
+
+ // sensor.testPattern
+ {
+ int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE,
+ /*defaultValue*/SENSOR_TEST_PATTERN_MODE_OFF);
+ if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
+ Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode "
+ + testPatternMode + "; only OFF is supported");
+ }
+ }
}
private static List<Camera.Area> convertMeteringRegionsToLegacy(
@@ -445,6 +478,29 @@
return legacyFps;
}
+ private static String convertAwbModeToLegacy(int mode) {
+ switch (mode) {
+ case CONTROL_AWB_MODE_AUTO:
+ return Camera.Parameters.WHITE_BALANCE_AUTO;
+ case CONTROL_AWB_MODE_INCANDESCENT:
+ return Camera.Parameters.WHITE_BALANCE_INCANDESCENT;
+ case CONTROL_AWB_MODE_FLUORESCENT:
+ return Camera.Parameters.WHITE_BALANCE_FLUORESCENT;
+ case CONTROL_AWB_MODE_WARM_FLUORESCENT:
+ return Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT;
+ case CONTROL_AWB_MODE_DAYLIGHT:
+ return Camera.Parameters.WHITE_BALANCE_DAYLIGHT;
+ case CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
+ return Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT;
+ case CONTROL_AWB_MODE_TWILIGHT:
+ return Camera.Parameters.WHITE_BALANCE_TWILIGHT;
+ case CONTROL_AWB_MODE_SHADE:
+ return Camera.Parameters.WHITE_BALANCE_SHADE;
+ default:
+ Log.w(TAG, "convertAwbModeToLegacy - unrecognized control.awbMode" + mode);
+ return Camera.Parameters.WHITE_BALANCE_AUTO;
+ }
+ }
+
/**
* Return {@code null} if the value is not supported, otherwise return the retrieved key's
* value from the request (or the default value if it wasn't set).
diff --git a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
index 6da5dd0..9eff943 100644
--- a/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
+++ b/core/java/android/hardware/camera2/legacy/LegacyResultMapper.java
@@ -20,7 +20,6 @@
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.impl.CameraMetadataNative;
@@ -133,26 +132,15 @@
*/
mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);
- // control.afMode
- result.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(params.getFocusMode()));
+ /*
+ * control.af*
+ */
+ mapAf(result, activeArraySize, zoomData, /*out*/params);
- // control.awbLock
- result.set(CaptureResult.CONTROL_AWB_LOCK, params.getAutoWhiteBalanceLock());
-
- // control.awbState
- if (LegacyMetadataMapper.LIE_ABOUT_AWB_STATE) {
- // Lie to pass CTS temporarily.
- // TODO: CTS needs to be updated not to query this value
- // for LIMITED devices unless its guaranteed to be available.
- result.set(CaptureResult.CONTROL_AWB_STATE,
- CameraMetadata.CONTROL_AWB_STATE_CONVERGED);
- // TODO: Read the awb mode from parameters instead
- }
-
- if (LegacyMetadataMapper.LIE_ABOUT_AWB) {
- result.set(CaptureResult.CONTROL_AWB_MODE,
- request.get(CaptureRequest.CONTROL_AWB_MODE));
- }
+ /*
+ * control.awb*
+ */
+ mapAwb(result, /*out*/params);
/*
@@ -203,7 +191,7 @@
* flash
*/
{
- // TODO
+ // flash.mode, flash.state mapped in mapAeAndFlashMode
}
/*
@@ -234,6 +222,11 @@
/*
* sensor
*/
+ // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
+ {
+ // Unconditionally no test patterns
+ result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
+ }
// TODO: Remaining result metadata tags conversions.
return result;
@@ -295,6 +288,13 @@
m.set(CONTROL_AE_REGIONS, meteringRectArray);
}
+ }
+
+ private static void mapAf(CameraMetadataNative m,
+ Rect activeArray, ZoomData zoomData, Camera.Parameters p) {
+ // control.afMode
+ m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode()));
+
// control.afRegions
{
if (VERBOSE) {
@@ -307,13 +307,21 @@
m.set(CONTROL_AF_REGIONS, meteringRectArray);
}
+ }
+ private static void mapAwb(CameraMetadataNative m, Camera.Parameters p) {
// control.awbLock
{
boolean lock = p.isAutoWhiteBalanceLockSupported() ?
p.getAutoWhiteBalanceLock() : false;
m.set(CONTROL_AWB_LOCK, lock);
}
+
+ // control.awbMode
+ {
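+ // p.getWhiteBalance() may return null when the camera1 API does not support
+ // white balance modes; convertLegacyAwbMode() maps null to AUTO below.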
+ int awbMode = convertLegacyAwbMode(p.getWhiteBalance());
+ m.set(CONTROL_AWB_MODE, awbMode);
+ }
}
private static MeteringRectangle[] getMeteringRectangles(Rect activeArray, ZoomData zoomData,
@@ -412,6 +420,35 @@
}
}
+ private static int convertLegacyAwbMode(String mode) {
+ if (mode == null) {
+ // OK: camera1 api may not support changing WB modes; assume AUTO
+ return CONTROL_AWB_MODE_AUTO;
+ }
+
+ switch (mode) {
+ case Camera.Parameters.WHITE_BALANCE_AUTO:
+ return CONTROL_AWB_MODE_AUTO;
+ case Camera.Parameters.WHITE_BALANCE_INCANDESCENT:
+ return CONTROL_AWB_MODE_INCANDESCENT;
+ case Camera.Parameters.WHITE_BALANCE_FLUORESCENT:
+ return CONTROL_AWB_MODE_FLUORESCENT;
+ case Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT:
+ return CONTROL_AWB_MODE_WARM_FLUORESCENT;
+ case Camera.Parameters.WHITE_BALANCE_DAYLIGHT:
+ return CONTROL_AWB_MODE_DAYLIGHT;
+ case Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT:
+ return CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
+ case Camera.Parameters.WHITE_BALANCE_TWILIGHT:
+ return CONTROL_AWB_MODE_TWILIGHT;
+ case Camera.Parameters.WHITE_BALANCE_SHADE:
+ return CONTROL_AWB_MODE_SHADE;
+ default:
+ Log.w(TAG, "convertAwbMode - unrecognized WB mode " + mode);
+ return CONTROL_AWB_MODE_AUTO;
+ }
+ }
+
/** Map results for scaler.* */
private static void mapScaler(CameraMetadataNative m,
ZoomData zoomData,
diff --git a/core/jni/android_util_Binder.cpp b/core/jni/android_util_Binder.cpp
index d82fc96..81e887d 100644
--- a/core/jni/android_util_Binder.cpp
+++ b/core/jni/android_util_Binder.cpp
@@ -179,7 +179,10 @@
env->ExceptionClear();
jstring tagstr = env->NewStringUTF(LOG_TAG);
- jstring msgstr = env->NewStringUTF(msg);
+ jstring msgstr = NULL;
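+ // Only attempt to create the message string if the tag string was created
+ // successfully; otherwise a pending exception (likely OOM) may already be
+ // set, and calling further JNI functions before clearing it is unsafe.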
+ if (tagstr != NULL) {
+ msgstr = env->NewStringUTF(msg);
+ }
if ((tagstr == NULL) || (msgstr == NULL)) {
env->ExceptionClear(); /* assume exception (OOM?) was thrown */
diff --git a/core/jni/android_view_GLES20Canvas.cpp b/core/jni/android_view_GLES20Canvas.cpp
index ce76b26..afcfaf6 100644
--- a/core/jni/android_view_GLES20Canvas.cpp
+++ b/core/jni/android_view_GLES20Canvas.cpp
@@ -348,7 +348,12 @@
DisplayListRenderer* renderer = reinterpret_cast<DisplayListRenderer*>(rendererPtr);
Paint* paint = reinterpret_cast<Paint*>(paintPtr);
- renderer->drawBitmap(bitmap, left, top, paint);
+
+ // apply transform directly to canvas, so it affects shaders correctly
+ renderer->save(SkCanvas::kMatrix_SaveFlag);
+ renderer->translate(left, top);
+ renderer->drawBitmap(bitmap, paint);
+ renderer->restore();
}
static void android_view_GLES20Canvas_drawBitmapRect(JNIEnv* env, jobject clazz,
@@ -375,7 +380,12 @@
DisplayListRenderer* renderer = reinterpret_cast<DisplayListRenderer*>(rendererPtr);
SkMatrix* matrix = reinterpret_cast<SkMatrix*>(matrixPtr);
Paint* paint = reinterpret_cast<Paint*>(paintPtr);
- renderer->drawBitmap(bitmap, *matrix, paint);
+
+ // apply transform directly to canvas, so it affects shaders correctly
+ renderer->save(SkCanvas::kMatrix_SaveFlag);
+ renderer->concatMatrix(*matrix);
+ renderer->drawBitmap(bitmap, paint);
+ renderer->restore();
}
static void android_view_GLES20Canvas_drawBitmapData(JNIEnv* env, jobject clazz,
@@ -399,7 +409,12 @@
DisplayListRenderer* renderer = reinterpret_cast<DisplayListRenderer*>(rendererPtr);
Paint* paint = reinterpret_cast<Paint*>(paintPtr);
- renderer->drawBitmapData(bitmap, left, top, paint);
+
+ // apply transform directly to canvas, so it affects shaders correctly
+ renderer->save(SkCanvas::kMatrix_SaveFlag);
+ renderer->translate(left, top);
+ renderer->drawBitmapData(bitmap, paint);
+ renderer->restore();
// Note - bitmap isn't deleted as DisplayListRenderer owns it now
}
diff --git a/graphics/java/android/graphics/drawable/AnimatedVectorDrawable.java b/graphics/java/android/graphics/drawable/AnimatedVectorDrawable.java
index 54683aa..00c92fa 100644
--- a/graphics/java/android/graphics/drawable/AnimatedVectorDrawable.java
+++ b/graphics/java/android/graphics/drawable/AnimatedVectorDrawable.java
@@ -312,9 +312,7 @@
final int size = animators.size();
for (int i = 0; i < size; i++) {
final Animator animator = animators.get(i);
- if (animator.isPaused()) {
- animator.resume();
- } else if (!animator.isRunning()) {
+ if (!animator.isRunning()) {
animator.start();
}
}
@@ -327,7 +325,7 @@
final int size = animators.size();
for (int i = 0; i < size; i++) {
final Animator animator = animators.get(i);
- animator.pause();
+ animator.end();
}
}
diff --git a/libs/hwui/DisplayListOp.h b/libs/hwui/DisplayListOp.h
index 777a35a..6883cc5 100644
--- a/libs/hwui/DisplayListOp.h
+++ b/libs/hwui/DisplayListOp.h
@@ -639,9 +639,10 @@
class DrawBitmapOp : public DrawBoundedOp {
public:
- DrawBitmapOp(const SkBitmap* bitmap, float left, float top, const SkPaint* paint)
- : DrawBoundedOp(left, top, left + bitmap->width(), top + bitmap->height(), paint),
- mBitmap(bitmap), mAtlas(Caches::getInstance().assetAtlas) {
+ DrawBitmapOp(const SkBitmap* bitmap, const SkPaint* paint)
+ : DrawBoundedOp(0, 0, bitmap->width(), bitmap->height(), paint)
+ , mBitmap(bitmap)
+ , mAtlas(Caches::getInstance().assetAtlas) {
mEntry = mAtlas.getEntry(bitmap);
if (mEntry) {
mEntryGenerationId = mAtlas.getGenerationId();
@@ -650,8 +651,7 @@
}
virtual status_t applyDraw(OpenGLRenderer& renderer, Rect& dirty) {
- return renderer.drawBitmap(mBitmap, mLocalBounds.left, mLocalBounds.top,
- getPaint(renderer));
+ return renderer.drawBitmap(mBitmap, getPaint(renderer));
}
AssetAtlas::Entry* getAtlasEntry() {
@@ -745,35 +745,6 @@
UvMapper mUvMapper;
};
-class DrawBitmapMatrixOp : public DrawBoundedOp {
-public:
- DrawBitmapMatrixOp(const SkBitmap* bitmap, const SkMatrix& matrix, const SkPaint* paint)
- : DrawBoundedOp(paint), mBitmap(bitmap), mMatrix(matrix) {
- mLocalBounds.set(0, 0, bitmap->width(), bitmap->height());
- const mat4 transform(matrix);
- transform.mapRect(mLocalBounds);
- }
-
- virtual status_t applyDraw(OpenGLRenderer& renderer, Rect& dirty) {
- return renderer.drawBitmap(mBitmap, mMatrix, getPaint(renderer));
- }
-
- virtual void output(int level, uint32_t logFlags) const {
- OP_LOG("Draw bitmap %p matrix " SK_MATRIX_STRING, mBitmap, SK_MATRIX_ARGS(&mMatrix));
- }
-
- virtual const char* name() { return "DrawBitmapMatrix"; }
-
- virtual void onDefer(OpenGLRenderer& renderer, DeferInfo& deferInfo,
- const DeferredDisplayState& state) {
- deferInfo.batchId = DeferredDisplayList::kOpBatch_Bitmap;
- }
-
-private:
- const SkBitmap* mBitmap;
- const SkMatrix mMatrix;
-};
-
class DrawBitmapRectOp : public DrawBoundedOp {
public:
DrawBitmapRectOp(const SkBitmap* bitmap,
@@ -807,12 +778,11 @@
class DrawBitmapDataOp : public DrawBitmapOp {
public:
- DrawBitmapDataOp(const SkBitmap* bitmap, float left, float top, const SkPaint* paint)
- : DrawBitmapOp(bitmap, left, top, paint) {}
+ DrawBitmapDataOp(const SkBitmap* bitmap, const SkPaint* paint)
+ : DrawBitmapOp(bitmap, paint) {}
virtual status_t applyDraw(OpenGLRenderer& renderer, Rect& dirty) {
- return renderer.drawBitmapData(mBitmap, mLocalBounds.left,
- mLocalBounds.top, getPaint(renderer));
+ return renderer.drawBitmapData(mBitmap, getPaint(renderer));
}
virtual void output(int level, uint32_t logFlags) const {
diff --git a/libs/hwui/DisplayListRenderer.cpp b/libs/hwui/DisplayListRenderer.cpp
index 5286ef8..b210e64 100644
--- a/libs/hwui/DisplayListRenderer.cpp
+++ b/libs/hwui/DisplayListRenderer.cpp
@@ -194,51 +194,42 @@
return DrawGlInfo::kStatusDone;
}
-status_t DisplayListRenderer::drawBitmap(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) {
+status_t DisplayListRenderer::drawBitmap(const SkBitmap* bitmap, const SkPaint* paint) {
bitmap = refBitmap(bitmap);
paint = refPaint(paint);
- addDrawOp(new (alloc()) DrawBitmapOp(bitmap, left, top, paint));
- return DrawGlInfo::kStatusDone;
-}
-
-status_t DisplayListRenderer::drawBitmap(const SkBitmap* bitmap, const SkMatrix& matrix,
- const SkPaint* paint) {
- bitmap = refBitmap(bitmap);
- paint = refPaint(paint);
-
- addDrawOp(new (alloc()) DrawBitmapMatrixOp(bitmap, matrix, paint));
+ addDrawOp(new (alloc()) DrawBitmapOp(bitmap, paint));
return DrawGlInfo::kStatusDone;
}
status_t DisplayListRenderer::drawBitmap(const SkBitmap* bitmap, float srcLeft, float srcTop,
float srcRight, float srcBottom, float dstLeft, float dstTop,
float dstRight, float dstBottom, const SkPaint* paint) {
- bitmap = refBitmap(bitmap);
- paint = refPaint(paint);
-
- if (srcLeft == 0 && srcTop == 0 &&
- srcRight == bitmap->width() && srcBottom == bitmap->height() &&
- (srcBottom - srcTop == dstBottom - dstTop) &&
- (srcRight - srcLeft == dstRight - dstLeft)) {
+ if (srcLeft == 0 && srcTop == 0
+ && srcRight == bitmap->width() && srcBottom == bitmap->height()
+ && (srcBottom - srcTop == dstBottom - dstTop)
+ && (srcRight - srcLeft == dstRight - dstLeft)) {
// transform simple rect to rect drawing case into position bitmap ops, since they merge
- addDrawOp(new (alloc()) DrawBitmapOp(bitmap, dstLeft, dstTop, paint));
- return DrawGlInfo::kStatusDone;
- }
+ save(SkCanvas::kMatrix_SaveFlag);
+ translate(dstLeft, dstTop);
+ drawBitmap(bitmap, paint);
+ restore();
+ } else {
+ bitmap = refBitmap(bitmap);
+ paint = refPaint(paint);
- addDrawOp(new (alloc()) DrawBitmapRectOp(bitmap,
- srcLeft, srcTop, srcRight, srcBottom,
- dstLeft, dstTop, dstRight, dstBottom, paint));
+ addDrawOp(new (alloc()) DrawBitmapRectOp(bitmap,
+ srcLeft, srcTop, srcRight, srcBottom,
+ dstLeft, dstTop, dstRight, dstBottom, paint));
+ }
return DrawGlInfo::kStatusDone;
}
-status_t DisplayListRenderer::drawBitmapData(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) {
+status_t DisplayListRenderer::drawBitmapData(const SkBitmap* bitmap, const SkPaint* paint) {
bitmap = refBitmapData(bitmap);
paint = refPaint(paint);
- addDrawOp(new (alloc()) DrawBitmapDataOp(bitmap, left, top, paint));
+ addDrawOp(new (alloc()) DrawBitmapDataOp(bitmap, paint));
return DrawGlInfo::kStatusDone;
}
diff --git a/libs/hwui/DisplayListRenderer.h b/libs/hwui/DisplayListRenderer.h
index d1d8572..1b3a48a 100644
--- a/libs/hwui/DisplayListRenderer.h
+++ b/libs/hwui/DisplayListRenderer.h
@@ -105,15 +105,11 @@
virtual status_t drawColor(int color, SkXfermode::Mode mode);
// Bitmap-based
- virtual status_t drawBitmap(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint);
- virtual status_t drawBitmap(const SkBitmap* bitmap, const SkMatrix& matrix,
- const SkPaint* paint);
+ virtual status_t drawBitmap(const SkBitmap* bitmap, const SkPaint* paint);
virtual status_t drawBitmap(const SkBitmap* bitmap, float srcLeft, float srcTop,
float srcRight, float srcBottom, float dstLeft, float dstTop,
float dstRight, float dstBottom, const SkPaint* paint);
- virtual status_t drawBitmapData(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint);
+ virtual status_t drawBitmapData(const SkBitmap* bitmap, const SkPaint* paint);
virtual status_t drawBitmapMesh(const SkBitmap* bitmap, int meshWidth, int meshHeight,
const float* vertices, const int* colors, const SkPaint* paint);
virtual status_t drawPatch(const SkBitmap* bitmap, const Res_png_9patch* patch,
diff --git a/libs/hwui/OpenGLRenderer.cpp b/libs/hwui/OpenGLRenderer.cpp
index 5fbef2e..e00d2e3 100755
--- a/libs/hwui/OpenGLRenderer.cpp
+++ b/libs/hwui/OpenGLRenderer.cpp
@@ -2034,12 +2034,8 @@
return DrawGlInfo::kStatusDrew;
}
-status_t OpenGLRenderer::drawBitmap(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) {
- const float right = left + bitmap->width();
- const float bottom = top + bitmap->height();
-
- if (quickRejectSetupScissor(left, top, right, bottom)) {
+status_t OpenGLRenderer::drawBitmap(const SkBitmap* bitmap, const SkPaint* paint) {
+ if (quickRejectSetupScissor(0, 0, bitmap->width(), bitmap->height())) {
return DrawGlInfo::kStatusDone;
}
@@ -2049,49 +2045,16 @@
const AutoTexture autoCleanup(texture);
if (CC_UNLIKELY(bitmap->colorType() == kAlpha_8_SkColorType)) {
- drawAlphaBitmap(texture, left, top, paint);
+ drawAlphaBitmap(texture, 0, 0, paint);
} else {
- drawTextureRect(left, top, right, bottom, texture, paint);
+ drawTextureRect(0, 0, bitmap->width(), bitmap->height(), texture, paint);
}
return DrawGlInfo::kStatusDrew;
}
-status_t OpenGLRenderer::drawBitmap(const SkBitmap* bitmap, const SkMatrix& matrix,
- const SkPaint* paint) {
- Rect r(0.0f, 0.0f, bitmap->width(), bitmap->height());
- const mat4 transform(matrix);
- transform.mapRect(r);
-
- if (quickRejectSetupScissor(r.left, r.top, r.right, r.bottom)) {
- return DrawGlInfo::kStatusDone;
- }
-
- mCaches.activeTexture(0);
- Texture* texture = getTexture(bitmap);
- if (!texture) return DrawGlInfo::kStatusDone;
- const AutoTexture autoCleanup(texture);
-
- // This could be done in a cheaper way, all we need is pass the matrix
- // to the vertex shader. The save/restore is a bit overkill.
- save(SkCanvas::kMatrix_SaveFlag);
- concatMatrix(matrix);
- if (CC_UNLIKELY(bitmap->colorType() == kAlpha_8_SkColorType)) {
- drawAlphaBitmap(texture, 0.0f, 0.0f, paint);
- } else {
- drawTextureRect(0.0f, 0.0f, bitmap->width(), bitmap->height(), texture, paint);
- }
- restore();
-
- return DrawGlInfo::kStatusDrew;
-}
-
-status_t OpenGLRenderer::drawBitmapData(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) {
- const float right = left + bitmap->width();
- const float bottom = top + bitmap->height();
-
- if (quickRejectSetupScissor(left, top, right, bottom)) {
+status_t OpenGLRenderer::drawBitmapData(const SkBitmap* bitmap, const SkPaint* paint) {
+ if (quickRejectSetupScissor(0, 0, bitmap->width(), bitmap->height())) {
return DrawGlInfo::kStatusDone;
}
@@ -2100,9 +2063,9 @@
const AutoTexture autoCleanup(texture);
if (CC_UNLIKELY(bitmap->colorType() == kAlpha_8_SkColorType)) {
- drawAlphaBitmap(texture, left, top, paint);
+ drawAlphaBitmap(texture, 0, 0, paint);
} else {
- drawTextureRect(left, top, right, bottom, texture, paint);
+ drawTextureRect(0, 0, bitmap->width(), bitmap->height(), texture, paint);
}
return DrawGlInfo::kStatusDrew;
diff --git a/libs/hwui/OpenGLRenderer.h b/libs/hwui/OpenGLRenderer.h
index 3bc591f..fd228db 100755
--- a/libs/hwui/OpenGLRenderer.h
+++ b/libs/hwui/OpenGLRenderer.h
@@ -161,17 +161,13 @@
virtual status_t drawRenderNode(RenderNode* displayList, Rect& dirty, int32_t replayFlags = 1);
virtual status_t drawLayer(Layer* layer, float x, float y);
- virtual status_t drawBitmap(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint);
+ virtual status_t drawBitmap(const SkBitmap* bitmap, const SkPaint* paint);
status_t drawBitmaps(const SkBitmap* bitmap, AssetAtlas::Entry* entry, int bitmapCount,
TextureVertex* vertices, bool pureTranslate, const Rect& bounds, const SkPaint* paint);
- virtual status_t drawBitmap(const SkBitmap* bitmap, const SkMatrix& matrix,
- const SkPaint* paint);
virtual status_t drawBitmap(const SkBitmap* bitmap, float srcLeft, float srcTop,
float srcRight, float srcBottom, float dstLeft, float dstTop,
float dstRight, float dstBottom, const SkPaint* paint);
- virtual status_t drawBitmapData(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint);
+ virtual status_t drawBitmapData(const SkBitmap* bitmap, const SkPaint* paint);
virtual status_t drawBitmapMesh(const SkBitmap* bitmap, int meshWidth, int meshHeight,
const float* vertices, const int* colors, const SkPaint* paint);
status_t drawPatches(const SkBitmap* bitmap, AssetAtlas::Entry* entry,
diff --git a/libs/hwui/Renderer.h b/libs/hwui/Renderer.h
index 40a21e4..f5cd266 100644
--- a/libs/hwui/Renderer.h
+++ b/libs/hwui/Renderer.h
@@ -160,15 +160,11 @@
virtual status_t drawColor(int color, SkXfermode::Mode mode) = 0;
// Bitmap-based
- virtual status_t drawBitmap(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) = 0;
- virtual status_t drawBitmap(const SkBitmap* bitmap, const SkMatrix& matrix,
- const SkPaint* paint) = 0;
+ virtual status_t drawBitmap(const SkBitmap* bitmap, const SkPaint* paint) = 0;
virtual status_t drawBitmap(const SkBitmap* bitmap, float srcLeft, float srcTop,
float srcRight, float srcBottom, float dstLeft, float dstTop,
float dstRight, float dstBottom, const SkPaint* paint) = 0;
- virtual status_t drawBitmapData(const SkBitmap* bitmap, float left, float top,
- const SkPaint* paint) = 0;
+ virtual status_t drawBitmapData(const SkBitmap* bitmap, const SkPaint* paint) = 0;
virtual status_t drawBitmapMesh(const SkBitmap* bitmap, int meshWidth, int meshHeight,
const float* vertices, const int* colors, const SkPaint* paint) = 0;
virtual status_t drawPatch(const SkBitmap* bitmap, const Res_png_9patch* patch,
diff --git a/libs/hwui/renderthread/RenderProxy.cpp b/libs/hwui/renderthread/RenderProxy.cpp
index 3f03093..986e808 100644
--- a/libs/hwui/renderthread/RenderProxy.cpp
+++ b/libs/hwui/renderthread/RenderProxy.cpp
@@ -336,11 +336,14 @@
}
void RenderProxy::trimMemory(int level) {
- RenderThread& thread = RenderThread::getInstance();
- SETUP_TASK(timMemory);
- args->thread = &thread;
- args->level = level;
- thread.queue(task);
+ // Avoid creating a RenderThread to do a trimMemory.
+ if (RenderThread::hasInstance()) {
+ RenderThread& thread = RenderThread::getInstance();
+ SETUP_TASK(timMemory);
+ args->thread = &thread;
+ args->level = level;
+ thread.queue(task);
+ }
}
CREATE_BRIDGE0(fence) {
diff --git a/media/java/android/media/browse/MediaBrowser.java b/media/java/android/media/browse/MediaBrowser.java
index 858383e..1c6d81f 100644
--- a/media/java/android/media/browse/MediaBrowser.java
+++ b/media/java/android/media/browse/MediaBrowser.java
@@ -225,6 +225,17 @@
}
/**
+ * Gets the service component that the media browser is connected to.
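+ * @throws IllegalStateException if not connected.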
+ */
+ public @NonNull ComponentName getServiceComponent() {
+ if (!isConnected()) {
+ throw new IllegalStateException("getServiceComponent() called while not connected" +
+ " (state=" + mState + ")");
+ }
+ return mServiceComponent;
+ }
+
+ /**
* Gets the root Uri.
* <p>
* Note that the root uri may become invalid or change when the
@@ -234,7 +245,7 @@
* @throws IllegalStateException if not connected.
*/
public @NonNull Uri getRoot() {
- if (mState != CONNECT_STATE_CONNECTED) {
+ if (!isConnected()) {
throw new IllegalStateException("getSessionToken() called while not connected (state="
+ getStateLabel(mState) + ")");
}
@@ -247,7 +258,7 @@
* @throws IllegalStateException if not connected.
*/
public @Nullable Bundle getExtras() {
- if (mState != CONNECT_STATE_CONNECTED) {
+ if (!isConnected()) {
throw new IllegalStateException("getExtras() called while not connected (state="
+ getStateLabel(mState) + ")");
}
@@ -266,7 +277,7 @@
* @throws IllegalStateException if not connected.
*/
public @NonNull MediaSession.Token getSessionToken() {
- if (mState != CONNECT_STATE_CONNECTED) {
+ if (!isConnected()) {
throw new IllegalStateException("getSessionToken() called while not connected (state="
+ mState + ")");
}
diff --git a/media/java/android/media/session/MediaController.java b/media/java/android/media/session/MediaController.java
index 382579c..e3c198e 100644
--- a/media/java/android/media/session/MediaController.java
+++ b/media/java/android/media/session/MediaController.java
@@ -196,6 +196,30 @@
}
/**
+ * Get the queue title for this session.
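+ * @return The queue title, or {@code null} if unavailable.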
+ */
+ public @Nullable CharSequence getQueueTitle() {
+ try {
+ return mSessionBinder.getQueueTitle();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getQueueTitle", e);
+ }
+ return null;
+ }
+
+ /**
+ * Get the extras for this session.
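+ * @return The extras for this session, or {@code null} if unavailable.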
+ */
+ public @Nullable Bundle getExtras() {
+ try {
+ return mSessionBinder.getExtras();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getExtras", e);
+ }
+ return null;
+ }
+
+ /**
* Get the rating type supported by the session. One of:
* <ul>
* <li>{@link Rating#RATING_NONE}</li>
diff --git a/telecomm/java/android/telecomm/ConnectionService.java b/telecomm/java/android/telecomm/ConnectionService.java
index 53b304a..fddc9b0 100644
--- a/telecomm/java/android/telecomm/ConnectionService.java
+++ b/telecomm/java/android/telecomm/ConnectionService.java
@@ -880,6 +880,11 @@
return sNullConnection;
}
+ /**
+ * Abstraction for a class which provides video call functionality. The base
+ * implementations of its methods are no-ops; subclasses are expected to override
+ * the methods they support to provide the desired behavior.
+ */
public static abstract class VideoCallProvider {
private static final int MSG_SET_VIDEO_CALL_LISTENER = 1;
private static final int MSG_SET_CAMERA = 2;
@@ -1012,38 +1017,48 @@
*
* @param cameraId The id of the camera.
*/
- public abstract void onSetCamera(String cameraId);
+ public void onSetCamera(String cameraId) {
+ // To be implemented by subclass.
+ }
/**
* Sets the surface to be used for displaying a preview of what the user's camera is
- * currently capturing. When video transmission is enabled, this is the video signal which is
- * sent to the remote device.
+ * currently capturing. When video transmission is enabled, this is the video signal which
+ * is sent to the remote device.
*
* @param surface The surface.
*/
- public abstract void onSetPreviewSurface(Surface surface);
+ public void onSetPreviewSurface(Surface surface) {
+ // To be implemented by subclass.
+ }
/**
* Sets the surface to be used for displaying the video received from the remote device.
*
* @param surface The surface.
*/
- public abstract void onSetDisplaySurface(Surface surface);
+ public void onSetDisplaySurface(Surface surface) {
+ // To be implemented by subclass.
+ }
/**
- * Sets the device orientation, in degrees. Assumes that a standard portrait orientation of the
- * device is 0 degrees.
+ * Sets the device orientation, in degrees. Assumes that a standard portrait orientation of
+ * the device is 0 degrees.
*
* @param rotation The device orientation, in degrees.
*/
- public abstract void onSetDeviceOrientation(int rotation);
+ public void onSetDeviceOrientation(int rotation) {
+ // To be implemented by subclass.
+ }
/**
* Sets camera zoom ratio.
*
* @param value The camera zoom ratio.
*/
- public abstract void onSetZoom(float value);
+ public void onSetZoom(float value) {
+ // To be implemented by subclass.
+ }
/**
* Issues a request to modify the properties of the current session. The request is sent to
@@ -1054,7 +1069,9 @@
*
* @param requestProfile The requested call video properties.
*/
- public abstract void onSendSessionModifyRequest(VideoCallProfile requestProfile);
+ public void onSendSessionModifyRequest(VideoCallProfile requestProfile) {
+ // To be implemented by subclass.
+ }
/**
* Provides a response to a request to change the current call session video
@@ -1066,21 +1083,27 @@
*
* @param responseProfile The response call video properties.
*/
- public abstract void onSendSessionModifyResponse(VideoCallProfile responseProfile);
+ public void onSendSessionModifyResponse(VideoCallProfile responseProfile) {
+ // To be implemented by subclass.
+ }
/**
* Issues a request to the video provider to retrieve the camera capabilities.
* Camera capabilities are reported back to the caller via
* {@link InCallService.VideoCall.Listener#onCameraCapabilitiesChanged(CallCameraCapabilities)}.
*/
- public abstract void onRequestCameraCapabilities();
+ public void onRequestCameraCapabilities() {
+ // To be implemented by subclass.
+ }
/**
* Issues a request to the video telephony framework to retrieve the cumulative data usage for
* the current call. Data usage is reported back to the caller via
* {@link InCallService.VideoCall.Listener#onCallDataUsageChanged}.
*/
- public abstract void onRequestCallDataUsage();
+ public void onRequestCallDataUsage() {
+ // To be implemented by subclass.
+ }
/**
* Provides the video telephony framework with the URI of an image to be displayed to remote
@@ -1088,7 +1111,9 @@
*
* @param uri URI of image to display.
*/
- public abstract void onSetPauseImage(String uri);
+ public void onSetPauseImage(String uri) {
+ // To be implemented by subclass.
+ }
/**
* Invokes callback method defined in {@link InCallService.VideoCall.Listener}.