am 92ea38bd: VideoEncoderDecoderTest: use getImage() for YUV buffers

* commit '92ea38bde4d8c216c522a2c38c127eb3626e885d':
  VideoEncoderDecoderTest: use getImage() for YUV buffers
diff --git a/suite/cts/deviceTests/videoperf/Android.mk b/suite/cts/deviceTests/videoperf/Android.mk
index cd82dde..a393683 100644
--- a/suite/cts/deviceTests/videoperf/Android.mk
+++ b/suite/cts/deviceTests/videoperf/Android.mk
@@ -17,8 +17,15 @@
 
 # don't include this package in any target
 LOCAL_MODULE_TAGS := optional
+# and when built explicitly put it in the data partition
+LOCAL_MODULE_PATH := $(TARGET_OUT_DATA_APPS)
 
-LOCAL_STATIC_JAVA_LIBRARIES := ctsdeviceutil ctstestrunner
+# include both the 32 and 64 bit versions
+LOCAL_MULTILIB := both
+
+LOCAL_STATIC_JAVA_LIBRARIES := ctsmediautil ctsdeviceutil ctstestrunner
+
+LOCAL_JNI_SHARED_LIBRARIES := libctsmediacodec_jni
 
 LOCAL_SRC_FILES := $(call all-java-files-under, src)
 
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
index b7d1d27..6459c86 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/CodecInfo.java
@@ -66,7 +66,12 @@
                 break;
             }
         }
-        VideoCapabilities vidCap = cap.getVideoCapabilities();
+
+        if (cap.colorFormats.length == 0) {
+            Log.w(TAG, "no supported color format");
+            return null;
+        }
+
         CodecInfo info = new CodecInfo();
         for (int color : cap.colorFormats) {
             if (color == CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
@@ -77,12 +82,8 @@
             }
         }
         printIntArray("supported colors", cap.colorFormats);
-        //  either YUV420 planar or semiplanar should be supported
-        if (!info.mSupportPlanar && !info.mSupportSemiPlanar) {
-            Log.i(TAG, "no supported color format");
-            return null;
-        }
 
+        VideoCapabilities vidCap = cap.getVideoCapabilities();
         if (mimeType.equals(VIDEO_AVC)) {
             info.mFps = vidCap.getSupportedFrameRatesFor(w, h).getUpper().intValue();
             info.mBitRate = vidCap.getBitrateRange().getUpper();
diff --git a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
index aacb7a5..bf02d9c 100644
--- a/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
+++ b/suite/cts/deviceTests/videoperf/src/com/android/cts/videoperf/VideoEncoderDecoderTest.java
@@ -16,7 +16,12 @@
 
 package com.android.cts.videoperf;
 
+import android.graphics.ImageFormat;
 import android.graphics.Point;
+import android.media.cts.CodecImage;
+import android.media.cts.CodecUtils;
+import android.media.Image;
+import android.media.Image.Plane;
 import android.media.MediaCodec;
 import android.media.MediaCodecList;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -60,12 +65,10 @@
     private static final int Y_CLAMP_MIN = 16;
     private static final int Y_CLAMP_MAX = 235;
     private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
-    private ByteBuffer mYBuffer;
-    private ByteBuffer mUVBuffer;
-    // if input raw data is semi-planar
-    private boolean mSrcSemiPlanar;
-    // if output raw data is semi-planar
-    private boolean mDstSemiPlanar;
+    private ByteBuffer mYBuffer, mYDirectBuffer;
+    private ByteBuffer mUVBuffer, mUVDirectBuffer;
+    private int mSrcColorFormat;
+    private int mDstColorFormat;
     private int mBufferWidth;
     private int mBufferHeight;
     private int mVideoWidth;
@@ -95,6 +98,8 @@
         mEncodedOutputBuffer = null;
         mYBuffer = null;
         mUVBuffer = null;
+        mYDirectBuffer = null;
+        mUVDirectBuffer = null;
         mRandom = null;
         super.tearDown();
     }
@@ -124,6 +129,33 @@
         doTest(VIDEO_AVC, 1920, 1072, NUMBER_OF_REPEAT);
     }
 
+    private boolean isSrcSemiPlanar() {
+        return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
+    }
+
+    private boolean isSrcFlexYUV() {
+        return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
+    }
+
+    private boolean isDstSemiPlanar() {
+        return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
+    }
+
+    private boolean isDstFlexYUV() {
+        return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
+    }
+
+    private static int getColorFormat(CodecInfo info) {
+        if (info.mSupportSemiPlanar) {
+            return CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
+        } else if (info.mSupportPlanar) {
+            return CodecCapabilities.COLOR_FormatYUV420Planar;
+        } else {
+            // FlexYUV must be supported
+            return CodecCapabilities.COLOR_FormatYUV420Flexible;
+        }
+    }
+
     /**
      * Run encoding / decoding test for given mimeType of codec
      * @param mimeType like video/avc
@@ -141,8 +173,14 @@
         assertNotNull(infoDec);
         mVideoWidth = w;
         mVideoHeight = h;
-        initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH,
-                infoEnc.mSupportSemiPlanar, infoDec.mSupportSemiPlanar);
+
+        mSrcColorFormat = getColorFormat(infoEnc);
+        mDstColorFormat = getColorFormat(infoDec);
+        Log.i(TAG, "Testing video resolution " + w + "x" + h +
+                   ": enc format " + mSrcColorFormat +
+                   ", dec format " + mDstColorFormat);
+
+        initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH);
         double[] encoderFpsResults = new double[numberRepeat];
         double[] decoderFpsResults = new double[numberRepeat];
         double[] totalFpsResults = new double[numberRepeat];
@@ -154,9 +192,7 @@
             format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
             format.setInteger(MediaFormat.KEY_WIDTH, w);
             format.setInteger(MediaFormat.KEY_HEIGHT, h);
-            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
-                    infoEnc.mSupportSemiPlanar ? CodecCapabilities.COLOR_FormatYUV420SemiPlanar :
-                        CodecCapabilities.COLOR_FormatYUV420Planar);
+            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mSrcColorFormat);
             format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
             mFrameRate = infoEnc.mFps;
             format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
@@ -166,9 +202,7 @@
             format.setString(MediaFormat.KEY_MIME, mimeType);
             format.setInteger(MediaFormat.KEY_WIDTH, w);
             format.setInteger(MediaFormat.KEY_HEIGHT, h);
-            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
-                    infoDec.mSupportSemiPlanar ? CodecCapabilities.COLOR_FormatYUV420SemiPlanar :
-                        CodecCapabilities.COLOR_FormatYUV420Planar);
+            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
             double[] decoderResult = runDecoder(VIDEO_AVC, format);
             if (decoderResult == null) {
                 success = false;
@@ -228,7 +262,6 @@
             return Double.NaN;
         }
         codec.start();
-        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
 
         int numBytesSubmitted = 0;
@@ -241,10 +274,24 @@
             if (inFramesCount < totalFrames) {
                 index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
                 if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
-                    int size = queueInputBufferEncoder(
-                            codec, codecInputBuffers, index, inFramesCount,
-                            (inFramesCount == (totalFrames - 1)) ?
-                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                    int size;
+                    // when encoder only supports flexYUV, use Image only; otherwise,
+                    // use ByteBuffer & Image each on half of the frames to test both
+                    if (isSrcFlexYUV() || inFramesCount % 2 == 0) {
+                        Image image = codec.getInputImage(index);
+                        // image should always be available
+                        assertTrue(image != null);
+                        size = queueInputImageEncoder(
+                                codec, image, index, inFramesCount,
+                                (inFramesCount == (totalFrames - 1)) ?
+                                        MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                    } else {
+                        ByteBuffer buffer = codec.getInputBuffer(index);
+                        size = queueInputBufferEncoder(
+                                codec, buffer, index, inFramesCount,
+                                (inFramesCount == (totalFrames - 1)) ?
+                                        MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                    }
                     inFramesCount++;
                     numBytesSubmitted += size;
                     if (VERBOSE) {
@@ -290,8 +337,7 @@
      * @return size of enqueued data.
      */
     private int queueInputBufferEncoder(
-            MediaCodec codec, ByteBuffer[] inputBuffers, int index, int frameCount, int flags) {
-        ByteBuffer buffer = inputBuffers[index];
+            MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags) {
         buffer.clear();
 
         Point origin = getOrigin(frameCount);
@@ -302,7 +348,7 @@
             buffer.put(yBuffer, srcOffsetY, mVideoWidth);
             srcOffsetY += mBufferWidth;
         }
-        if (mSrcSemiPlanar) {
+        if (isSrcSemiPlanar()) {
             int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
             final byte[] uvBuffer = mUVBuffer.array();
             for (int i = 0; i < mVideoHeight / 2; i++) {
@@ -313,16 +359,161 @@
             int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
             int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
             final byte[] uvBuffer = mUVBuffer.array();
-            for (int i = 0; i < mVideoHeight /2; i++) { //U only
+            for (int i = 0; i < mVideoHeight / 2; i++) { //U only
                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
                 srcOffsetU += mBufferWidth / 2;
             }
-            for (int i = 0; i < mVideoHeight /2; i++) { //V only
+            for (int i = 0; i < mVideoHeight / 2; i++) { //V only
                 buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
                 srcOffsetV += mBufferWidth / 2;
             }
         }
-        int size = mVideoHeight * mVideoWidth * 3 /2;
+        int size = mVideoHeight * mVideoWidth * 3 / 2;
+        long ptsUsec = computePresentationTime(frameCount);
+
+        codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
+        if (VERBOSE && (frameCount == 0)) {
+            printByteArray("Y ", mYBuffer.array(), 0, 20);
+            printByteArray("UV ", mUVBuffer.array(), 0, 20);
+            printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
+        }
+        return size;
+    }
+
+    class YUVImage extends CodecImage {
+        private final int mImageWidth;
+        private final int mImageHeight;
+        private final Plane[] mPlanes;
+
+        YUVImage(
+                Point origin,
+                int imageWidth, int imageHeight,
+                int arrayWidth, int arrayHeight,
+                boolean semiPlanar,
+                ByteBuffer bufferY, ByteBuffer bufferUV) {
+            mImageWidth = imageWidth;
+            mImageHeight = imageHeight;
+            ByteBuffer dupY = bufferY.duplicate();
+            ByteBuffer dupUV = bufferUV.duplicate();
+            mPlanes = new Plane[3];
+
+            int srcOffsetY = origin.x + origin.y * arrayWidth;
+
+            mPlanes[0] = new YUVPlane(
+                        mImageWidth, mImageHeight, arrayWidth, 1,
+                        dupY, srcOffsetY);
+
+            if (semiPlanar) {
+                int srcOffsetUV = origin.y / 2 * arrayWidth + origin.x / 2 * 2;
+
+                mPlanes[1] = new YUVPlane(
+                        mImageWidth / 2, mImageHeight / 2, arrayWidth, 2,
+                        dupUV, srcOffsetUV);
+                mPlanes[2] = new YUVPlane(
+                        mImageWidth / 2, mImageHeight / 2, arrayWidth, 2,
+                        dupUV, srcOffsetUV + 1);
+            } else {
+                int srcOffsetU = origin.y / 2 * arrayWidth / 2 + origin.x / 2;
+                int srcOffsetV = srcOffsetU + arrayWidth / 2 * arrayHeight / 2;
+
+                mPlanes[1] = new YUVPlane(
+                        mImageWidth / 2, mImageHeight / 2, arrayWidth / 2, 1,
+                        dupUV, srcOffsetU);
+                mPlanes[2] = new YUVPlane(
+                        mImageWidth / 2, mImageHeight / 2, arrayWidth / 2, 1,
+                        dupUV, srcOffsetV);
+            }
+        }
+
+        @Override
+        public int getFormat() {
+            return ImageFormat.YUV_420_888;
+        }
+
+        @Override
+        public int getWidth() {
+            return mImageWidth;
+        }
+
+        @Override
+        public int getHeight() {
+            return mImageHeight;
+        }
+
+        @Override
+        public long getTimestamp() {
+            return 0;
+        }
+
+        @Override
+        public Plane[] getPlanes() {
+            return mPlanes;
+        }
+
+        @Override
+        public void close() {
+            mPlanes[0] = null;
+            mPlanes[1] = null;
+            mPlanes[2] = null;
+        }
+
+        class YUVPlane extends CodecImage.Plane {
+            private final int mRowStride;
+            private final int mPixelStride;
+            private final ByteBuffer mByteBuffer;
+
+            YUVPlane(int w, int h, int rowStride, int pixelStride,
+                    ByteBuffer buffer, int offset) {
+                mRowStride = rowStride;
+                mPixelStride = pixelStride;
+
+                // only safe to access length bytes starting from buffer[offset]
+                int length = (h - 1) * rowStride + (w - 1) * pixelStride + 1;
+
+                buffer.position(offset);
+                mByteBuffer = buffer.slice();
+                mByteBuffer.limit(length);
+            }
+
+            @Override
+            public int getRowStride() {
+                return mRowStride;
+            }
+
+            @Override
+            public int getPixelStride() {
+                return mPixelStride;
+            }
+
+            @Override
+            public ByteBuffer getBuffer() {
+                return mByteBuffer;
+            }
+        }
+    }
+
+    /**
+     * Fills input image for encoder from YUV buffers.
+     * @return size of enqueued data.
+     */
+    private int queueInputImageEncoder(
+            MediaCodec codec, Image image, int index, int frameCount, int flags) {
+        assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
+
+
+        Point origin = getOrigin(frameCount);
+
+        // Y color first
+        CodecImage srcImage = new YUVImage(
+                origin,
+                mVideoWidth, mVideoHeight,
+                mBufferWidth, mBufferHeight,
+                isSrcSemiPlanar(),
+                mYDirectBuffer, mUVDirectBuffer);
+
+        CodecUtils.copyFlexYUVImage(image, srcImage);
+
+        int size = mVideoHeight * mVideoWidth * 3 / 2;
         long ptsUsec = computePresentationTime(frameCount);
 
         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
@@ -368,7 +559,6 @@
         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
         codec.start();
         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
-        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
 
         double totalErrorSquared = 0;
 
@@ -407,22 +597,49 @@
 
                 // only do YUV compare on EOS frame if the buffer size is none-zero
                 if (info.size > 0) {
-                    ByteBuffer buf = codecOutputBuffers[outputBufIndex];
-                    if (VERBOSE && (outFrameCount == 0)) {
-                        printByteBuffer("Y ", buf, 0, 20);
-                        printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
-                        printByteBuffer("UV ", buf,
-                                mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
-                    }
                     Point origin = getOrigin(outFrameCount);
-                    for (int i = 0; i < PIXEL_CHECK_PER_FRAME; i++) {
+                    int i;
+
+                    // if decoder supports planar or semiplanar, check output with
+                    // ByteBuffer & Image each on half of the points
+                    int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
+                    if (!isDstFlexYUV()) {
+                        pixelCheckPerFrame /= 2;
+                        ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
+                        if (VERBOSE && (outFrameCount == 0)) {
+                            printByteBuffer("Y ", buf, 0, 20);
+                            printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
+                            printByteBuffer("UV ", buf,
+                                    mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+                        }
+                        for (i = 0; i < pixelCheckPerFrame; i++) {
+                            int w = mRandom.nextInt(mVideoWidth);
+                            int h = mRandom.nextInt(mVideoHeight);
+                            getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
+                            getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+                            if (VERBOSE) {
+                                Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
+                                        + " expected "
+                                        + expected.mY + "," + expected.mU + "," + expected.mV
+                                        + " decoded "
+                                        + decoded.mY + "," + decoded.mU + "," + decoded.mV);
+                            }
+                            totalErrorSquared += expected.calcErrorSquared(decoded);
+                        }
+                    }
+
+                    Image image = codec.getOutputImage(outputBufIndex);
+                    assertTrue(image != null);
+                    for (i = 0; i < pixelCheckPerFrame; i++) {
                         int w = mRandom.nextInt(mVideoWidth);
                         int h = mRandom.nextInt(mVideoHeight);
                         getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
-                        getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+                        getPixelValuesFromImage(image, w, h, decoded);
                         if (VERBOSE) {
-                            Log.i(TAG, outFrameCount + "-" + i + "- th round expcted " + expected.mY
-                                    + "," + expected.mU + "," + expected.mV + "  decoded "
+                            Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
+                                    + " expected "
+                                    + expected.mY + "," + expected.mU + "," + expected.mV
+                                    + " decoded "
                                     + decoded.mY + "," + decoded.mU + "," + decoded.mV);
                         }
                         totalErrorSquared += expected.calcErrorSquared(decoded);
@@ -434,23 +651,17 @@
                     Log.d(TAG, "saw output EOS.");
                     sawOutputEOS = true;
                 }
-            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
-                codecOutputBuffers = codec.getOutputBuffers();
-                Log.d(TAG, "output buffers have changed.");
             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                 MediaFormat oformat = codec.getOutputFormat();
                 Log.d(TAG, "output format has changed to " + oformat);
                 int colorFormat = oformat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
-                if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar ) {
-                    mDstSemiPlanar = true;
-                } else if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar ) {
-                    mDstSemiPlanar = false;
+                if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar
+                        || colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+                    mDstColorFormat = colorFormat;
                 } else {
+                    mDstColorFormat = CodecCapabilities.COLOR_FormatYUV420Flexible;
                     Log.w(TAG, "output format changed to unsupported one " +
-                            Integer.toHexString(colorFormat));
-                    // give up and return as nothing can be done
-                    codec.release();
-                    return null;
+                            Integer.toHexString(colorFormat) + ", using FlexYUV");
                 }
             }
         }
@@ -491,12 +702,12 @@
      * @param semiPlanarEnc
      * @param semiPlanarDec
      */
-    private void initYUVPlane(int w, int h, boolean semiPlanarEnc, boolean semiPlanarDec) {
+    private void initYUVPlane(int w, int h) {
         int bufferSizeY = w * h;
         mYBuffer = ByteBuffer.allocate(bufferSizeY);
         mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
-        mSrcSemiPlanar = semiPlanarEnc;
-        mDstSemiPlanar = semiPlanarDec;
+        mYDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY);
+        mUVDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY / 2);
         mBufferWidth = w;
         mBufferHeight = h;
         final byte[] yArray = mYBuffer.array();
@@ -506,7 +717,7 @@
                 yArray[i * w + j]  = clampY((i + j) & 0xff);
             }
         }
-        if (semiPlanarEnc) {
+        if (isSrcSemiPlanar()) {
             for (int i = 0; i < h/2; i++) {
                 for (int j = 0; j < w/2; j++) {
                     uvArray[i * w + 2 * j]  = (byte) (i & 0xff);
@@ -522,6 +733,10 @@
                 }
             }
         }
+        mYDirectBuffer.put(yArray);
+        mUVDirectBuffer.put(uvArray);
+        mYDirectBuffer.rewind();
+        mUVDirectBuffer.rewind();
     }
 
     /**
@@ -556,7 +771,7 @@
     private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
             YUVValue result) {
         result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
-        if (mSrcSemiPlanar) {
+        if (isSrcSemiPlanar()) {
             int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
             //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
             result.mU = mUVBuffer.get(index);
@@ -577,7 +792,7 @@
      */
     private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
         result.mY = buffer.get(y * mVideoWidth + x);
-        if (mDstSemiPlanar) {
+        if (isDstSemiPlanar()) {
             int index = mVideoWidth * mVideoHeight + y / 2 * mVideoWidth + x / 2 * 2;
             //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
             result.mU = buffer.get(index);
@@ -590,6 +805,22 @@
         }
     }
 
+    private void getPixelValuesFromImage(Image image, int x, int y, YUVValue result) {
+        assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
+
+        Plane[] planes = image.getPlanes();
+        assertTrue(planes.length == 3);
+
+        result.mY = getPixelFromPlane(planes[0], x, y);
+        result.mU = getPixelFromPlane(planes[1], x / 2, y / 2);
+        result.mV = getPixelFromPlane(planes[2], x / 2, y / 2);
+    }
+
+    private byte getPixelFromPlane(Plane plane, int x, int y) {
+        ByteBuffer buf = plane.getBuffer();
+        return buf.get(y * plane.getRowStride() + x * plane.getPixelStride());
+    }
+
     /**
      * Y cannot have full range. clamp it to prevent invalid value.
      */
diff --git a/tests/tests/media/Android.mk b/tests/tests/media/Android.mk
index 15237a8..77d4bb7 100644
--- a/tests/tests/media/Android.mk
+++ b/tests/tests/media/Android.mk
@@ -16,6 +16,20 @@
 
 include $(CLEAR_VARS)
 
+LOCAL_SRC_FILES := \
+	src/android/media/cts/CodecImage.java \
+	src/android/media/cts/CodecUtils.java
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_MODULE := ctsmediautil
+
+LOCAL_SDK_VERSION := current
+
+include $(BUILD_STATIC_JAVA_LIBRARY)
+
+include $(CLEAR_VARS)
+
 # don't include this package in any target
 LOCAL_MODULE_TAGS := optional
 # and when built explicitly put it in the data partition
@@ -24,7 +38,8 @@
 # include both the 32 and 64 bit versions
 LOCAL_MULTILIB := both
 
-LOCAL_STATIC_JAVA_LIBRARIES := ctsdeviceutil ctstestserver ctstestrunner
+LOCAL_STATIC_JAVA_LIBRARIES := \
+    ctsmediautil ctsdeviceutil ctstestserver ctstestrunner
 
 LOCAL_JNI_SHARED_LIBRARIES := libctsmediacodec_jni
 
diff --git a/tests/tests/media/libmediandkjni/Android.mk b/tests/tests/media/libmediandkjni/Android.mk
index 59ff7bb..23f9f5c 100644
--- a/tests/tests/media/libmediandkjni/Android.mk
+++ b/tests/tests/media/libmediandkjni/Android.mk
@@ -24,7 +24,9 @@
 	native-media-jni.cpp \
 	codec-utils-jni.cpp
 
-LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)
+LOCAL_C_INCLUDES := \
+	$(JNI_H_INCLUDE) \
+	system/core/include
 
 LOCAL_C_INCLUDES += $(call include-path-for, mediandk)
 
diff --git a/tests/tests/media/libmediandkjni/codec-utils-jni.cpp b/tests/tests/media/libmediandkjni/codec-utils-jni.cpp
index f99f1c8..f7a08a1 100644
--- a/tests/tests/media/libmediandkjni/codec-utils-jni.cpp
+++ b/tests/tests/media/libmediandkjni/codec-utils-jni.cpp
@@ -16,8 +16,10 @@
 
 /* Original code copied from NDK Native-media sample code */
 
-#undef NDEBUG
 //#define LOG_NDEBUG 0
+#define TAG "CodecUtilsJNI"
+#include <log/log.h>
+
 #include <stdint.h>
 #include <sys/types.h>
 #include <jni.h>
@@ -27,20 +29,6 @@
 
 typedef ssize_t offs_t;
 
-// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
-#include <android/log.h>
-#define TAG "CodecUtilsJNI"
-#define __ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
-#if LOG_NDEBUG
-#define ALOGV(...) do { if (0) { __ALOGV(__VA_ARGS__); } } while (0)
-#else
-#define ALOGV(...) __ALOGV(__VA_ARGS__)
-#endif
-#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
-#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
-#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
-#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
-
 struct NativeImage {
     struct crop {
         int left;
@@ -132,13 +120,13 @@
     }
 
     {   // Image
-        jclass imageClazz = env->FindClass("android/media/Image");
+        jclass imageClazz = env->FindClass("android/media/cts/CodecImage");
         gFields.methodWidth  = env->GetMethodID(imageClazz, "getWidth", "()I");
         gFields.methodHeight = env->GetMethodID(imageClazz, "getHeight", "()I");
         gFields.methodFormat = env->GetMethodID(imageClazz, "getFormat", "()I");
         gFields.methodTimestamp = env->GetMethodID(imageClazz, "getTimestamp", "()J");
         gFields.methodPlanes = env->GetMethodID(
-                imageClazz, "getPlanes", "()[Landroid/media/Image$Plane;");
+                imageClazz, "getPlanes", "()[Landroid/media/cts/CodecImage$Plane;");
         gFields.methodCrop   = env->GetMethodID(
                 imageClazz, "getCropRect", "()Landroid/graphics/Rect;");
         env->DeleteLocalRef(imageClazz);
@@ -146,7 +134,7 @@
     }
 
     {   // Image.Plane
-        jclass planeClazz = env->FindClass("android/media/Image$Plane");
+        jclass planeClazz = env->FindClass("android/media/cts/CodecImage$Plane");
         gFields.methodBuffer = env->GetMethodID(planeClazz, "getBuffer", "()Ljava/nio/ByteBuffer;");
         gFields.methodPixelStride = env->GetMethodID(planeClazz, "getPixelStride", "()I");
         gFields.methodRowStride = env->GetMethodID(planeClazz, "getRowStride", "()I");
diff --git a/tests/tests/media/libmediandkjni/native-media-jni.cpp b/tests/tests/media/libmediandkjni/native-media-jni.cpp
index cc41e10..2624c25 100644
--- a/tests/tests/media/libmediandkjni/native-media-jni.cpp
+++ b/tests/tests/media/libmediandkjni/native-media-jni.cpp
@@ -16,7 +16,10 @@
 
 /* Original code copied from NDK Native-media sample code */
 
-#undef NDEBUG
+//#define LOG_NDEBUG 0
+#define TAG "NativeMedia"
+#include <log/log.h>
+
 #include <assert.h>
 #include <jni.h>
 #include <pthread.h>
@@ -27,13 +30,6 @@
 
 #include <android/native_window_jni.h>
 
-// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
-#include <android/log.h>
-#define TAG "NativeMedia"
-#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
-#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
-#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
-
 #include "ndk/NdkMediaExtractor.h"
 #include "ndk/NdkMediaCodec.h"
 #include "ndk/NdkMediaCrypto.h"
diff --git a/tests/tests/media/src/android/media/cts/CodecImage.java b/tests/tests/media/src/android/media/cts/CodecImage.java
new file mode 100644
index 0000000..60a644a
--- /dev/null
+++ b/tests/tests/media/src/android/media/cts/CodecImage.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.cts;
+
+import java.nio.ByteBuffer;
+import java.lang.AutoCloseable;
+
+import android.graphics.Rect;
+
+/**
+ * <p>A single complete image buffer to use with a media source such as a
+ * {@link MediaCodec} or a
+ * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
+ *
+ * <p>This class allows for efficient direct application access to the pixel
+ * data of the CodecImage through one or more
+ * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
+ * {@link Plane} that describes the layout of the pixel data in that plane. Due
+ * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
+ * Images are not directly usable as UI resources.</p>
+ *
+ * <p>Since Images are often directly produced or consumed by hardware
+ * components, they are a limited resource shared across the system, and should
+ * be closed as soon as they are no longer needed.</p>
+ *
+ * <p>For example, when using the {@link ImageReader} class to read out Images
+ * from various media sources, not closing old CodecImage objects will prevent the
+ * availability of new Images once
+ * {@link ImageReader#getMaxImages the maximum outstanding image count} is
+ * reached. When this happens, the function acquiring new Images will typically
+ * throw an {@link IllegalStateException}.</p>
+ *
+ * @see ImageReader
+ */
+public abstract class CodecImage implements AutoCloseable {
+    /**
+     * Get the format for this image. This format determines the number of
+     * ByteBuffers needed to represent the image, and the general layout of the
+     * pixel data in each ByteBuffer.
+     *
+     * <p>
+     * The format is one of the values from
+     * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
+     * formats and the planes is as follows:
+     * </p>
+     *
+     * <table>
+     * <tr>
+     *   <th>Format</th>
+     *   <th>Plane count</th>
+     *   <th>Layout details</th>
+     * </tr>
+     * <tr>
+     *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
+     *   <td>1</td>
+     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
+     *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
+     *   </td>
+     * </tr>
+     * <tr>
+     *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
+     *   <td>3</td>
+     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
+     *     The chroma planes have half the width and height of the luminance
+     *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
+     *     Each plane has its own row stride and pixel stride.</td>
+     * </tr>
+     * <tr>
+     *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
+     *   <td>1</td>
+     *   <td>A single plane of raw sensor image data, with 16 bits per color
+     *     sample. The details of the layout need to be queried from the source of
+     *     the raw sensor data, such as
+     *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
+     *   </td>
+     * </tr>
+     * </table>
+     *
+     * @see android.graphics.ImageFormat
+     */
+    public abstract int getFormat();
+
+    /**
+     * The width of the image in pixels. For formats where some color channels
+     * are subsampled, this is the width of the largest-resolution plane.
+     */
+    public abstract int getWidth();
+
+    /**
+     * The height of the image in pixels. For formats where some color channels
+     * are subsampled, this is the height of the largest-resolution plane.
+     */
+    public abstract int getHeight();
+
+    /**
+     * Get the timestamp associated with this frame.
+     * <p>
+     * The timestamp is measured in nanoseconds, and is monotonically
+     * increasing. However, the zero point and whether the timestamp can be
+     * compared against other sources of time or images depend on the source of
+     * this image.
+     * </p>
+     */
+    public abstract long getTimestamp();
+
+    private Rect mCropRect;
+
+    /**
+     * Get the crop rectangle associated with this frame.
+     * <p>
+     * The crop rectangle specifies the region of valid pixels in the image,
+     * using coordinates in the largest-resolution plane.
+     */
+    public Rect getCropRect() {
+        if (mCropRect == null) {
+            return new Rect(0, 0, getWidth(), getHeight());
+        } else {
+            return new Rect(mCropRect); // return a copy
+        }
+    }
+
+    /**
+     * Set the crop rectangle associated with this frame.
+     * <p>
+     * The crop rectangle specifies the region of valid pixels in the image,
+     * using coordinates in the largest-resolution plane.
+     */
+    public void setCropRect(Rect cropRect) {
+        if (cropRect != null) {
+            cropRect = new Rect(cropRect);  // make a copy
+            cropRect.intersect(0, 0, getWidth(), getHeight());
+        }
+        mCropRect = cropRect;
+    }
+
+    /**
+     * Get the array of pixel planes for this CodecImage. The number of planes is
+     * determined by the format of the CodecImage.
+     */
+    public abstract Plane[] getPlanes();
+
+    /**
+     * Free up this frame for reuse.
+     * <p>
+     * After calling this method, calling any methods on this {@code CodecImage} will
+     * result in an {@link IllegalStateException}, and attempting to read from
+     * {@link ByteBuffer ByteBuffers} returned by an earlier
+     * {@link Plane#getBuffer} call will have undefined behavior.
+     * </p>
+     */
+    @Override
+    public abstract void close();
+
+    /**
+     * <p>A single color plane of image data.</p>
+     *
+     * <p>The number and meaning of the planes in an CodecImage are determined by the
+     * format of the CodecImage.</p>
+     *
+     * <p>Once the CodecImage has been closed, any access to the plane's
+     * ByteBuffer will fail.</p>
+     *
+     * @see #getFormat
+     */
+    public static abstract class Plane {
+        /**
+         * <p>The row stride for this color plane, in bytes.</p>
+         *
+         * <p>This is the distance between the start of two consecutive rows of
+         * pixels in the image. The row stride is always greater than 0.</p>
+         */
+        public abstract int getRowStride();
+        /**
+         * <p>The distance between adjacent pixel samples, in bytes.</p>
+         *
+         * <p>This is the distance between two consecutive pixel values in a row
+         * of pixels. It may be larger than the size of a single pixel to
+         * account for interleaved image data or padded formats.
+         * The pixel stride is always greater than 0.</p>
+         */
+        public abstract int getPixelStride();
+        /**
+         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
+         * containing the frame data.</p>
+         *
+         * <p>In particular, the buffer returned will always have
+         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
+         * the underlying data could be mapped as a pointer in JNI without doing
+         * any copies with {@code GetDirectBufferAddress}.</p>
+         *
+         * @return the byte buffer containing the image data for this plane.
+         */
+        public abstract ByteBuffer getBuffer();
+    }
+
+}
diff --git a/tests/tests/media/src/android/media/cts/CodecUtils.java b/tests/tests/media/src/android/media/cts/CodecUtils.java
index 3c3576f..df6eb4c 100644
--- a/tests/tests/media/src/android/media/cts/CodecUtils.java
+++ b/tests/tests/media/src/android/media/cts/CodecUtils.java
@@ -16,9 +16,12 @@
 
 package android.media.cts;
 
+import android.media.cts.CodecImage;
 import android.media.Image;
 import android.util.Log;
 
+import java.nio.ByteBuffer;
+
 public class CodecUtils  {
     private static final String TAG = "CodecUtils";
 
@@ -29,7 +32,89 @@
         Log.i(TAG, "after loadlibrary");
     }
 
-    public native static int getImageChecksum(Image image);
-    public native static void copyFlexYUVImage(Image target, Image source);
+    private static class ImageWrapper extends CodecImage {
+        private final Image mImage;
+        private final Plane[] mPlanes;
+
+        private ImageWrapper(Image image) {
+            mImage = image;
+            Image.Plane[] planes = mImage.getPlanes();
+
+            mPlanes = new Plane[planes.length];
+            for (int i = 0; i < planes.length; i++) {
+                mPlanes[i] = new PlaneWrapper(planes[i]);
+            }
+        }
+
+        public static ImageWrapper createFromImage(Image image) {
+            return new ImageWrapper(image);
+        }
+
+        @Override
+        public int getFormat() {
+            return mImage.getFormat();
+        }
+
+        @Override
+        public int getWidth() {
+            return mImage.getWidth();
+        }
+
+        @Override
+        public int getHeight() {
+            return mImage.getHeight();
+        }
+
+        @Override
+        public long getTimestamp() {
+            return mImage.getTimestamp();
+        }
+
+        @Override
+        public Plane[] getPlanes() {
+            return mPlanes;
+        }
+
+        @Override
+        public void close() {
+            mImage.close();
+        }
+
+        private static class PlaneWrapper extends CodecImage.Plane {
+            private final Image.Plane mPlane;
+
+            PlaneWrapper(Image.Plane plane) {
+                mPlane = plane;
+            }
+
+            @Override
+            public int getRowStride() {
+                return mPlane.getRowStride();
+            }
+
+            @Override
+            public int getPixelStride() {
+                return mPlane.getPixelStride();
+            }
+
+            @Override
+            public ByteBuffer getBuffer() {
+                return mPlane.getBuffer();
+            }
+        }
+    }
+
+
+    public native static int getImageChecksum(CodecImage image);
+    public native static void copyFlexYUVImage(CodecImage target, CodecImage source);
+
+    public static void copyFlexYUVImage(Image target, CodecImage source) {
+        copyFlexYUVImage(ImageWrapper.createFromImage(target), source);
+    }
+    public static void copyFlexYUVImage(Image target, Image source) {
+        copyFlexYUVImage(
+                ImageWrapper.createFromImage(target),
+                ImageWrapper.createFromImage(source));
+    }
 }