add video encoder / decoder perf
- runs encoder / decoder for the given reference image with
clipped image supplied to encoder for each frame
- output quality checked by comparing random pixels with the reference image
- final performance index is FPS of both encoder and decoder combined
- currently supporting only YUV420Planar and YUV420SemiPlanar
- This is still ongoing work, and the next patch will change the decoding part
to use SurfaceTexture.
Change-Id: I53f219dfb91df3e5c3339487df59d811e7a8453e
diff --git a/suite/pts/PtsBenchmarkingList.mk b/suite/pts/PtsBenchmarkingList.mk
index 0c08a70..c106e1f 100644
--- a/suite/pts/PtsBenchmarkingList.mk
+++ b/suite/pts/PtsBenchmarkingList.mk
@@ -22,7 +22,8 @@
PtsDeviceUi \
PtsDeviceDram \
PtsDeviceSimpleCpu \
- PtsDeviceBrowserBench
+ PtsDeviceBrowserBench \
+ PtsDeviceVideoPerf
PTS_SUPPORT_PACKAGES := \
PtsDeviceTaskswitchingAppA \
diff --git a/suite/pts/deviceTests/videoperf/Android.mk b/suite/pts/deviceTests/videoperf/Android.mk
new file mode 100644
index 0000000..055c9f7
--- /dev/null
+++ b/suite/pts/deviceTests/videoperf/Android.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+# don't include this package in any target
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_JAVA_LIBRARIES := android.test.runner
+
+LOCAL_STATIC_JAVA_LIBRARIES := ptsutil ctsutil ctstestrunner
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+
+LOCAL_PACKAGE_NAME := PtsDeviceVideoPerf
+
+LOCAL_SDK_VERSION := 16
+
+include $(BUILD_CTS_PACKAGE)
+
diff --git a/suite/pts/deviceTests/videoperf/AndroidManifest.xml b/suite/pts/deviceTests/videoperf/AndroidManifest.xml
new file mode 100644
index 0000000..fdef1ef
--- /dev/null
+++ b/suite/pts/deviceTests/videoperf/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2013 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.android.pts.videoperf">
+
+ <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+
+ <application>
+ <uses-library android:name="android.test.runner" />
+ </application>
+ <instrumentation android:name="android.test.InstrumentationCtsTestRunner"
+ android:targetPackage="com.android.pts.videoperf"
+                      android:label="Video encoder/decoder performance measurement" />
+</manifest>
diff --git a/suite/pts/deviceTests/videoperf/src/com/android/pts/videoperf/VideoEncoderDecoderTest.java b/suite/pts/deviceTests/videoperf/src/com/android/pts/videoperf/VideoEncoderDecoderTest.java
new file mode 100644
index 0000000..b5ed9c2
--- /dev/null
+++ b/suite/pts/deviceTests/videoperf/src/com/android/pts/videoperf/VideoEncoderDecoderTest.java
@@ -0,0 +1,779 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.pts.videoperf;
+
+import android.graphics.Point;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.CodecProfileLevel;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import com.android.pts.util.PtsAndroidTestCase;
+import com.android.pts.util.ResultType;
+import com.android.pts.util.ResultUnit;
+import com.android.pts.util.Stat;
+
+import java.nio.ByteBuffer;
+import java.lang.System;
+import java.util.Random;
+import java.util.Vector;
+
+/**
+ * This tries to test video encoder / decoder performance by running encoding / decoding
+ * without displaying the raw data. To make things simpler, encoder is used to encode synthetic
+ * data and decoder is used to decode the encoded video. This approach does not work where
+ * there is only decoder. Performance index is total time taken for encoding and decoding
+ * the whole frames.
+ * To prevent sacrificing quality for faster encoding / decoding, randomly selected pixels are
+ * compared with the original image. As the pixel comparison can slow down the decoding process,
+ * only some randomly selected pixels are compared. As there can be only one performance index,
+ * error above a certain threshold in pixel value will be treated as an error.
+ */
+public class VideoEncoderDecoderTest extends PtsAndroidTestCase {
+ private static final String TAG = "VideoEncoderDecoderTest";
+ // this wait time affects fps as too big value will work as a blocker if device fps
+ // is not very high.
+ private static final long VIDEO_CODEC_WAIT_TIME_US = 5000;
+ private static final boolean VERBOSE = false;
+ private static final String VIDEO_AVC = "video/avc";
+ private static final int TOTAL_FRAMES = 300;
+ private static final int NUMBER_OF_REPEAT = 10;
+ // i frame interval for encoder
+ private static final int KEY_I_FRAME_INTERVAL = 5;
+
+ private static final int Y_CLAMP_MIN = 16;
+ private static final int Y_CLAMP_MAX = 235;
+ private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
+ private ByteBuffer mYBuffer;
+ private ByteBuffer mUVBuffer;
+ // if input raw data is semi-planar
+ private boolean mSrcSemiPlanar;
+ // if output raw data is semi-planar
+ private boolean mDstSemiPlanar;
+ private int mBufferWidth;
+ private int mBufferHeight;
+ private int mVideoWidth;
+ private int mVideoHeight;
+
+ private Vector<ByteBuffer> mEncodedOutputBuffer;
+ // check this many pixels per each decoded frame
+ // checking too many points decreases decoder frame rates a lot.
+ private static final int PIXEL_CHECK_PER_FRAME = 1000;
+ // RMS error in pixel values above this will be treated as error.
+ private static final double PIXEL_RMS_ERROR_MARGAIN = 20.0;
+ private Random mRandom;
+
+ @Override
+ protected void setUp() throws Exception {
+ mEncodedOutputBuffer = new Vector<ByteBuffer>(TOTAL_FRAMES * 2);
+ // Use time as a seed, hoping to prevent checking pixels in the same pattern
+ long now = System.currentTimeMillis();
+ mRandom = new Random(now);
+ super.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ mEncodedOutputBuffer.clear();
+ mEncodedOutputBuffer = null;
+ mYBuffer = null;
+ mUVBuffer = null;
+ mRandom = null;
+ super.tearDown();
+ }
+
+ public void testAvc0176x0144() throws Exception {
+ doTest(VIDEO_AVC, 176, 144, NUMBER_OF_REPEAT);
+ }
+
+ public void testAvc0352x0288() throws Exception {
+ doTest(VIDEO_AVC, 352, 288, NUMBER_OF_REPEAT);
+ }
+
+ public void testAvc0720x0480() throws Exception {
+ doTest(VIDEO_AVC, 720, 480, NUMBER_OF_REPEAT);
+ }
+
+ public void testAvc1280x0720() throws Exception {
+ doTest(VIDEO_AVC, 1280, 720, NUMBER_OF_REPEAT);
+ }
+
+ /**
+ * resolution intentionally set to 1072 not 1080
+     * as 1080 is not a multiple of 16, and it requires additional settings like stride
+ * which is not specified in API documentation.
+ */
+ public void testAvc1920x1072() throws Exception {
+ doTest(VIDEO_AVC, 1920, 1072, NUMBER_OF_REPEAT);
+ }
+
+ /**
+ * Used to pass codec information for given codec type / width / height
+ */
+ private class CodecInfo {
+ public int mBitRate = 0;
+ public int mFps = 0;
+ public boolean mSupportSemiPlanar = false;
+ public boolean mSupportPlanar = false;
+ }
+
+ /**
+ * Check if given codec with given (w,h) is supported.
+ * @param mimeType codec type in mime format like "video/avc"
+ * @param w video width
+ * @param h video height
+ * @param isEncoder whether the codec is encoder or decoder
+ * @return null if the configuration is not supported.
+ */
+ private CodecInfo getSupportedFormatInfo(String mimeType, int w, int h, boolean isEncoder) {
+ CodecCapabilities cap = getCodecCapability(mimeType, isEncoder);
+ if (cap == null) { // not supported
+ return null;
+ }
+ CodecInfo info = new CodecInfo();
+ for (int color : cap.colorFormats) {
+ if (color == CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
+ info.mSupportSemiPlanar = true;
+ }
+ if (color == CodecCapabilities.COLOR_FormatYUV420Planar) {
+ info.mSupportPlanar = true;
+ }
+ }
+ printIntArray("supported colors", cap.colorFormats);
+ // either YUV420 planar or semiplanar should be supported
+ if (!info.mSupportPlanar && !info.mSupportSemiPlanar) {
+ Log.i(TAG, "no supported color format");
+ return null;
+ }
+
+ if (mimeType.equals(VIDEO_AVC)) {
+ int highestLevel = 0;
+ for (CodecProfileLevel lvl : cap.profileLevels) {
+ if (lvl.level > highestLevel) {
+ highestLevel = lvl.level;
+ }
+ }
+ Log.i(TAG, "Avc highest level " + Integer.toHexString(highestLevel));
+ int maxW = 0;
+ int maxH = 0;
+ int bitRate = 0;
+ double fps = 0; // frame rate for the max resolution
+ switch(highestLevel) {
+ // Do not support Level 1 to 2.
+ case CodecProfileLevel.AVCLevel1:
+ case CodecProfileLevel.AVCLevel11:
+ case CodecProfileLevel.AVCLevel12:
+ case CodecProfileLevel.AVCLevel13:
+ case CodecProfileLevel.AVCLevel1b:
+ case CodecProfileLevel.AVCLevel2:
+ fail();
+ break;
+ case CodecProfileLevel.AVCLevel21:
+ maxW = 352;
+ maxH = 576;
+ bitRate = 4000000;
+ fps = 25;
+ break;
+ case CodecProfileLevel.AVCLevel22:
+ maxW = 720;
+ maxH = 480;
+ bitRate = 4000000;
+ fps = 15;
+ break;
+ case CodecProfileLevel.AVCLevel3:
+ maxW = 720;
+ maxH = 480;
+ bitRate = 10000000;
+ fps = 30;
+ break;
+ case CodecProfileLevel.AVCLevel31:
+ maxW = 1280;
+ maxH = 720;
+ bitRate = 14000000;
+ fps = 30;
+ break;
+ case CodecProfileLevel.AVCLevel32:
+ maxW = 1280;
+ maxH = 720;
+ bitRate = 20000000;
+ fps = 60;
+ break;
+ case CodecProfileLevel.AVCLevel4:
+ maxW = 1920;
+ maxH = 1080;
+ bitRate = 20000000;
+ fps = 30.1;
+ break;
+ case CodecProfileLevel.AVCLevel41:
+ maxW = 1920;
+ maxH = 1080;
+ bitRate = 50000000;
+ fps = 30.1;
+ break;
+ case CodecProfileLevel.AVCLevel42:
+ maxW = 2048;
+ maxH = 1080;
+ bitRate = 50000000;
+ fps = 60;
+ break;
+ case CodecProfileLevel.AVCLevel5:
+ maxW = 3672;
+ maxH = 1536;
+ bitRate = 135000000;
+ fps = 26.7;
+ break;
+ case CodecProfileLevel.AVCLevel51:
+ maxW = 4096;
+ maxH = 2304;
+ bitRate = 240000000;
+ fps = 26.7;
+ break;
+ default:
+ maxW = 4096;
+ maxH = 2304;
+ bitRate = 240000000;
+ fps = 26.7;
+ break;
+ }
+ if ((w > maxW) || (h > maxH)) {
+ Log.i(TAG, "Requested resolution (" + w + "," + h + ") exceeds (" +
+ maxW + "," + maxH + ")");
+ return null;
+ }
+ info.mFps = (int)(fps * maxW * maxH / (w * h));
+ info.mBitRate = bitRate;
+ Log.i(TAG, "AVC Level " + Integer.toHexString(highestLevel) + " bit rate " + bitRate +
+ " fps " + info.mFps);
+ }
+ return info;
+ }
+
+    /**
+     * Run encoding / decoding test for given mimeType of codec
+     * @param mimeType like video/avc
+     * @param w video width
+     * @param h video height
+     * @param numberRepeat how many times to repeat the encoding / decoding process
+     */
+    private void doTest(String mimeType, int w, int h, int numberRepeat) throws Exception {
+        CodecInfo infoEnc = getSupportedFormatInfo(mimeType, w, h, true);
+        if (infoEnc == null) {
+            Log.i(TAG, "Codec " + mimeType + " with " + w + "," + h + " not supported");
+            return;
+        }
+        CodecInfo infoDec = getSupportedFormatInfo(mimeType, w, h, false);
+        assertNotNull(infoDec);
+        mVideoWidth = w;
+        mVideoHeight = h;
+        initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH,
+                infoEnc.mSupportSemiPlanar, infoDec.mSupportSemiPlanar);
+        double[] encoderFpsResults = new double[numberRepeat];
+        double[] decoderFpsResults = new double[numberRepeat];
+        double[] totalFpsResults = new double[numberRepeat];
+        double[] decoderRmsErrorResults = new double[numberRepeat];
+        for (int i = 0; i < numberRepeat; i++) {
+            MediaFormat format = new MediaFormat();
+            format.setString(MediaFormat.KEY_MIME, mimeType);
+            format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
+            format.setInteger(MediaFormat.KEY_WIDTH, w);
+            format.setInteger(MediaFormat.KEY_HEIGHT, h);
+            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                    infoEnc.mSupportSemiPlanar ? CodecCapabilities.COLOR_FormatYUV420SemiPlanar :
+                        CodecCapabilities.COLOR_FormatYUV420Planar);
+            format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
+            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
+            double encodingTime = runEncoder(mimeType, format, TOTAL_FRAMES);
+            // re-initialize format for decoder
+            format = new MediaFormat();
+            format.setString(MediaFormat.KEY_MIME, mimeType);
+            format.setInteger(MediaFormat.KEY_WIDTH, w);
+            format.setInteger(MediaFormat.KEY_HEIGHT, h);
+            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
+                    infoDec.mSupportSemiPlanar ? CodecCapabilities.COLOR_FormatYUV420SemiPlanar :
+                        CodecCapabilities.COLOR_FormatYUV420Planar);
+            double[] decoderResult = runDecoder(mimeType, format);
+            double decodingTime = decoderResult[0];
+            decoderRmsErrorResults[i] = decoderResult[1];
+            encoderFpsResults[i] = (double)TOTAL_FRAMES / encodingTime * 1000.0;
+            decoderFpsResults[i] = (double)TOTAL_FRAMES / decodingTime * 1000.0;
+            totalFpsResults[i] = (double)TOTAL_FRAMES / (encodingTime + decodingTime) * 1000.0;
+
+            // clear things for re-start
+            mEncodedOutputBuffer.clear();
+            // it will be good to clean everything to make every run the same.
+            System.gc();
+        }
+        getReportLog().printArray("encoder", encoderFpsResults, ResultType.HIGHER_BETTER,
+                ResultUnit.FPS);
+        getReportLog().printArray("rms error", decoderRmsErrorResults, ResultType.LOWER_BETTER,
+                ResultUnit.NONE);
+        getReportLog().printArray("decoder", decoderFpsResults, ResultType.HIGHER_BETTER,
+                ResultUnit.FPS);
+        getReportLog().printArray("encoder decoder", totalFpsResults, ResultType.HIGHER_BETTER,
+                ResultUnit.FPS);
+        getReportLog().printSummary("encoder decoder", Stat.getAverage(totalFpsResults),
+                ResultType.HIGHER_BETTER, ResultUnit.FPS);
+        // make sure that rms error is not too big.
+        for (int i = 0; i < numberRepeat; i++) {
+            assertTrue(decoderRmsErrorResults[i] < PIXEL_RMS_ERROR_MARGAIN);
+        }
+    }
+
+ /**
+ * run encoder benchmarking
+ * @param mimeType encoder type like video/avc
+ * @param format format of media to encode
+ * @param totalFrames total number of frames to encode
+ * @return time taken in ms to encode the frames. This does not include initialization time.
+ */
+ private double runEncoder(String mimeType, MediaFormat format, int totalFrames) {
+ MediaCodec codec = MediaCodec.createEncoderByType(mimeType);
+ try {
+ codec.configure(
+ format,
+ null /* surface */,
+ null /* crypto */,
+ MediaCodec.CONFIGURE_FLAG_ENCODE);
+ } catch (IllegalStateException e) {
+ Log.e(TAG, "codec '" + mimeType + "' failed configuration.");
+ assertTrue("codec '" + mimeType + "' failed configuration.", false);
+ }
+ codec.start();
+ ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
+ ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
+
+ int numBytesSubmitted = 0;
+ int numBytesDequeued = 0;
+ int inFramesCount = 0;
+ long start = System.currentTimeMillis();
+ while (true) {
+ int index;
+
+ if (inFramesCount < totalFrames) {
+ index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
+ if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
+ int size = queueInputBufferEncoder(
+ codec, codecInputBuffers, index, inFramesCount,
+ (inFramesCount == (totalFrames - 1)) ?
+ MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+ inFramesCount++;
+ numBytesSubmitted += size;
+ if (VERBOSE) {
+ Log.d(TAG, "queued " + size + " bytes of input data, frame " +
+ (inFramesCount - 1));
+ }
+
+ }
+ }
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ index = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
+ if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ codecOutputBuffers = codec.getOutputBuffers();
+ } else if (index >= 0) {
+ dequeueOutputBufferEncoder(codec, codecOutputBuffers, index, info);
+ numBytesDequeued += info.size;
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ if (VERBOSE) {
+ Log.d(TAG, "dequeued output EOS.");
+ }
+ break;
+ }
+ if (VERBOSE) {
+ Log.d(TAG, "dequeued " + info.size + " bytes of output data.");
+ }
+ }
+ }
+ long finish = System.currentTimeMillis();
+ if (VERBOSE) {
+ Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
+ + "dequeued " + numBytesDequeued + " bytes.");
+ }
+ codec.stop();
+ codec.release();
+ codec = null;
+ return (double)(finish - start);
+ }
+
+ /**
+ * Fills input buffer for encoder from YUV buffers.
+ * @return size of enqueued data.
+ */
+ private int queueInputBufferEncoder(
+ MediaCodec codec, ByteBuffer[] inputBuffers, int index, int frameCount, int flags) {
+ ByteBuffer buffer = inputBuffers[index];
+ buffer.clear();
+
+ Point origin = getOrigin(frameCount);
+ // Y color first
+ int srcOffsetY = origin.x + origin.y * mBufferWidth;
+ final byte[] yBuffer = mYBuffer.array();
+ for (int i = 0; i < mVideoHeight; i++) {
+ buffer.put(yBuffer, srcOffsetY, mVideoWidth);
+ srcOffsetY += mBufferWidth;
+ }
+ if (mSrcSemiPlanar) {
+ int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
+ final byte[] uvBuffer = mUVBuffer.array();
+ for (int i = 0; i < mVideoHeight / 2; i++) {
+ buffer.put(uvBuffer, srcOffsetU, mVideoWidth);
+ srcOffsetU += mBufferWidth;
+ }
+ } else {
+ int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
+ int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
+ final byte[] uvBuffer = mUVBuffer.array();
+ for (int i = 0; i < mVideoHeight /2; i++) { //U only
+ buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
+ srcOffsetU += mBufferWidth / 2;
+ }
+ for (int i = 0; i < mVideoHeight /2; i++) { //V only
+ buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
+ srcOffsetV += mBufferWidth / 2;
+ }
+ }
+ int size = mVideoHeight * mVideoWidth * 3 /2;
+
+ codec.queueInputBuffer(index, 0 /* offset */, size, 0 /* timeUs */, flags);
+ if (VERBOSE && (frameCount == 0)) {
+ printByteArray("Y ", mYBuffer.array(), 0, 20);
+ printByteArray("UV ", mUVBuffer.array(), 0, 20);
+ printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
+ }
+ return size;
+ }
+
+ /**
+ * Dequeue encoded data from output buffer and store for later usage.
+ */
+ private void dequeueOutputBufferEncoder(
+ MediaCodec codec, ByteBuffer[] outputBuffers,
+ int index, MediaCodec.BufferInfo info) {
+ ByteBuffer output = outputBuffers[index];
+ output.clear();
+ int l = info.size;
+ ByteBuffer copied = ByteBuffer.allocate(l);
+ output.get(copied.array(), 0, l);
+ mEncodedOutputBuffer.add(copied);
+ codec.releaseOutputBuffer(index, false /* render */);
+ }
+
+ /**
+     * run decoder benchmarking with the encoded stream stored from the encoding phase
+     * @param mimeType decoder type like video/avc
+     * @param format format of media to decode
+     * @return length-2 array with 0: time for decoding in ms, 1: rms error of pixels
+ */
+ private double[] runDecoder(String mimeType, MediaFormat format) {
+ MediaCodec codec = MediaCodec.createDecoderByType(mimeType);
+ codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
+ codec.start();
+ ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
+ ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
+
+ double totalErrorSquared = 0;
+
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ boolean sawOutputEOS = false;
+ int inputLeft = mEncodedOutputBuffer.size();
+ int inputBufferCount = 0;
+ int outFrameCount = 0;
+ YUVValue expected = new YUVValue();
+ YUVValue decoded = new YUVValue();
+ long start = System.currentTimeMillis();
+ while (!sawOutputEOS) {
+ if (inputLeft > 0) {
+ int inputBufIndex = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US);
+
+ if (inputBufIndex >= 0) {
+ ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
+ dstBuf.clear();
+ ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount);
+ int writeSize = src.capacity();
+ dstBuf.put(src.array(), 0, writeSize);
+ codec.queueInputBuffer(
+ inputBufIndex,
+ 0 /* offset */,
+ writeSize,
+ 0,
+ (inputLeft == 1) ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+ inputLeft --;
+ inputBufferCount ++;
+ }
+ }
+
+ int res = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US);
+ if (res >= 0) {
+ int outputBufIndex = res;
+ ByteBuffer buf = codecOutputBuffers[outputBufIndex];
+ if (VERBOSE && (outFrameCount == 0)) {
+ printByteBuffer("Y ", buf, 0, 20);
+ printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
+ printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
+ }
+ Point origin = getOrigin(outFrameCount);
+ for (int i = 0; i < PIXEL_CHECK_PER_FRAME; i++) {
+ int w = mRandom.nextInt(mVideoWidth);
+ int h = mRandom.nextInt(mVideoHeight);
+ getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
+ getPixelValuesFromOutputBuffer(buf, w, h, decoded);
+ if (VERBOSE) {
+ Log.i(TAG, outFrameCount + "-" + i + "- th round expcted " + expected.mY +
+ "," + expected.mU + "," + expected.mV + " decoded " + decoded.mY +
+ "," + decoded.mU + "," + decoded.mV);
+ }
+ totalErrorSquared += expected.calcErrorSquared(decoded);
+ }
+ codec.releaseOutputBuffer(outputBufIndex, false /* render */);
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ Log.d(TAG, "saw output EOS.");
+ sawOutputEOS = true;
+ }
+ outFrameCount++;
+ } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ codecOutputBuffers = codec.getOutputBuffers();
+ Log.d(TAG, "output buffers have changed.");
+ } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ MediaFormat oformat = codec.getOutputFormat();
+ Log.d(TAG, "output format has changed to " + oformat);
+ int colorFormat = oformat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar ) {
+ mDstSemiPlanar = true;
+ } else if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar ) {
+ mDstSemiPlanar = false;
+ } else {
+ fail("unsupported color format " + Integer.toHexString(colorFormat));
+ }
+ }
+ }
+ long finish = System.currentTimeMillis();
+ codec.stop();
+ codec.release();
+ codec = null;
+ assertTrue(outFrameCount >= TOTAL_FRAMES);
+ // divide by 3 as sum is done for Y, U, V.
+ double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
+ double[] result = { (double) finish - start, errorRms };
+ return result;
+ }
+
+ /**
+ * returns origin in the absolute frame for given frame count.
+ * The video scene is moving by moving origin per each frame.
+ */
+ private Point getOrigin(int frameCount) {
+ if (frameCount < 100) {
+ return new Point(2 * frameCount, 0);
+ } else if (frameCount < 200) {
+ return new Point(200, (frameCount - 100) * 2);
+ } else {
+ if (frameCount > 300) { // for safety
+ frameCount = 300;
+ }
+ return new Point(600 - frameCount * 2, 600 - frameCount * 2);
+ }
+ }
+
+ /**
+ * initialize reference YUV plane
+ * @param w This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
+ * to allow movements
+ * @param h This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
+ * to allow movements
+ * @param semiPlanarEnc
+ * @param semiPlanarDec
+ */
+ private void initYUVPlane(int w, int h, boolean semiPlanarEnc, boolean semiPlanarDec) {
+ int bufferSizeY = w * h;
+ mYBuffer = ByteBuffer.allocate(bufferSizeY);
+ mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
+ mSrcSemiPlanar = semiPlanarEnc;
+ mDstSemiPlanar = semiPlanarDec;
+ mBufferWidth = w;
+ mBufferHeight = h;
+ final byte[] yArray = mYBuffer.array();
+ final byte[] uvArray = mUVBuffer.array();
+ for (int i = 0; i < h; i++) {
+ for (int j = 0; j < w; j++) {
+ yArray[i * w + j] = clampY((i + j) & 0xff);
+ }
+ }
+ if (semiPlanarEnc) {
+ for (int i = 0; i < h/2; i++) {
+ for (int j = 0; j < w/2; j++) {
+ uvArray[i * w + 2 * j] = (byte) (i & 0xff);
+ uvArray[i * w + 2 * j + 1] = (byte) (j & 0xff);
+ }
+ }
+ } else { // planar, U first, then V
+ int vOffset = bufferSizeY / 4;
+ for (int i = 0; i < h/2; i++) {
+ for (int j = 0; j < w/2; j++) {
+ uvArray[i * w/2 + j] = (byte) (i & 0xff);
+ uvArray[i * w/2 + vOffset + j] = (byte) (j & 0xff);
+ }
+ }
+ }
+ }
+
+ /**
+ * class to store pixel values in YUV
+ *
+ */
+ public class YUVValue {
+ public byte mY;
+ public byte mU;
+ public byte mV;
+ public YUVValue() {
+ }
+
+ public boolean equalTo(YUVValue other) {
+ return (mY == other.mY) && (mU == other.mU) && (mV == other.mV);
+ }
+
+ public double calcErrorSquared(YUVValue other) {
+ double yDelta = mY - other.mY;
+ double uDelta = mU - other.mU;
+ double vDelta = mV - other.mV;
+ return yDelta * yDelta + uDelta * uDelta + vDelta * vDelta;
+ }
+ }
+
+ /**
+ * Read YUV values from given position (x,y) for given origin (originX, originY)
+ * The whole data is already available from YBuffer and UVBuffer.
+ * @param result pass the result via this. This is for avoiding creating / destroying too many
+ * instances
+ */
+ private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
+ YUVValue result) {
+ result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
+ if (mSrcSemiPlanar) {
+ int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
+ //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
+ result.mU = mUVBuffer.get(index);
+ result.mV = mUVBuffer.get(index + 1);
+ } else {
+ int vOffset = mBufferWidth * mBufferHeight / 4;
+ int index = (originY + y) / 2 * mBufferWidth / 2 + (originX + x) / 2;
+ result.mU = mUVBuffer.get(index);
+ result.mV = mUVBuffer.get(vOffset + index);
+ }
+ }
+
+ /**
+ * Read YUV pixels from decoded output buffer for give (x, y) position
+ * Output buffer is composed of Y parts followed by U/V
+ * @param result pass the result via this. This is for avoiding creating / destroying too many
+ * instances
+ */
+ private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
+ result.mY = buffer.get(y * mVideoWidth + x);
+ if (mDstSemiPlanar) {
+ int index = mVideoWidth * mVideoHeight + y / 2 * mVideoWidth + x / 2 * 2;
+ //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
+ result.mU = buffer.get(index);
+ result.mV = buffer.get(index + 1);
+ } else {
+ int vOffset = mVideoWidth * mVideoHeight / 4;
+ int index = mVideoWidth * mVideoHeight + y / 2 * mVideoWidth / 2 + x / 2;
+ result.mU = buffer.get(index);
+ result.mV = buffer.get(index + vOffset);
+ }
+ }
+
+ /**
+ * Y cannot have full range. clamp it to prevent invalid value.
+ */
+ private byte clampY(int y) {
+ if (y < Y_CLAMP_MIN) {
+ y = Y_CLAMP_MIN;
+ } else if (y > Y_CLAMP_MAX) {
+ y = Y_CLAMP_MAX;
+ }
+ return (byte) (y & 0xff);
+ }
+
+ /**
+ * Search for given codecName and returns CodecCapabilities if found
+ * @param codecName
+ * @param isEncoder true for encoder, false for decoder
+ * @return null if the codec is not supported
+ */
+ private CodecCapabilities getCodecCapability(
+ String codecName, boolean isEncoder) {
+ int codecCount = MediaCodecList.getCodecCount();
+ for (int i = 0; i < codecCount; ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ String[] types = info.getSupportedTypes();
+ if (isEncoder != info.isEncoder()) {
+ continue;
+ }
+ for (int j = 0; j < types.length; ++j) {
+ if (types[j].compareTo(codecName) == 0) {
+ CodecCapabilities cap = info.getCapabilitiesForType(types[j]);
+ Log.i(TAG, "Use codec " + info.getName());
+ return cap;
+ }
+ }
+ }
+ return null;
+ }
+
+ /// for debugging
+ private void printIntArray(String msg, int[] data) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(msg);
+ builder.append(":");
+ for (int e : data) {
+ builder.append(Integer.toHexString(e));
+ builder.append(",");
+ }
+ builder.deleteCharAt(builder.length() - 1);
+ Log.i(TAG, builder.toString());
+ }
+
+ /// for debugging
+ private void printByteArray(String msg, byte[] data, int offset, int len) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(msg);
+ builder.append(":");
+ for (int i = offset; i < offset + len; i++) {
+ builder.append(Integer.toHexString(data[i]));
+ builder.append(",");
+ }
+ builder.deleteCharAt(builder.length() - 1);
+ Log.i(TAG, builder.toString());
+ }
+
+ /// for debugging
+ private void printByteBuffer(String msg, ByteBuffer data, int offset, int len) {
+ StringBuilder builder = new StringBuilder();
+ builder.append(msg);
+ builder.append(":");
+ for (int i = offset; i < offset + len; i++) {
+ builder.append(Integer.toHexString(data.get(i)));
+ builder.append(",");
+ }
+ builder.deleteCharAt(builder.length() - 1);
+ Log.i(TAG, builder.toString());
+ }
+}