Initial libmix commit
Change-Id: I7a0b9afdc83a3274189cef0788c7296a871a3d98
Signed-off-by: Guilhem IMBERTON <guilhem.imberton@intel.com>
diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
new file mode 100644
index 0000000..1fc4d9a
--- /dev/null
+++ b/videoencoder/Android.mk
@@ -0,0 +1,110 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+LOCAL_CFLAGS += \
+ -DBX_RC \
+ -DOSCL_IMPORT_REF= \
+ -DOSCL_UNUSED_ARG= \
+ -DOSCL_EXPORT_REF=
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_m4vh263enc
+endif
+
+LOCAL_SRC_FILES := \
+ VideoEncoderBase.cpp \
+ VideoEncoderAVC.cpp \
+ VideoEncoderH263.cpp \
+ VideoEncoderMP4.cpp \
+ VideoEncoderVP8.cpp \
+ VideoEncoderUtils.cpp \
+ VideoEncoderHost.cpp
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+ LOCAL_SRC_FILES += PVSoftMPEG4Encoder.cpp
+endif
+
+LOCAL_C_INCLUDES := \
+ $(TARGET_OUT_HEADERS)/libva \
+ $(call include-path-for, frameworks-native) \
+ $(TARGET_OUT_HEADERS)/pvr
+
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+LOCAL_C_INCLUDES += \
+ frameworks/av/media/libstagefright/codecs/m4v_h263/enc/include \
+ frameworks/av/media/libstagefright/codecs/m4v_h263/enc/src \
+ frameworks/av/media/libstagefright/codecs/common/include \
+ frameworks/native/include/media/openmax \
+ frameworks/native/include/media/hardware \
+ frameworks/av/media/libstagefright/include
+endif
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libutils \
+ libva \
+ libva-android \
+ libva-tpi \
+ libui \
+ libutils \
+ libhardware \
+ libintelmetadatabuffer
+
+LOCAL_COPY_HEADERS_TO := libmix_videoencoder
+
+LOCAL_COPY_HEADERS := \
+ VideoEncoderHost.h \
+ VideoEncoderInterface.h \
+ VideoEncoderDef.h
+
+ifeq ($(VIDEO_ENC_LOG_ENABLE),true)
+LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE
+endif
+
+ifeq ($(NO_BUFFER_SHARE),true)
+LOCAL_CPPFLAGS += -DNO_BUFFER_SHARE
+endif
+
+ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true)
+LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE
+endif
+
+ifeq ($(ENABLE_IMG_GRAPHICS),true)
+ LOCAL_CFLAGS += -DIMG_GFX
+
+ ifeq ($(ENABLE_MRFL_GRAPHICS),true)
+ LOCAL_CFLAGS += -DMRFLD_GFX
+ endif
+endif
+
+LOCAL_CFLAGS += -Werror
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libva_videoencoder
+
+include $(BUILD_SHARED_LIBRARY)
+
+# For libintelmetadatabuffer
+# =====================================================
+
+include $(CLEAR_VARS)
+
+VIDEO_ENC_LOG_ENABLE := true
+
+LOCAL_SRC_FILES := \
+ IntelMetadataBuffer.cpp
+
+LOCAL_COPY_HEADERS_TO := libmix_videoencoder
+
+LOCAL_COPY_HEADERS := \
+ IntelMetadataBuffer.h
+
+ifeq ($(INTEL_VIDEO_XPROC_SHARING),true)
+LOCAL_SHARED_LIBRARIES := liblog libutils libbinder libgui \
+ libui libcutils libhardware
+endif
+LOCAL_CFLAGS += -Werror
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libintelmetadatabuffer
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp
new file mode 100644
index 0000000..28f8e63
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.cpp
@@ -0,0 +1,832 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IntelMetadataBuffer"
+#include <wrs_omxil_core/log.h>
+
+#include "IntelMetadataBuffer.h"
+#include <string.h>
+#include <stdio.h>
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+#include <binder/IServiceManager.h>
+#include <binder/MemoryBase.h>
+#include <binder/Parcel.h>
+#include <utils/List.h>
+#include <utils/threads.h>
+#include <ui/GraphicBuffer.h>
+
+//#define TEST
+
+struct ShareMemMap {
+ uint32_t sessionflag;
+ intptr_t value;
+ intptr_t value_backup;
+ uint32_t type;
+ sp<MemoryBase> membase;
+ sp<GraphicBuffer> gbuffer;
+};
+
+List <ShareMemMap *> gShareMemMapList;
+Mutex gShareMemMapListLock;
+
+enum {
+ SHARE_MEM = IBinder::FIRST_CALL_TRANSACTION,
+ GET_MEM,
+ CLEAR_MEM,
+};
+
+enum {
+ ST_MEMBASE = 0,
+ ST_GFX,
+ ST_MAX,
+};
+
+#define REMOTE_PROVIDER 0x80000000
+#define REMOTE_CONSUMER 0x40000000
+
+static ShareMemMap* ReadMemObjFromBinder(const Parcel& data, uint32_t sessionflag, intptr_t value) {
+
+ uint32_t type = data.readInt32();
+ if (type >= ST_MAX)
+ return NULL;
+
+ ShareMemMap* map = new ShareMemMap;
+ map->sessionflag = sessionflag;
+ map->type = type;
+ map->value_backup = value;
+ map->membase = NULL;
+ map->gbuffer= NULL;
+
+// LOGI("ReadMemObjFromBinder");
+
+ if (type == ST_MEMBASE) /*offset, size, heap*/
+ {
+ ssize_t offset = data.readInt32();
+ size_t size = data.readInt32();
+
+ sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
+
+ sp<MemoryBase> mem = new MemoryBase(heap, offset, size);
+ if (mem == NULL)
+ {
+ delete map;
+ return NULL;
+ }
+
+ map->value = (intptr_t)( mem->pointer() + 0x0FFF) & ~0x0FFF;
+ map->membase = mem;
+
+#ifdef TEST
+ ALOGI("membase heapID:%d, pointer:%x data:%x, aligned value:%x", \
+ heap->getHeapID(), mem->pointer(), *((intptr_t *)(mem->pointer())), map->value);
+#endif
+
+ }
+ else if (type == ST_GFX) /*graphicbuffer*/
+ {
+ sp<GraphicBuffer> buffer = new GraphicBuffer();
+ if (buffer == NULL)
+ {
+ delete map;
+ return NULL;
+ }
+ data.read(*buffer);
+
+ map->value = (intptr_t)buffer->handle;
+ map->gbuffer = buffer;
+
+#ifdef TEST
+ void* usrptr[3];
+ buffer->lock(GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_READ_OFTEN, &usrptr[0]);
+ buffer->unlock();
+ ALOGI("gfx handle:%p data:%x", (intptr_t)buffer->handle, *((intptr_t *)usrptr[0]));
+#endif
+ }
+
+ gShareMemMapListLock.lock();
+ gShareMemMapList.push_back(map);
+ gShareMemMapListLock.unlock();
+ return map;
+}
+
+static status_t WriteMemObjToBinder(Parcel& data, ShareMemMap* smem) {
+
+ if (smem->type >= ST_MAX)
+ return BAD_VALUE;
+
+// LOGI("WriteMemObjToBinder");
+
+ data.writeInt32(smem->type);
+
+ if (smem->type == ST_MEMBASE) /*offset, size, heap*/
+ {
+ ssize_t offset;
+ size_t size;
+ sp<IMemoryHeap> heap = smem->membase->getMemory(&offset, &size);
+ data.writeInt32(offset);
+ data.writeInt32(size);
+ data.writeStrongBinder(heap->asBinder());
+#ifdef TEST
+ ALOGI("membase heapID:%d pointer:%x data:%x", \
+ heap->getHeapID(), smem->membase->pointer(), *((int *)(smem->membase->pointer())));
+#endif
+ }
+ else if (smem->type == ST_GFX) /*graphicbuffer*/
+ data.write(*(smem->gbuffer));
+
+ return NO_ERROR;
+}
+
+static void ClearLocalMem(uint32_t sessionflag)
+{
+ List<ShareMemMap *>::iterator node;
+
+ gShareMemMapListLock.lock();
+
+ for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); )
+ {
+ if ((*node)->sessionflag == sessionflag) //remove all buffers belong to this session
+ {
+ (*node)->membase = NULL;
+ (*node)->gbuffer = NULL;
+ delete (*node);
+ node = gShareMemMapList.erase(node);
+ }
+ else
+ node ++;
+ }
+
+ gShareMemMapListLock.unlock();
+}
+
+static ShareMemMap* FindShareMem(uint32_t sessionflag, intptr_t value, bool isBackup)
+{
+ List<ShareMemMap *>::iterator node;
+
+ gShareMemMapListLock.lock();
+ for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); node++)
+ {
+ if (isBackup)
+ {
+ if ((*node)->sessionflag == sessionflag && (*node)->value_backup == value)
+ {
+ gShareMemMapListLock.unlock();
+ return (*node);
+ }
+ }
+ else if ((*node)->sessionflag == sessionflag && (*node)->value == value)
+ {
+ gShareMemMapListLock.unlock();
+ return (*node);
+ }
+ }
+ gShareMemMapListLock.unlock();
+
+ return NULL;
+}
+
+static ShareMemMap* PopShareMem(uint32_t sessionflag, intptr_t value)
+{
+    List<ShareMemMap *>::iterator node;
+    gShareMemMapListLock.lock();
+    for(node = gShareMemMapList.begin(); node != gShareMemMapList.end(); node++)
+    {
+        if ((*node)->sessionflag == sessionflag && (*node)->value == value)
+        {
+            ShareMemMap* found = *node; //must copy first: erase() invalidates node
+            gShareMemMapList.erase(node);
+            gShareMemMapListLock.unlock();
+            return found;
+        }
+    }
+    gShareMemMapListLock.unlock();
+
+    return NULL;
+}
+
+static void PushShareMem(ShareMemMap* &smem)
+{
+ gShareMemMapListLock.lock();
+ gShareMemMapList.push_back(smem);
+ gShareMemMapListLock.unlock();
+}
+
+static sp<IBinder> GetIntelBufferSharingService() {
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(String16("media.IntelBufferSharing"));
+
+ if (binder == 0)
+ ALOGE("media.IntelBufferSharing service is not published");
+
+ return binder;
+}
+
+IntelBufferSharingService* IntelBufferSharingService::gBufferService = NULL;
+
+status_t IntelBufferSharingService::instantiate(){
+ status_t ret = NO_ERROR;
+
+ if (gBufferService == NULL) {
+ gBufferService = new IntelBufferSharingService();
+ ret = defaultServiceManager()->addService(String16("media.IntelBufferSharing"), gBufferService);
+ LOGI("IntelBufferSharingService::instantiate() ret = %d\n", ret);
+ }
+
+ return ret;
+}
+
+status_t IntelBufferSharingService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
+
+ //TODO: if pid is int32?
+ pid_t pid = data.readInt32();
+ uint32_t sessionflag = data.readInt32();
+
+ switch(code)
+ {
+ case SHARE_MEM:
+ {
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ ALOGE("onTransact in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ intptr_t value = data.readIntPtr();
+
+// LOGI("onTransact SHARE_MEM value=%x", value);
+
+ //different process
+ ShareMemMap* map = ReadMemObjFromBinder(data, sessionflag, value);
+ if (map == NULL)
+ return UNKNOWN_ERROR;
+
+ reply->writeIntPtr(map->value);
+
+ return NO_ERROR;
+ }
+ case CLEAR_MEM:
+ {
+// LOGI("onTransact CLEAR_MEM sessionflag=%x", sessionflag);
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ //same process, return same pointer in data
+ ALOGE("onTransact CLEAR_MEM in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ ClearLocalMem(sessionflag);
+ return NO_ERROR;
+ }
+ case GET_MEM:
+ {
+
+ if (pid == getpid()) //in same process, should not use binder
+ {
+ ALOGE("onTransact GET_MEM in same process, wrong sessionflag?");
+ return UNKNOWN_ERROR;
+ }
+
+ intptr_t value = data.readIntPtr();
+
+// LOGI("onTransact GET_MEM value=%x", value);
+
+ ShareMemMap* smem = FindShareMem(sessionflag, value, false);
+ if (smem && (NO_ERROR == WriteMemObjToBinder(*reply, smem)))
+ return NO_ERROR;
+ else
+ ALOGE("onTransact GET_MEM: Not find mem");
+
+ return UNKNOWN_ERROR;
+ }
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+
+ }
+ return NO_ERROR;
+}
+#endif
+
+IntelMetadataBuffer::IntelMetadataBuffer()
+{
+ mType = IntelMetadataBufferTypeCameraSource;
+ mValue = 0;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ mSessionFlag = 0;
+#endif
+}
+
+IntelMetadataBuffer::IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value)
+{
+ mType = type;
+ mValue = value;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ mSessionFlag = 0;
+#endif
+}
+
+IntelMetadataBuffer::~IntelMetadataBuffer()
+{
+ if (mInfo)
+ delete mInfo;
+
+ if (mExtraValues)
+ delete[] mExtraValues;
+
+ if (mBytes)
+ delete[] mBytes;
+}
+
+
+IntelMetadataBuffer::IntelMetadataBuffer(const IntelMetadataBuffer& imb)
+ :mType(imb.mType), mValue(imb.mValue), mInfo(NULL), mExtraValues(NULL),
+ mExtraValues_Count(imb.mExtraValues_Count), mBytes(NULL), mSize(imb.mSize)
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ ,mSessionFlag(imb.mSessionFlag)
+#endif
+{
+ if (imb.mInfo)
+ mInfo = new ValueInfo(*imb.mInfo);
+
+ if (imb.mExtraValues)
+ {
+ mExtraValues = new intptr_t[mExtraValues_Count];
+ memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count);
+ }
+
+ if (imb.mBytes)
+ {
+ mBytes = new uint8_t[mSize];
+ memcpy(mBytes, imb.mBytes, mSize);
+ }
+}
+
+const IntelMetadataBuffer& IntelMetadataBuffer::operator=(const IntelMetadataBuffer& imb)
+{
+    if (this == &imb) return *this; //guard against self-assignment
+    mType = imb.mType;
+    mValue = imb.mValue;
+    delete mInfo; mInfo = NULL; //free owned buffers before overwrite, else they leak
+    delete[] mExtraValues; mExtraValues = NULL;
+    mExtraValues_Count = imb.mExtraValues_Count;
+    delete[] mBytes; mBytes = NULL;
+    mSize = imb.mSize;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    mSessionFlag = imb.mSessionFlag;
+#endif
+    if (imb.mInfo)
+        mInfo = new ValueInfo(*imb.mInfo);
+
+    if (imb.mExtraValues)
+    {
+        mExtraValues = new intptr_t[mExtraValues_Count];
+        memcpy(mExtraValues, imb.mExtraValues, sizeof(mValue) * mExtraValues_Count);
+    }
+
+    if (imb.mBytes)
+    {
+        mBytes = new uint8_t[mSize];
+        memcpy(mBytes, imb.mBytes, mSize);
+    }
+
+    return *this;
+}
+
+IMB_Result IntelMetadataBuffer::GetType(IntelMetadataBufferType& type)
+{
+ type = mType;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetType(IntelMetadataBufferType type)
+{
+ if (type < IntelMetadataBufferTypeLast)
+ mType = type;
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValue(intptr_t& value)
+{
+ value = mValue;
+
+#ifndef INTEL_VIDEO_XPROC_SHARING
+ return IMB_SUCCESS;
+#else
+ if ((mSessionFlag & REMOTE_CONSUMER) == 0) //no sharing or is local consumer
+ return IMB_SUCCESS;
+
+ //try to find if it is already cached.
+ ShareMemMap* smem = FindShareMem(mSessionFlag, mValue, true);
+ if(smem)
+ {
+ value = smem->value;
+ return IMB_SUCCESS;
+ }
+
+ //is remote provider and not find from cache, then pull from service
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ //Detect IntelBufferSharingService, share mem to service
+ Parcel data, reply;
+
+ //send pid, sessionflag, and memtype
+ pid_t pid = getpid();
+ //TODO: if pid is int32?
+ data.writeInt32(pid);
+ data.writeInt32(mSessionFlag);
+ data.writeIntPtr(mValue);
+
+ //do transcation
+ if (binder->transact(GET_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //get type/Mem OBJ
+ smem = ReadMemObjFromBinder(reply, mSessionFlag, mValue);
+ if (smem)
+ value = smem->value;
+ else
+ return IMB_SERVICE_FAIL;
+
+ return IMB_SUCCESS;
+#endif
+}
+
+IMB_Result IntelMetadataBuffer::SetValue(intptr_t value)
+{
+ mValue = value;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValueInfo(ValueInfo* &info)
+{
+ info = mInfo;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info)
+{
+ if (info)
+ {
+ if (mInfo == NULL)
+ mInfo = new ValueInfo;
+
+ memcpy(mInfo, info, sizeof(ValueInfo));
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetExtraValues(intptr_t* &values, uint32_t& num)
+{
+ values = mExtraValues;
+ num = mExtraValues_Count;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetExtraValues(intptr_t* values, uint32_t num)
+{
+ if (values && num > 0)
+ {
+ if (mExtraValues && mExtraValues_Count != num)
+ {
+ delete[] mExtraValues;
+ mExtraValues = NULL;
+ }
+
+ if (mExtraValues == NULL)
+ mExtraValues = new intptr_t[num];
+
+ memcpy(mExtraValues, values, sizeof(intptr_t) * num);
+ mExtraValues_Count = num;
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::UnSerialize(uint8_t* data, uint32_t size)
+{
+ if (!data || size == 0)
+ return IMB_INVAL_PARAM;
+
+ IntelMetadataBufferType type;
+ intptr_t value;
+ uint32_t extrasize = size - sizeof(type) - sizeof(value);
+ ValueInfo* info = NULL;
+ intptr_t* ExtraValues = NULL;
+ uint32_t ExtraValues_Count = 0;
+
+ memcpy(&type, data, sizeof(type));
+ data += sizeof(type);
+ memcpy(&value, data, sizeof(value));
+ data += sizeof(value);
+
+ switch (type)
+ {
+ case IntelMetadataBufferTypeCameraSource:
+ case IntelMetadataBufferTypeEncoder:
+ case IntelMetadataBufferTypeUser:
+ {
+ if (extrasize >0 && extrasize < sizeof(ValueInfo))
+ return IMB_INVAL_BUFFER;
+
+ if (extrasize > sizeof(ValueInfo)) //has extravalues
+ {
+ if ( (extrasize - sizeof(ValueInfo)) % sizeof(mValue) != 0 )
+ return IMB_INVAL_BUFFER;
+ ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / sizeof(mValue);
+ }
+
+ if (extrasize > 0)
+ {
+ info = new ValueInfo;
+ memcpy(info, data, sizeof(ValueInfo));
+ data += sizeof(ValueInfo);
+ }
+
+ if (ExtraValues_Count > 0)
+ {
+ ExtraValues = new intptr_t[ExtraValues_Count];
+ memcpy(ExtraValues, data, ExtraValues_Count * sizeof(mValue));
+ }
+
+ break;
+ }
+ case IntelMetadataBufferTypeGrallocSource:
+ if (extrasize > 0)
+ return IMB_INVAL_BUFFER;
+
+ break;
+ default:
+ return IMB_INVAL_BUFFER;
+ }
+
+ //store data
+ mType = type;
+ mValue = value;
+ if (mInfo)
+ delete mInfo;
+ mInfo = info;
+ if (mExtraValues)
+ delete[] mExtraValues;
+ mExtraValues = ExtraValues;
+ mExtraValues_Count = ExtraValues_Count;
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ if (mInfo != NULL)
+ mSessionFlag = mInfo->sessionFlag;
+#endif
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::Serialize(uint8_t* &data, uint32_t& size)
+{
+ if (mBytes == NULL)
+ {
+ if (mType == IntelMetadataBufferTypeGrallocSource && mInfo)
+ return IMB_INVAL_PARAM;
+
+ //assemble bytes according members
+ mSize = sizeof(mType) + sizeof(mValue);
+ if (mInfo)
+ {
+ mSize += sizeof(ValueInfo);
+ if (mExtraValues)
+ mSize += sizeof(mValue) * mExtraValues_Count;
+ }
+
+ mBytes = new uint8_t[mSize];
+ uint8_t *ptr = mBytes;
+ memcpy(ptr, &mType, sizeof(mType));
+ ptr += sizeof(mType);
+ memcpy(ptr, &mValue, sizeof(mValue));
+ ptr += sizeof(mValue);
+
+ if (mInfo)
+ {
+ #ifdef INTEL_VIDEO_XPROC_SHARING
+ mInfo->sessionFlag = mSessionFlag;
+ #endif
+ memcpy(ptr, mInfo, sizeof(ValueInfo));
+ ptr += sizeof(ValueInfo);
+
+ if (mExtraValues)
+ memcpy(ptr, mExtraValues, mExtraValues_Count * sizeof(mValue));
+ }
+ }
+
+ data = mBytes;
+ size = mSize;
+
+ return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::GetMaxBufferSize()
+{
+ return 256;
+}
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+IMB_Result IntelMetadataBuffer::GetSessionFlag(uint32_t& sessionflag)
+{
+ sessionflag = mSessionFlag;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetSessionFlag(uint32_t sessionflag)
+{
+ mSessionFlag = sessionflag;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<MemoryBase> mem)
+{
+ mValue = (intptr_t)((intptr_t) ( mem->pointer() + 0x0FFF) & ~0x0FFF);
+
+ if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing
+ return IMB_SUCCESS;
+
+ if (mSessionFlag & REMOTE_PROVIDER) //is remote provider
+ {
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ //Detect IntelBufferSharingService, share mem to service
+ Parcel data, reply;
+
+ //send pid, sessionflag, and value
+ pid_t pid = getpid();
+ //TODO: if pid is int32?
+ data.writeInt32(pid);
+ data.writeInt32(mSessionFlag);
+ data.writeIntPtr(mValue);
+
+ //send type/obj (offset/size/MemHeap)
+ ShareMemMap smem;
+ smem.membase = mem;
+ smem.type = ST_MEMBASE;
+ if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //do transcation
+ if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+
+ //set new value gotten from peer
+ mValue = reply.readIntPtr();
+// LOGI("ShareValue(membase) Get reply from sevice, new value:%x\n", mValue);
+ }
+ else //is local provider , direct access list
+ {
+ ShareMemMap* smem = new ShareMemMap;
+ smem->sessionflag = mSessionFlag;
+ smem->value = mValue;
+ smem->value_backup = mValue;
+ smem->type = ST_MEMBASE;
+ smem->membase = mem;
+ smem->gbuffer = NULL;
+ PushShareMem(smem);
+ }
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ShareValue(sp<GraphicBuffer> gbuffer)
+{
+    mValue = (intptr_t)gbuffer->handle;
+
+    if ( !(mSessionFlag & REMOTE_PROVIDER) && !(mSessionFlag & REMOTE_CONSUMER)) //no sharing
+        return IMB_SUCCESS;
+
+    if (mSessionFlag & REMOTE_PROVIDER) //is remote provider (was `& X == 0`: == binds tighter, branch was dead)
+    {
+        sp<IBinder> binder = GetIntelBufferSharingService();
+        if (binder == 0)
+            return IMB_NO_SERVICE;
+
+        Parcel data, reply;
+
+        //send pid, sessionflag, and memtype
+        pid_t pid = getpid();
+        //TODO: if pid is int32 ?
+        data.writeInt32(pid);
+        data.writeInt32(mSessionFlag);
+        data.writeIntPtr(mValue);
+
+        //send value/graphicbuffer obj
+        ShareMemMap smem;
+        smem.gbuffer = gbuffer;
+        smem.type = ST_GFX;
+        if (WriteMemObjToBinder(data, &smem) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //do transcation
+        if (binder->transact(SHARE_MEM, data, &reply) != NO_ERROR)
+            return IMB_SERVICE_FAIL;
+
+        //set new value gotten from peer
+        mValue = reply.readIntPtr();
+//        LOGI("ShareValue(gfx) Get reply from sevice, new value:%x\n", mValue);
+    }
+    else //is local provider, direct access list
+    {
+        ShareMemMap* smem = new ShareMemMap;
+        smem->sessionflag = mSessionFlag;
+        smem->value = mValue;
+        smem->value_backup = mValue;
+        smem->type = ST_GFX;
+        smem->membase = NULL;
+        smem->gbuffer = gbuffer;
+        PushShareMem(smem);
+    }
+
+    return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::ClearContext(uint32_t sessionflag, bool isProvider)
+{
+ if ( !(sessionflag & REMOTE_PROVIDER) && !(sessionflag & REMOTE_CONSUMER)) //no sharing
+ return IMB_SUCCESS;
+
+ //clear local firstly
+ ClearLocalMem(sessionflag);
+
+ //clear mem on service if it is remote user
+ if ((isProvider && (sessionflag & REMOTE_PROVIDER)) || (!isProvider && (sessionflag & REMOTE_CONSUMER)))
+ {
+// LOGI("CLEAR_MEM sessionflag=%x", sessionflag);
+
+ sp<IBinder> binder = GetIntelBufferSharingService();
+ if (binder == 0)
+ return IMB_NO_SERVICE;
+
+ //Detect IntelBufferSharingService, unshare mem from service
+ Parcel data, reply;
+
+ //send pid and sessionflag
+ pid_t pid = getpid();
+ //TODO: if pid is int32?
+ data.writeInt32(pid);
+ data.writeInt32(sessionflag);
+
+ if (binder->transact(CLEAR_MEM, data, &reply) != NO_ERROR)
+ return IMB_SERVICE_FAIL;
+ }
+
+ return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::MakeSessionFlag(bool romoteProvider, bool remoteConsumer, uint16_t sindex)
+{
+ uint32_t sessionflag = 0;
+
+ if (romoteProvider)
+ sessionflag |= REMOTE_PROVIDER;
+
+ if (remoteConsumer)
+ sessionflag |= REMOTE_CONSUMER;
+
+ return sessionflag + sindex;
+}
+#endif
diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
new file mode 100644
index 0000000..20a9590
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -0,0 +1,162 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef _INTEL_METADATA_BUFFER_H_
+#define _INTEL_METADATA_BUFFER_H_
+
+#include <stdint.h>
+
+//#define INTEL_VIDEO_XPROC_SHARING
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+#include <binder/MemoryBase.h>
+#include <ui/GraphicBuffer.h>
+
+using namespace android;
+#endif
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+
+typedef enum {
+ IMB_SUCCESS = 0,
+ IMB_INVAL_PARAM = 1,
+ IMB_INVAL_BUFFER = 2,
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ IMB_NO_SERVICE = 3,
+ IMB_SERVICE_FAIL = 4,
+#endif
+}IMB_Result;
+
+typedef enum {
+ MEM_MODE_MALLOC = 1,
+ MEM_MODE_CI = 2,
+ MEM_MODE_V4L2 = 4,
+ MEM_MODE_SURFACE = 8,
+ MEM_MODE_USRPTR = 16,
+ MEM_MODE_GFXHANDLE = 32,
+ MEM_MODE_KBUFHANDLE = 64,
+ MEM_MODE_ION = 128,
+ MEM_MODE_NONECACHE_USRPTR = 256,
+}MemMode;
+
+typedef struct {
+ MemMode mode; //memory type, vasurface/malloc/gfx/ion/v4l2/ci etc
+ intptr_t handle; //handle
+ uint32_t size; //memory size
+ uint32_t width; //picture width
+ uint32_t height; //picture height
+ uint32_t lumaStride; //picture luma stride
+ uint32_t chromStride; //picture chrom stride
+ uint32_t format; //color format
+ uint32_t s3dformat; //S3D format
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ uint32_t sessionFlag; //for buffer sharing session
+#endif
+}ValueInfo;
+
+typedef enum {
+ IntelMetadataBufferTypeCameraSource = 0, //same with kMetadataBufferTypeCameraSource in framework
+ IntelMetadataBufferTypeGrallocSource = 1, //same with kMetadataBufferTypeGrallocSource in framework
+
+ IntelMetadataBufferTypeExtension = 0xFF, //intel extended type
+ IntelMetadataBufferTypeEncoder = IntelMetadataBufferTypeExtension, //for WiDi clone mode
+ IntelMetadataBufferTypeUser = IntelMetadataBufferTypeExtension + 1, //for WiDi user mode
+ IntelMetadataBufferTypeLast = IntelMetadataBufferTypeExtension + 2, //type number
+}IntelMetadataBufferType;
+
+class IntelMetadataBuffer {
+public:
+ IntelMetadataBuffer(); //for generator
+ IntelMetadataBuffer(IntelMetadataBufferType type, intptr_t value); //for quick generator
+ ~IntelMetadataBuffer();
+
+ IntelMetadataBuffer(const IntelMetadataBuffer& imb);
+ const IntelMetadataBuffer& operator=(const IntelMetadataBuffer& imb);
+
+ IMB_Result GetType(IntelMetadataBufferType &type);
+ IMB_Result SetType(IntelMetadataBufferType type);
+ IMB_Result GetValue(intptr_t &value);
+ IMB_Result SetValue(intptr_t value);
+ IMB_Result GetValueInfo(ValueInfo* &info);
+ IMB_Result SetValueInfo(ValueInfo *info);
+ IMB_Result GetExtraValues(intptr_t* &values, uint32_t &num);
+ IMB_Result SetExtraValues(intptr_t *values, uint32_t num);
+
+ //New API for bytes input/ouput, UnSerialize=SetBytes, Serialize=GetBytes
+ IMB_Result UnSerialize(uint8_t* data, uint32_t size);
+ IMB_Result Serialize(uint8_t* &data, uint32_t& size);
+
+ //Static, for get max IntelMetadataBuffer size
+ static uint32_t GetMaxBufferSize();
+
+private:
+ IntelMetadataBufferType mType;
+ intptr_t mValue;
+ ValueInfo* mInfo;
+
+ intptr_t* mExtraValues;
+ uint32_t mExtraValues_Count;
+
+ uint8_t* mBytes;
+ uint32_t mSize;
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+public:
+ IMB_Result ShareValue(sp<MemoryBase> mem);
+ IMB_Result ShareValue(sp<GraphicBuffer> gbuffer);
+
+ IMB_Result GetSessionFlag(uint32_t &sessionflag);
+ IMB_Result SetSessionFlag(uint32_t sessionflag);
+
+ //Static, for clear context
+ static IMB_Result ClearContext(uint32_t sessionflag, bool isProvider = true);
+
+ static const uint16_t CAMERA_BASE = 0x0000;
+ static const uint16_t WIDI_BASE = 0x1000;
+ static const uint16_t WEBRTC_BASE = 0x2000;
+ static const uint16_t VIDEOEDIT_BASE = 0x3000;
+
+ static uint32_t MakeSessionFlag(bool romoteProvider, bool remoteConsumer, uint16_t sindex);
+
+private:
+ uint32_t mSessionFlag;
+#endif
+
+};
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+
+class IntelBufferSharingService : public BBinder
+{
+private:
+ static IntelBufferSharingService *gBufferService;
+
+public:
+ static status_t instantiate();
+
+ IntelBufferSharingService(){
+ ALOGI("IntelBufferSharingService instance is created");
+ }
+
+ ~IntelBufferSharingService(){
+ ALOGI("IntelBufferSharingService instance is destroyed");
+ }
+
+ status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags);
+};
+#endif
+
+#endif
+
diff --git a/videoencoder/PVSoftMPEG4Encoder.cpp b/videoencoder/PVSoftMPEG4Encoder.cpp
new file mode 100644
index 0000000..6b893df
--- /dev/null
+++ b/videoencoder/PVSoftMPEG4Encoder.cpp
@@ -0,0 +1,513 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PVSoftMPEG4Encoder"
+#include <wrs_omxil_core/log.h>
+
+#include "mp4enc_api.h"
+#include "OMX_Video.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <ui/GraphicBufferMapper.h>
+#include <ui/Rect.h>
+
+#include "PVSoftMPEG4Encoder.h"
+#include "VideoEncoderLog.h"
+
+#define ALIGN(x, align) (((x) + (align) - 1) & (~((align) - 1)))
+
+// Convert a semi-planar YUV420 frame (Y plane followed by an interleaved
+// chroma plane) into planar YUV420 (Y, then Cb, then Cr). Reads and writes
+// width*height*3/2 bytes; the buffers must not overlap. Chroma is processed
+// 4 bytes (two chroma pairs) at a time, so width is assumed to be a
+// multiple of 4 and inyuv+outYsize to be 4-byte aligned.
+inline static void ConvertYUV420SemiPlanarToYUV420Planar(
+ uint8_t *inyuv, uint8_t* outyuv,
+ int32_t width, int32_t height) {
+
+ int32_t outYsize = width * height;
+ uint32_t *outy = (uint32_t *) outyuv;
+ uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
+ uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
+
+ /* Y copying */
+ memcpy(outy, inyuv, outYsize);
+
+ /* U & V copying */
+ uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
+ for (int32_t i = height >> 1; i > 0; --i) {
+ for (int32_t j = width >> 2; j > 0; --j) {
+ uint32_t temp = *inyuv_4++;
+ uint32_t tempU = temp & 0xFF;
+ tempU = tempU | ((temp >> 8) & 0xFF00);
+
+ uint32_t tempV = (temp >> 8) & 0xFF;
+ tempV = tempV | ((temp >> 16) & 0xFF00);
+
+ // De-interleave: even bytes go to outcb, odd bytes to outcr
+ // (assumes little-endian loads and U-first NV12 ordering -
+ // NOTE(review): confirm source is NV12, not NV21).
+ *outcb++ = tempU;
+ *outcr++ = tempV;
+ }
+ }
+}
+
+// Copy an NV12 frame out of a padded source buffer into a tightly packed
+// destination. 'stride' is the source row pitch and 'alignedHeight' the
+// padded luma plane height, so the interleaved UV plane starts at
+// stride * alignedHeight in dataIn. dataOut must hold width*height*3/2 bytes.
+inline static void trimBuffer(uint8_t *dataIn, uint8_t *dataOut,
+ int32_t width, int32_t height,
+ int32_t alignedHeight, int32_t stride) {
+ int32_t h;
+ uint8_t *y_start, *uv_start, *_y_start, *_uv_start;
+ y_start = dataOut;
+ uv_start = dataOut + width * height;
+ _y_start = dataIn;
+ _uv_start = dataIn + stride * alignedHeight;
+
+ // Luma: copy 'width' bytes per row, dropping per-row padding.
+ for (h = 0; h < height; h++)
+ memcpy(y_start + h * width, _y_start + h * stride, width);
+ // Chroma: height/2 interleaved UV rows of 'width' bytes each.
+ for (h = 0; h < height / 2; h++)
+ memcpy(uv_start + h * width,
+ _uv_start + h * stride, width);
+}
+
+// Construct a software MPEG4/H.263 encoder wrapping the PV (PacketVideo)
+// core. The OMX component name selects the mode: "OMX.google.h263.encoder"
+// -> H263_MODE; anything else must be "OMX.google.mpeg4.encoder" (asserted)
+// and keeps the combined-mode default.
+PVSoftMPEG4Encoder::PVSoftMPEG4Encoder(const char *name)
+ : mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
+ mVideoWidth(176),
+ mVideoHeight(144),
+ mVideoFrameRate(30),
+ mVideoBitRate(192000),
+ mVideoColorFormat(OMX_COLOR_FormatYUV420SemiPlanar),
+ mStoreMetaDataInBuffers(false),
+ mIDRFrameRefreshIntervalInSec(1),
+ mNumInputFrames(-1),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSignalledError(false),
+ mHandle(new tagvideoEncControls),
+ mEncParams(new tagvideoEncOptions),
+ mInputFrameData(NULL)
+{
+
+ if (!strcmp(name, "OMX.google.h263.encoder")) {
+ mEncodeMode = H263_MODE;
+ LOG_I("construct h263 encoder");
+ } else {
+ CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
+ LOG_I("construct mpeg4 encoder");
+ }
+
+ // NOTE(review): mTrimedInputData is not initialized here; it is only
+ // assigned in initEncParams() - confirm no path reads it before start().
+ setDefaultParams();
+#if NO_BUFFER_SHARE
+ mVASurfaceMappingAction |= MAPACT_COPY;
+#endif
+
+ LOG_I("Construct PVSoftMPEG4Encoder");
+
+}
+
+// Destructor: releases all encoder resources. releaseEncoder() is a no-op
+// when the encoder was never started.
+PVSoftMPEG4Encoder::~PVSoftMPEG4Encoder() {
+ LOG_I("Destruct PVSoftMPEG4Encoder");
+ releaseEncoder();
+
+}
+
+// Populate mComParams with the library-wide defaults used before the client
+// calls setParameters(). Values here mirror the common VideoParamsCommon
+// defaults; most are overwritten by setParameters(VideoParamsTypeCommon).
+// NOTE(review): profile defaults to VAProfileH264Baseline even though this
+// is an MPEG4/H263 encoder - appears to be a shared-struct default; confirm
+// it is ignored by the PV path.
+void PVSoftMPEG4Encoder::setDefaultParams() {
+
+ // Set default value for input parameters
+ mComParams.profile = VAProfileH264Baseline;
+ mComParams.level = 41;
+ mComParams.rawFormat = RAW_FORMAT_NV12;
+ mComParams.frameRate.frameRateNum = 30;
+ mComParams.frameRate.frameRateDenom = 1;
+ mComParams.resolution.width = 0;
+ mComParams.resolution.height = 0;
+ mComParams.intraPeriod = 30;
+ mComParams.rcMode = RATE_CONTROL_NONE;
+ mComParams.rcParams.initQP = 15;
+ mComParams.rcParams.minQP = 0;
+ mComParams.rcParams.bitRate = 640000;
+ mComParams.rcParams.targetPercentage= 0;
+ mComParams.rcParams.windowSize = 0;
+ mComParams.rcParams.disableFrameSkip = 0;
+ mComParams.rcParams.disableBitsStuffing = 1;
+ mComParams.cyclicFrameInterval = 30;
+ mComParams.refreshType = VIDEO_ENC_NONIR;
+ mComParams.airParams.airMBs = 0;
+ mComParams.airParams.airThreshold = 0;
+ mComParams.airParams.airAuto = 1;
+ mComParams.disableDeblocking = 2;
+ mComParams.syncEncMode = false;
+ mComParams.codedBufNum = 2;
+
+}
+
+// Translate the current member settings (width/height/framerate/bitrate)
+// into the PV encoder's tagvideoEncOptions and allocate the intermediate
+// frame buffers. Returns ENCODE_INVALID_PARAMS if dimensions are not
+// multiples of 16, ENCODE_FAIL if the PV defaults cannot be fetched.
+Encode_Status PVSoftMPEG4Encoder::initEncParams() {
+ CHECK(mHandle != NULL);
+ memset(mHandle, 0, sizeof(tagvideoEncControls));
+
+ CHECK(mEncParams != NULL);
+ memset(mEncParams, 0, sizeof(tagvideoEncOptions));
+ if (!PVGetDefaultEncOption(mEncParams, 0)) {
+ LOG_E("Failed to get default encoding parameters");
+ return ENCODE_FAIL;
+ }
+ mEncParams->encMode = mEncodeMode;
+ mEncParams->encWidth[0] = mVideoWidth;
+ mEncParams->encHeight[0] = mVideoHeight;
+ mEncParams->encFrameRate[0] = mVideoFrameRate;
+ mEncParams->rcType = VBR_1;
+ mEncParams->vbvDelay = 5.0f;
+
+ // FIXME:
+ // Add more profile and level support for MPEG4 encoder
+ mEncParams->profile_level = CORE_PROFILE_LEVEL2;
+ mEncParams->packetSize = 32;
+ mEncParams->rvlcEnable = PV_OFF;
+ mEncParams->numLayers = 1;
+ mEncParams->timeIncRes = 1000;
+ mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;
+
+ // Cap the bitrate at 2 Mbps, the PV encoder's practical upper limit.
+ mEncParams->bitRate[0] = mVideoBitRate <= 2000000 ? mVideoBitRate : 2000000;
+ mEncParams->iQuant[0] = 15;
+ mEncParams->pQuant[0] = 12;
+ mEncParams->quantType[0] = 0;
+ mEncParams->noFrameSkipped = PV_OFF;
+
+ // Scratch buffer holding the tightly packed input frame (YUV420, 3/2
+ // bytes per pixel). Freed in releaseEncoder().
+ mTrimedInputData =
+ (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ CHECK(mTrimedInputData != NULL);
+
+ if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ // Color conversion is needed.
+ CHECK(mInputFrameData == NULL);
+ mInputFrameData =
+ (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ CHECK(mInputFrameData != NULL);
+ }
+
+ // PV's MPEG4 encoder requires the video dimension of multiple
+ // NOTE(review): this check runs after the allocations above; the buffers
+ // remain owned by the object and are released in releaseEncoder().
+ if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ LOG_E("Video frame size %dx%d must be a multiple of 16",
+ mVideoWidth, mVideoHeight);
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ // Set IDR frame refresh interval
+ // <0: never, 0: every frame is I, >0: interval in seconds * fps frames.
+ if (mIDRFrameRefreshIntervalInSec < 0) {
+ mEncParams->intraPeriod = -1;
+ } else if (mIDRFrameRefreshIntervalInSec == 0) {
+ mEncParams->intraPeriod = 1; // All I frames
+ } else {
+ mEncParams->intraPeriod =
+ (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ }
+
+ mEncParams->numIntraMB = 0;
+ mEncParams->sceneDetect = PV_ON;
+ mEncParams->searchRange = 16;
+ mEncParams->mv8x8Enable = PV_OFF;
+ mEncParams->gobHeaderInterval = 0;
+ mEncParams->useACPred = PV_ON;
+ mEncParams->intraDCVlcTh = 0;
+
+ return ENCODE_SUCCESS;
+}
+
+// Initialize the PV encoder: fill parameters via initEncParams(), then call
+// PVInitVideoEncoder(). On success, resets per-session state (timestamps,
+// frame counter, VOL header length). Sets mSignalledError on failure.
+Encode_Status PVSoftMPEG4Encoder::initEncoder() {
+ LOG_V("Begin\n");
+
+ CHECK(!mStarted);
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ if (ENCODE_SUCCESS != (ret = initEncParams())) {
+ LOG_E("Failed to initialized encoder params");
+ mSignalledError = true;
+ return ret;
+ }
+
+ if (!PVInitVideoEncoder(mHandle, mEncParams)) {
+ LOG_E("Failed to initialize the encoder");
+ mSignalledError = true;
+ return ENCODE_FAIL;
+ }
+
+ mNumInputFrames = -1; // 1st buffer for codec specific data
+ mStarted = true;
+ mCurTimestampUs = 0;
+ mLastTimestampUs = 0;
+ // Capacity of mVolHeader[]; PVGetVolHeader updates it to the actual size.
+ mVolHeaderLength = 256;
+
+ LOG_V("End\n");
+
+ return ENCODE_SUCCESS;
+}
+
+// Release all encoder resources acquired by initEncParams()/initEncoder().
+// No-op (returns ENCODE_SUCCESS) when the encoder was never started.
+Encode_Status PVSoftMPEG4Encoder::releaseEncoder() {
+ LOG_V("Begin\n");
+
+ if (!mStarted) {
+ return ENCODE_SUCCESS;
+ }
+
+ PVCleanUpVideoEncoder(mHandle);
+
+ // mTrimedInputData and mInputFrameData are allocated with malloc() in
+ // initEncParams(); releasing malloc'd memory with delete is undefined
+ // behavior, so free() must be used here.
+ free(mTrimedInputData);
+ mTrimedInputData = NULL;
+
+ free(mInputFrameData);
+ mInputFrameData = NULL;
+
+ // mEncParams and mHandle are created with new in the constructor,
+ // so delete is the matching release.
+ delete mEncParams;
+ mEncParams = NULL;
+
+ delete mHandle;
+ mHandle = NULL;
+
+ mStarted = false;
+
+ LOG_V("End\n");
+
+ return ENCODE_SUCCESS;
+}
+
+// Accept pre-start configuration. Supports VideoParamsTypeCommon (resolution,
+// framerate, bitrate; forces codedBufNum >= 2 and NV12/semi-planar input)
+// and VideoParamsTypeStoreMetaDataInBuffers. Rejects calls after start()
+// with ENCODE_ALREADY_INIT; unknown types are logged and ignored.
+Encode_Status PVSoftMPEG4Encoder::setParameters(
+ VideoParamConfigSet *videoEncParams)
+{
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+ if (mStarted) {
+ LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+ // Size check guards against caller/library struct version mismatch.
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ if(paramsCommon->codedBufNum < 2)
+ paramsCommon->codedBufNum =2;
+ mComParams = *paramsCommon;
+
+ mVideoWidth = mComParams.resolution.width;
+ mVideoHeight = mComParams.resolution.height;
+ mVideoFrameRate = mComParams.frameRate.frameRateNum / \
+ mComParams.frameRate.frameRateDenom;
+ mVideoBitRate = mComParams.rcParams.bitRate;
+ // Input is always treated as semi-planar regardless of rawFormat.
+ mVideoColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mStoreMetaDataInBuffers = metadata->isEnabled;
+
+ break;
+ }
+
+ default: {
+ LOG_I ("Wrong ParamType here\n");
+ break;
+ }
+ }
+
+ return ret;
+}
+
+// Return the current configuration for the requested parameter type
+// (common params or store-metadata flag). The caller supplies a struct
+// whose 'size' field must match the library's struct size.
+Encode_Status PVSoftMPEG4Encoder::getParameters(
+ VideoParamConfigSet *videoEncParams) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ *paramsCommon = mComParams;
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ metadata->isEnabled = mStoreMetaDataInBuffers;
+
+ break;
+ }
+
+ default: {
+ LOG_I ("Wrong ParamType here\n");
+ break;
+ }
+
+ }
+ return ret;
+}
+
+// Accept one raw input frame: update timestamps, lazily fetch the VOL
+// header on the first call, unpack the input (metadata handle or raw
+// bytes) into mTrimedInputData, and convert/copy it into mInputFrameData
+// for the subsequent getOutput() call. 'timeout' is currently unused.
+Encode_Status PVSoftMPEG4Encoder::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout)
+{
+ LOG_V("Begin\n");
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ // Only advance timestamps monotonically; out-of-order input is ignored.
+ if (mCurTimestampUs <= inBuffer->timeStamp) {
+ mLastTimestampUs = mCurTimestampUs;
+ mCurTimestampUs = inBuffer->timeStamp;
+ }
+
+ // First frame: fetch the VOL (codec-specific) header from the PV core.
+ if (mNumInputFrames < 0) {
+ if (!PVGetVolHeader(mHandle, mVolHeader, &mVolHeaderLength, 0)) {
+ LOG_E("Failed to get VOL header");
+ mSignalledError = true;
+ return ENCODE_FAIL;
+ }
+ LOG_I("Output VOL header: %d bytes", mVolHeaderLength);
+ mNumInputFrames++;
+ //return ENCODE_SUCCESS;
+ }
+
+ if (mStoreMetaDataInBuffers) {
+ // Input carries a serialized IntelMetadataBuffer instead of pixels.
+ IntelMetadataBuffer imb;
+ int32_t type;
+ int32_t value;
+ uint8_t *img;
+ const android::Rect rect(mVideoWidth, mVideoHeight);
+ android::status_t res;
+ ValueInfo vinfo;
+ ValueInfo *pvinfo = &vinfo;
+ CHECK(IMB_SUCCESS == imb.UnSerialize(inBuffer->data, inBuffer->size));
+ imb.GetType((::IntelMetadataBufferType&)type);
+ imb.GetValue(value);
+ // NOTE(review): pvinfo is passed by value here, so GetValueInfo can
+ // never set it to NULL as the branches below assume - confirm
+ // GetValueInfo's signature (ValueInfo*& vs ValueInfo*).
+ imb.GetValueInfo(pvinfo);
+ if(pvinfo == NULL) {
+ // No layout info: 'value' is a gralloc handle that must be mapped.
+ res = android::GraphicBufferMapper::get().lock((buffer_handle_t)value,
+ GRALLOC_USAGE_SW_READ_MASK,
+ rect, (void**)&img);
+ } else {
+ // Layout info present: 'value' is already a CPU-visible pointer.
+ img = (uint8_t*)value;
+ }
+ if (pvinfo != NULL)
+ trimBuffer(img, mTrimedInputData, pvinfo->width, pvinfo->height,
+ pvinfo->height, pvinfo->lumaStride);
+ else {
+ //NV12 Y-TILED
+ trimBuffer(img, mTrimedInputData, mVideoWidth, mVideoHeight,
+ ALIGN(mVideoHeight, 32), ALIGN(mVideoWidth, 128));
+ android::GraphicBufferMapper::get().unlock((buffer_handle_t)value);
+ }
+ } else {
+ memcpy(mTrimedInputData, inBuffer->data,
+ (mVideoWidth * mVideoHeight * 3 ) >> 1);
+ }
+
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ mTrimedInputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ } else {
+ // Fix: copy the unpacked input INTO mInputFrameData (which getOutput()
+ // reads); the original arguments were swapped, copying from the
+ // uninitialized destination buffer instead.
+ // NOTE(review): mInputFrameData is only allocated in the semi-planar
+ // path of initEncParams() - confirm this branch is reachable.
+ memcpy(mInputFrameData, mTrimedInputData,
+ (mVideoWidth * mVideoHeight * 3 ) >> 1);
+ }
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
+// Produce one encoded output buffer. If the caller requests codec data
+// (OUTPUT_CODEC_DATA) in combined mode, the cached VOL header is returned.
+// Otherwise the frame staged by encode() in mInputFrameData is compressed
+// via PVEncodeVideoFrame and flags (sync frame, end of frame) are set from
+// the PV hint track. 'timeout' is currently unused.
+Encode_Status PVSoftMPEG4Encoder::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout)
+{
+ LOG_V("Begin\n");
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint8_t *outPtr = outBuffer->data;
+ int32_t dataLength = outBuffer->bufferSize;
+ outBuffer->flag = 0;
+
+ if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES) &&
+ (outBuffer->format == OUTPUT_CODEC_DATA)) {
+ // Emit the VOL header captured by encode() as codec config data.
+ // NOTE(review): no bufferSize check before this memcpy - confirm
+ // callers always provide >= mVolHeaderLength bytes.
+ memcpy(outPtr, mVolHeader, mVolHeaderLength);
+ ++mNumInputFrames;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ outBuffer->dataSize = mVolHeaderLength;
+ outBuffer->remainingSize = 0;
+ return ENCODE_SUCCESS;
+ }
+
+ outBuffer->timeStamp = mCurTimestampUs;
+ LOG_I("info.mTimeUs %lld\n", outBuffer->timeStamp);
+
+ VideoEncFrameIO vin, vout;
+ memset(&vin, 0, sizeof(vin));
+ memset(&vout, 0, sizeof(vout));
+ // PV operates on 16-aligned planes; U/V offsets are derived from the
+ // aligned height * pitch.
+ vin.height = ((mVideoHeight + 15) >> 4) << 4;
+ vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ vin.timestamp = (outBuffer->timeStamp + 500) / 1000; // in ms
+ vin.yChan = mInputFrameData;
+ vin.uChan = vin.yChan + vin.height * vin.pitch;
+ vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
+
+ unsigned long modTimeMs = 0;
+ int32_t nLayer = 0;
+ MP4HintTrack hintTrack;
+ if (!PVEncodeVideoFrame(mHandle, &vin, &vout,
+ &modTimeMs, outPtr, &dataLength, &nLayer) ||
+ !PVGetHintTrack(mHandle, &hintTrack)) {
+ LOG_E("Failed to encode frame or get hink track at frame %lld",
+ mNumInputFrames);
+ mSignalledError = true;
+ hintTrack.CodeType = 0;
+ ret = ENCODE_FAIL;
+ }
+ LOG_I("dataLength %d\n", dataLength);
+ // The PV core must not have spilled into its overrun buffer.
+ CHECK(NULL == PVGetOverrunBuffer(mHandle));
+ if (hintTrack.CodeType == 0) { // I-frame serves as sync frame
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ }
+
+ ++mNumInputFrames;
+
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->dataSize = dataLength;
+
+ LOG_V("End\n");
+
+ return ret;
+}
+
diff --git a/videoencoder/PVSoftMPEG4Encoder.h b/videoencoder/PVSoftMPEG4Encoder.h
new file mode 100644
index 0000000..5d34e9f
--- /dev/null
+++ b/videoencoder/PVSoftMPEG4Encoder.h
@@ -0,0 +1,84 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __PV_SOFT_MPEG4_ENCODER__
+#define __PV_SOFT_MPEG4_ENCODER__
+
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "VideoEncoderInterface.h"
+#include "IntelMetadataBuffer.h"
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include "SimpleSoftOMXComponent.h"
+#include "mp4enc_api.h"
+
+// Software MPEG4/H.263 encoder implementing IVideoEncoder on top of the
+// PacketVideo (PV) mp4enc library. Used as a fallback when hardware
+// encoding via libva is unavailable (see Android.mk: ENABLE_IMG_GRAPHICS).
+class PVSoftMPEG4Encoder : IVideoEncoder {
+
+public:
+ // 'name' selects H.263 vs MPEG4 mode (OMX component name); asserted.
+ PVSoftMPEG4Encoder(const char *name);
+ virtual ~PVSoftMPEG4Encoder();
+
+ virtual Encode_Status start(void) {return initEncoder();}
+ // flush is a no-op for this software encoder.
+ virtual void flush(void) { }
+ virtual Encode_Status stop(void) {return releaseEncoder();}
+ // Stage one raw input frame; the compressed output is produced by the
+ // following getOutput() call.
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
+
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
+
+ virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
+ // Runtime config changes are not supported by the PV path; accepted as no-ops.
+ virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;}
+ virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) {return ENCODE_SUCCESS;}
+ virtual Encode_Status getMaxOutSize(uint32_t *maxSize) {return ENCODE_SUCCESS;}
+
+private:
+ void setDefaultParams(void);
+ VideoParamsCommon mComParams;
+
+ MP4EncodingMode mEncodeMode;
+ int32_t mVideoWidth;
+ int32_t mVideoHeight;
+ int32_t mVideoFrameRate;
+ int32_t mVideoBitRate;
+ int32_t mVideoColorFormat;
+ bool mStoreMetaDataInBuffers;
+ // IDR refresh interval in seconds: <0 never, 0 all-I, >0 seconds.
+ int32_t mIDRFrameRefreshIntervalInSec;
+
+ // -1 until the VOL header has been fetched for the first frame.
+ int64_t mNumInputFrames;
+ bool mStarted;
+ bool mSawInputEOS;
+ bool mSignalledError;
+ int64_t mCurTimestampUs;
+ int64_t mLastTimestampUs;
+
+ tagvideoEncControls *mHandle;
+ tagvideoEncOptions *mEncParams;
+ // Planar frame handed to the PV core (malloc'd in initEncParams).
+ uint8_t *mInputFrameData;
+ // Tightly packed copy of the caller's input frame (malloc'd).
+ uint8_t *mTrimedInputData;
+ // Cached VOL (codec config) header and its actual length.
+ uint8_t mVolHeader[256];
+ int32_t mVolHeaderLength;
+
+ Encode_Status initEncParams();
+ Encode_Status initEncoder();
+ Encode_Status releaseEncoder();
+
+ DISALLOW_EVIL_CONSTRUCTORS(PVSoftMPEG4Encoder);
+};
+
+#endif
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
new file mode 100644
index 0000000..47d8174
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -0,0 +1,1377 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderAVC.h"
+#include <va/va_tpi.h>
+#include <va/va_enc_h264.h>
+#include <bitstream.h>
+
+// Construct the H.264/AVC hardware encoder. Probes the VA display for the
+// highest supported profile (High 4.2, else Main 4.1; otherwise the base
+// class default remains) and initializes AVC-specific parameters and the
+// packed-header VA buffer ids.
+VideoEncoderAVC::VideoEncoderAVC()
+ :VideoEncoderBase() {
+ if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264High) == ENCODE_SUCCESS){
+ mComParams.profile = VAProfileH264High;
+ mComParams.level = 42;
+ }else if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264Main) == ENCODE_SUCCESS){
+ mComParams.profile = VAProfileH264Main;
+ mComParams.level = 41;
+ }
+ mVideoParamsAVC.basicUnitSize = 0;
+ mVideoParamsAVC.VUIFlag = 0;
+ mVideoParamsAVC.sliceNum.iSliceNum = 2;
+ mVideoParamsAVC.sliceNum.pSliceNum = 2;
+ mVideoParamsAVC.idrInterval = 2;
+ mVideoParamsAVC.ipPeriod = 1;
+ mVideoParamsAVC.maxSliceSize = 0;
+ mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB;
+ mSliceNum = 2;
+ mVideoParamsAVC.crop.LeftOffset = 0;
+ mVideoParamsAVC.crop.RightOffset = 0;
+ mVideoParamsAVC.crop.TopOffset = 0;
+ mVideoParamsAVC.crop.BottomOffset = 0;
+ mVideoParamsAVC.SAR.SarWidth = 0;
+ mVideoParamsAVC.SAR.SarHeight = 0;
+ mVideoParamsAVC.bEntropyCodingCABAC = 0;
+ mVideoParamsAVC.bWeightedPPrediction = 0;
+ mVideoParamsAVC.bDirect8x8Inference = 0;
+ mVideoParamsAVC.bConstIpred = 0;
+ mAutoReferenceSurfaceNum = 4;
+
+ // Packed header buffer ids start invalid; allocated when headers are sent.
+ packed_seq_header_param_buf_id = VA_INVALID_ID;
+ packed_seq_buf_id = VA_INVALID_ID;
+ packed_pic_header_param_buf_id = VA_INVALID_ID;
+ packed_pic_buf_id = VA_INVALID_ID;
+ packed_sei_header_param_buf_id = VA_INVALID_ID; /* the SEI buffer */
+ packed_sei_buf_id = VA_INVALID_ID;
+}
+
+// Start the AVC encoder by delegating to VideoEncoderBase::start(). The VCM
+// rate-control special case (one slice per MB row) is currently disabled.
+Encode_Status VideoEncoderAVC::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ // If we are in VCM, we will set slice num to max value
+ // mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16;
+ // mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
+ }
+
+ ret = VideoEncoderBase::start ();
+ CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start");
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+// Validate and adopt AVC-specific parameters. Rejects ipPeriod outside
+// [1,4] and an intraPeriod that is not a multiple of ipPeriod. For
+// Baseline profile, CABAC / direct 8x8 / weighted P-prediction are forced
+// off, as Baseline does not permit them.
+Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+ // AVC parames
+ if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ if(encParamsAVC->ipPeriod == 0 || encParamsAVC->ipPeriod >4)
+ return ENCODE_INVALID_PARAMS;
+
+ if((mComParams.intraPeriod >1)&&(mComParams.intraPeriod % encParamsAVC->ipPeriod !=0))
+ return ENCODE_INVALID_PARAMS;
+
+ mVideoParamsAVC = *encParamsAVC;
+ if(mComParams.profile == VAProfileH264Baseline){
+ mVideoParamsAVC.bEntropyCodingCABAC = 0;
+ mVideoParamsAVC.bDirect8x8Inference = 0;
+ mVideoParamsAVC.bWeightedPPrediction = 0;
+ }
+ return ENCODE_SUCCESS;
+}
+
+// Copy the current AVC-specific parameters into the caller's struct after
+// verifying the struct size matches the library's VideoParamsAVC.
+Encode_Status VideoEncoderAVC:: derivedGetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+ // AVC parames
+ if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encParamsAVC = mVideoParamsAVC;
+ return ENCODE_SUCCESS;
+
+}
+
+// Apply a runtime (post-start) AVC configuration change. Supported:
+// intra/IDR period (re-emits headers via mNewHeader), NAL/slice size (MTU),
+// on-demand IDR requests (rejected when B frames are in use), and slice
+// count. Unknown types are logged and ignored.
+Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ switch (videoEncConfig->type) {
+ case VideoConfigTypeAVCIntraPeriod: {
+
+ VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+ reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+ // Config Intra Peroid
+ if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ // Same constraints as derivedSetParams: ipPeriod in [1,4] and
+ // intraPeriod divisible by ipPeriod.
+ if(configAVCIntraPeriod->ipPeriod == 0 || configAVCIntraPeriod->ipPeriod >4)
+ return ENCODE_INVALID_PARAMS;
+ if((configAVCIntraPeriod->intraPeriod >1)&&(configAVCIntraPeriod->intraPeriod % configAVCIntraPeriod->ipPeriod !=0))
+ return ENCODE_INVALID_PARAMS;
+
+ mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval;
+ mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod;
+ mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod;
+ // Force SPS/PPS re-emission so the new GOP structure is signaled.
+ mNewHeader = true;
+ break;
+ }
+ case VideoConfigTypeNALSize: {
+ // Config MTU
+ VideoConfigNALSize *configNALSize =
+ reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+ if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsAVC.maxSliceSize = configNALSize->maxSliceSize;
+ mRenderMaxSliceSize = true;
+ break;
+ }
+ case VideoConfigTypeIDRRequest: {
+ // On-demand IDR is not supported when B frames exist (ipPeriod > 1).
+ if(mVideoParamsAVC.ipPeriod >1)
+ return ENCODE_FAIL;
+ else
+ mNewHeader = true;
+ break;
+ }
+ case VideoConfigTypeSliceNum: {
+
+ VideoConfigSliceNum *configSliceNum =
+ reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+ // Config Slice size
+ if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsAVC.sliceNum = configSliceNum->sliceNum;
+ break;
+ }
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+// Read back a runtime AVC configuration value (intra/IDR period, max NAL
+// size, or slice count) into the caller's struct; IDR-request queries are
+// accepted as no-ops. Struct sizes are validated before writing.
+Encode_Status VideoEncoderAVC:: derivedGetConfig(
+ VideoParamConfigSet *videoEncConfig) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+ LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+ switch (videoEncConfig->type) {
+
+ case VideoConfigTypeAVCIntraPeriod: {
+
+ VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+ reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+ if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval;
+ configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod;
+ configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod;
+
+ break;
+ }
+ case VideoConfigTypeNALSize: {
+
+ VideoConfigNALSize *configNALSize =
+ reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+ if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configNALSize->maxSliceSize = mVideoParamsAVC.maxSliceSize;
+ break;
+ }
+ case VideoConfigTypeIDRRequest: {
+ // Nothing to report for an IDR request; accepted silently.
+ break;
+
+ }
+ case VideoConfigTypeSliceNum: {
+
+ VideoConfigSliceNum *configSliceNum =
+ reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+ if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ configSliceNum->sliceNum = mVideoParamsAVC.sliceNum;
+ break;
+ }
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+// Derive the frame type (IDR/I/P/B) for the current frame number from the
+// GOP parameters (intraPeriod, idrInterval, ipPeriod) and store it in the
+// task, setting the sync-frame flag for IDR/I frames. The modular
+// arithmetic below is order-sensitive; frame_num is reduced into the
+// current IDR period first, then into the current intra period.
+Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) {
+ uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
+ FrameType frametype;
+ uint32_t frame_num = mFrameNum;
+ uint32_t intraPeriod = mComParams.intraPeriod;
+
+ if (idrPeroid != 0) {
+ // With B frames (ipPeriod > 1) the IDR period spans one extra frame.
+ if(mVideoParamsAVC.ipPeriod > 1)
+ frame_num = frame_num % (idrPeroid + 1);
+ else
+ frame_num = frame_num % idrPeroid ;
+ }else{
+ // intraPeriod 0 means "no periodic I frames": treat as effectively infinite.
+ if (mComParams.intraPeriod == 0)
+ intraPeriod = 0xFFFFFFFF;
+ }
+
+
+ if(frame_num ==0){
+ frametype = FTYPE_IDR;
+ }else if(intraPeriod ==1)
+ // only I frame need intraPeriod=idrInterval=ipPeriod=0
+ frametype = FTYPE_I;
+ else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame
+ if((frame_num > 1) &&((frame_num -1)%intraPeriod == 0))
+ frametype = FTYPE_I;
+ else
+ frametype = FTYPE_P;
+ } else {
+ if(((frame_num-1)%intraPeriod == 0)&&(frame_num >intraPeriod))
+ frametype = FTYPE_I;
+ else{
+ frame_num = frame_num%intraPeriod;
+ if(frame_num == 0)
+ frametype = FTYPE_B;
+ else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0)
+ frametype = FTYPE_P;
+ else
+ frametype = FTYPE_B;
+ }
+ }
+
+ if (frametype == FTYPE_IDR || frametype == FTYPE_I)
+ task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+
+ // Log a mismatch between the caller-provided type and the derived one.
+ if (frametype != task->type) {
+ const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", "EP", "S", "IDR"};
+ if ((uint32_t) task->type < 9)
+ LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]);
+ else
+ LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type);
+ }
+
+//temparily comment out to avoid uninitialize error
+// if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9)
+ task->type = frametype;
+
+ return ENCODE_SUCCESS;
+}
+
+// Dispatch on the requested output format and copy the corresponding slice
+// of the coded bitstream into outBuffer: codec data (SPS/PPS), a single
+// NALU (with or without start code), or length/NALU-length-prefixed data.
+// Unknown formats fail with ENCODE_FAIL.
+Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V("Begin\n");
+
+ switch (outBuffer->format) {
+ case OUTPUT_CODEC_DATA: {
+ // Output the codec data
+ ret = outputCodecData(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputCodecData");
+ break;
+ }
+
+ case OUTPUT_ONE_NAL: {
+ // Output only one NAL unit
+ ret = outputOneNALU(outBuffer, true);
+ CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+ break;
+ }
+
+ case OUTPUT_ONE_NAL_WITHOUT_STARTCODE: {
+ ret = outputOneNALU(outBuffer, false);
+ CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+ break;
+ }
+
+ case OUTPUT_LENGTH_PREFIXED: {
+ // Output length prefixed
+ ret = outputLengthPrefixed(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputLengthPrefixed");
+ break;
+ }
+
+ case OUTPUT_NALULENGTHS_PREFIXED: {
+ // Output nalu lengths ahead of bitstream
+ ret = outputNaluLengthsPrefixed(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputNaluLengthsPrefixed");
+ break;
+ }
+
+ default:
+ LOG_E("Invalid buffer mode\n");
+ ret = ENCODE_FAIL;
+ break;
+ }
+
+ LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+
+CLEAN_UP:
+
+
+ LOG_V("End\n");
+ return ret;
+}
+
+// Scan an Annex-B buffer for the next NAL unit. On success fills in
+// *nalOffset (bytes up to and including the start code), *nalType (5-bit
+// nal_unit_type) and *nalSize (payload bytes up to the next start code or
+// end of buffer). Returns ENCODE_FAIL if the buffer does not begin with an
+// Annex-B start code (00 00 01 with >= 2 leading zeros). The hot loop uses
+// x86 SSE2 inline assembly to skip 16-byte chunks with no zero bytes, so
+// this path is 32-bit x86 specific.
+Encode_Status VideoEncoderAVC::getOneNALUnit(
+ uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize,
+ uint32_t *nalType, uint32_t *nalOffset, uint32_t status) {
+ uint32_t pos = 0;
+ uint32_t zeroByteCount = 0;
+ // State machine for start-code detection: indexed by [zeros seen][byte],
+ // where byte is 0x00 or 0x01; state 3 means "start code found".
+ uint32_t singleByteTable[3][2] = {{1,0},{2,0},{2,3}};
+ uint32_t dataRemaining = 0;
+ uint8_t *dataPtr;
+
+ // Don't need to check parameters here as we just checked by caller
+ while ((inBuffer[pos++] == 0x00)) {
+ zeroByteCount ++;
+ if (pos >= bufSize) //to make sure the buffer to be accessed is valid
+ break;
+ }
+
+ if (inBuffer[pos - 1] != 0x01 || zeroByteCount < 2) {
+ LOG_E("The stream is not AnnexB format \n");
+ LOG_E("segment status is %x \n", status);
+ return ENCODE_FAIL; //not AnnexB, we won't process it
+ }
+
+ *nalType = (*(inBuffer + pos)) & 0x1F;
+ LOG_I ("NAL type = 0x%x\n", *nalType);
+
+ zeroByteCount = 0;
+ *nalOffset = pos;
+
+ // Single-NALU coded buffers need no further scanning.
+ if (status & VA_CODED_BUF_STATUS_SINGLE_NALU) {
+ *nalSize = bufSize - pos;
+ return ENCODE_SUCCESS;
+ }
+
+ dataPtr = inBuffer + pos;
+ // NOTE(review): the "+ 1" makes dataRemaining one byte larger than the
+ // bytes actually left after pos - looks like an off-by-one that could
+ // read one byte past the buffer; confirm against the coded-buffer layout.
+ dataRemaining = bufSize - pos + 1;
+
+ while ((dataRemaining > 0) && (zeroByteCount < 3)) {
+ // Fast path: on a 16-byte-aligned pointer with no pending zeros,
+ // use SSE2 to skip whole 16-byte chunks containing no 0x00 bytes.
+ if (((((intptr_t)dataPtr) & 0xF ) == 0) && (0 == zeroByteCount)
+ && (dataRemaining > 0xF)) {
+
+ __asm__ (
+ //Data input
+ "movl %1, %%ecx\n\t"//data_ptr=>ecx
+ "movl %0, %%eax\n\t"//data_remaing=>eax
+ //Main compare loop
+ //
+ "0:\n\t" //MATCH_8_ZERO:
+ "pxor %%xmm0,%%xmm0\n\t"//set 0=>xmm0
+ "pcmpeqb (%%ecx),%%xmm0\n\t"//data_ptr=xmm0,(byte==0)?0xFF:0x00
+ "pmovmskb %%xmm0, %%edx\n\t"//edx[0]=xmm0[7],edx[1]=xmm0[15],...,edx[15]=xmm0[127]
+ "test $0xAAAA, %%edx\n\t"//edx& 1010 1010 1010 1010b
+ "jnz 2f\n\t"//Not equal to zero means that at least one byte 0x00
+
+ "1:\n\t" //PREPARE_NEXT_MATCH:
+ "sub $0x10, %%eax\n\t"//16 + ecx --> ecx
+ "add $0x10, %%ecx\n\t"//eax-16 --> eax
+ "cmp $0x10, %%eax\n\t"
+ "jge 0b\n\t"//search next 16 bytes
+
+ "2:\n\t" //DATA_RET:
+ "movl %%ecx, %1\n\t"//output ecx->data_ptr
+ "movl %%eax, %0\n\t"//output eax->data_remaining
+ : "+m"(dataRemaining), "+m"(dataPtr)
+ :
+ :"eax", "ecx", "edx", "xmm0"
+ );
+ if (0 >= dataRemaining) {
+ break;
+ }
+
+ }
+ //check the value of each byte
+ if ((*dataPtr) >= 2) {
+
+ zeroByteCount = 0;
+
+ }
+ else {
+ zeroByteCount = singleByteTable[zeroByteCount][*dataPtr];
+ }
+
+ dataPtr ++;
+ dataRemaining --;
+ }
+
+ // Found a start code (00 00 01): size excludes the 3 start-code bytes.
+ if ((3 == zeroByteCount) && (dataRemaining > 0)) {
+
+ *nalSize = bufSize - dataRemaining - *nalOffset - 3;
+
+ } else if (0 == dataRemaining) {
+
+ // Hit end of buffer: the NAL extends to the end.
+ *nalSize = bufSize - *nalOffset;
+ }
+ return ENCODE_SUCCESS;
+}
+
+// Measure the total size (start codes included) of the leading SPS (NAL 7)
+// and PPS (NAL 8) units at the front of an Annex-B buffer, stopping at the
+// first non-header NAL. *headerSize is 0 when no header is present.
+Encode_Status VideoEncoderAVC::getHeader(
+ uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status) {
+
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t size = 0;
+ uint8_t *buf = inBuffer;
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ *headerSize = 0;
+ CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+ if (bufSize == 0) {
+ //bufSize shoule not be 0, error happens
+ LOG_E("Buffer size is 0\n");
+ return ENCODE_FAIL;
+ }
+
+ while (1) {
+ nalType = nalSize = nalOffset = 0;
+ ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset, status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ LOG_I("NAL type = %d, NAL size = %d, offset = %d\n", nalType, nalSize, nalOffset);
+ size = nalSize + nalOffset;
+
+ // Codec_data should be SPS or PPS
+ if (nalType == 7 || nalType == 8) {
+ *headerSize += size;
+ buf += size;
+ bufSize -= size;
+ } else {
+ LOG_V("No header found or no header anymore\n");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+// Copy the SPS/PPS header bytes from the current coded-buffer segment into
+// outBuffer and advance the segment offset. Returns ENCODE_NO_REQUEST_DATA
+// when the segment holds no header, ENCODE_BUFFER_TOO_SMALL (with
+// remainingSize set) when the caller's buffer cannot hold the whole header.
+Encode_Status VideoEncoderAVC::outputCodecData(
+ VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t headerSize = 0;
+
+ ret = getHeader((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &headerSize, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getHeader");
+ if (headerSize == 0) {
+ outBuffer->dataSize = 0;
+ mCurSegment = NULL;
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ if (headerSize <= outBuffer->bufferSize) {
+ memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+ mTotalSizeCopied += headerSize;
+ mOffsetInSeg += headerSize;
+ outBuffer->dataSize = headerSize;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+ } else {
+ // we need a big enough buffer, otherwise we won't output anything
+ outBuffer->dataSize = 0;
+ outBuffer->remainingSize = headerSize;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ return ret;
+}
+
+Encode_Status VideoEncoderAVC::outputOneNALU(
+    VideoEncOutputBuffer *outBuffer, bool startCode) {
+
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+    // Parse the next NAL unit at the current segment position.
+    ret = getOneNALUnit((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+        mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+    CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+    // With startCode the prefix bytes travel with the payload; without it only
+    // the payload is emitted.
+    uint32_t copyLen = startCode ? (nalSize + nalOffset) : nalSize;
+
+    if (copyLen > outBuffer->bufferSize) {
+        // if nothing to be copied out, set flag to invalid
+        outBuffer->dataSize = 0;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+        outBuffer->remainingSize = copyLen;
+        LOG_W("Buffer size too small\n");
+        return ENCODE_BUFFER_TOO_SMALL;
+    }
+
+    const uint8_t *src = (uint8_t *)mCurSegment->buf + mOffsetInSeg;
+    if (!startCode) {
+        src += nalOffset;   // skip the start-code prefix
+    }
+    memcpy(outBuffer->data, src, copyLen);
+
+    // The segment cursor always advances past prefix + payload, regardless of
+    // how many bytes were actually copied out.
+    mTotalSizeCopied += copyLen;
+    mOffsetInSeg += (nalSize + nalOffset);
+    outBuffer->dataSize = copyLen;
+    outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+    outBuffer->remainingSize = 0;
+
+    // check if all data in current segment has been copied out
+    if (mCurSegment->size == mOffsetInSeg) {
+        if (mCurSegment->next != NULL) {
+            mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+            mOffsetInSeg = 0;
+        } else {
+            LOG_V("End of stream\n");
+            outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+            mCurSegment = NULL;
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+// Re-emits the Annex-B coded stream as 4-byte big-endian length-prefixed NAL
+// units (AVCC-style), walking the coded-buffer segment chain NAL by NAL.
+// Advances mCurSegment/mOffsetInSeg/mTotalSizeCopied as it goes; on a full
+// copy sets ENDOFFRAME, on a short output buffer returns
+// ENCODE_BUFFER_TOO_SMALL with PARTIALFRAME and an estimated remainingSize.
+Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t nalType = 0;
+ uint32_t nalSize = 0;
+ uint32_t nalOffset = 0;
+ uint32_t sizeCopiedHere = 0;
+
+ CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+ while (1) {
+
+ // Sanity check: segment cursor and output cursor must still be in range.
+ if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+ LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+ return ENCODE_FAIL;
+ }
+
+ // we need to handle the whole bitstream NAL by NAL
+ ret = getOneNALUnit(
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+ mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+ CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+ if (nalSize + 4 <= outBuffer->bufferSize - sizeCopiedHere) {
+ // write the NAL length to bit stream (4-byte big-endian prefix)
+ outBuffer->data[sizeCopiedHere] = (nalSize >> 24) & 0xff;
+ outBuffer->data[sizeCopiedHere + 1] = (nalSize >> 16) & 0xff;
+ outBuffer->data[sizeCopiedHere + 2] = (nalSize >> 8) & 0xff;
+ outBuffer->data[sizeCopiedHere + 3] = nalSize & 0xff;
+
+ sizeCopiedHere += 4;
+ mTotalSizeCopied += 4;
+
+ // Copy the payload only; the start-code prefix is replaced by the
+ // length field written above.
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, nalSize);
+
+ sizeCopiedHere += nalSize;
+ mTotalSizeCopied += nalSize;
+ mOffsetInSeg += (nalSize + nalOffset);
+
+ } else {
+ outBuffer->dataSize = sizeCopiedHere;
+ // In case the start code is 3-byte length but we use 4-byte for length prefixed
+ // so the remainingSize size may larger than the remaining data size
+ outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ LOG_E("Buffer size too small\n");
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ // check if all data in current segment has been copied out
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("End of stream\n");
+ outBuffer->dataSize = sizeCopiedHere;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ break;
+ }
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+// Copies the coded stream (start codes included) after a fixed-size header
+// area and fills that area with a NAL-length table:
+//   u32 magic 'nall' | u32 nalNum | u32 length[0] | u32 length[1] | ...
+// Payload starts at outBuffer->data + NALUINFO_OFFSET; outBuffer->offset is
+// set accordingly. Returns ENCODE_BUFFER_TOO_SMALL with PARTIALFRAME when the
+// output buffer cannot hold the next NAL.
+Encode_Status VideoEncoderAVC::outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    uint32_t sizeCopiedHere = 0;
+    const uint32_t NALUINFO_OFFSET = 256;
+    uint32_t nalNum = 0;
+
+    CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+    while (1) {
+
+        if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+            LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+            return ENCODE_FAIL;
+        }
+
+        // we need to handle the whole bitstream NAL by NAL
+        ret = getOneNALUnit(
+            (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+            mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset, mCurSegment->status);
+        CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+        if (nalSize + 4 <= outBuffer->bufferSize - NALUINFO_OFFSET - sizeCopiedHere) {
+
+            // Copy prefix + payload; the start code is kept in the payload area.
+            memcpy(outBuffer->data + NALUINFO_OFFSET + sizeCopiedHere,
+                (uint8_t *)mCurSegment->buf + mOffsetInSeg, nalSize + nalOffset);
+
+            sizeCopiedHere += nalSize + nalOffset;
+            mTotalSizeCopied += nalSize + nalOffset;
+            mOffsetInSeg += (nalSize + nalOffset);
+
+        } else {
+            outBuffer->dataSize = sizeCopiedHere;
+            // In case the start code is 3-byte length but we use 4-byte for length prefixed
+            // so the remainingSize size may larger than the remaining data size
+            outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+            outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+            LOG_E("Buffer size too small\n");
+            return ENCODE_BUFFER_TOO_SMALL;
+        }
+
+        nalNum ++;
+        // BUGFIX: entry nalNum lives at byte offset (nalNum+1)*4 and is 4 bytes
+        // wide; without this check a frame with more than 62 NAL units would
+        // write the length table past NALUINFO_OFFSET, corrupting the payload.
+        if ((nalNum + 2) * 4 > NALUINFO_OFFSET) {
+            LOG_E("Too many NAL units for the NALU info area\n");
+            return ENCODE_FAIL;
+        }
+        uint32_t *nalLength = (uint32_t *) (outBuffer->data + (nalNum+1) * 4);
+
+        *nalLength = nalSize + nalOffset;
+
+        // check if all data in current segment has been copied out
+        if (mCurSegment->size == mOffsetInSeg) {
+            if (mCurSegment->next != NULL) {
+                mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+                mOffsetInSeg = 0;
+            } else {
+                LOG_V("End of stream\n");
+                outBuffer->dataSize = sizeCopiedHere;
+                outBuffer->remainingSize = 0;
+                outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+                mCurSegment = NULL;
+                break;
+            }
+        }
+    }
+
+    outBuffer->offset = NALUINFO_OFFSET;
+    uint32_t *nalHead = (uint32_t *) outBuffer->data;
+    *nalHead = 0x4E414C4C; //'nall'
+    *(++nalHead) = nalNum;
+
+    return ENCODE_SUCCESS;
+}
+
+// Issues all per-frame libva render calls for one encode task, in the order
+// the driver expects: HRD / sequence params (first frame or new header),
+// dynamic misc params (max slice size, bitrate, AIR, CIR, frame rate),
+// picture params, packed headers (when supported), then slice params.
+Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ // (Re)send sequence-level state on the first frame or when a new header
+ // was requested via mNewHeader.
+ if (mFrameNum == 0 || mNewHeader) {
+ if (mRenderHrd) {
+ ret = renderHrd();
+ mRenderHrd = false;
+ CHECK_ENCODE_STATUS_RETURN("renderHrd");
+ }
+
+ mFrameNum = 0;
+ ret = renderSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ if (mNewHeader) {
+ mNewHeader = false; //Set to require new header filed to false
+ mFrameNum = 0; //reset mFrameNum to 0
+ updateFrameInfo(task); //recalculate frame info if mNewHeader is set true after PrepareFrameInfo in encode()
+ }
+ }
+
+ if (mRenderMaxSliceSize && mVideoParamsAVC.maxSliceSize != 0) {
+ ret = renderMaxSliceSize();
+ CHECK_ENCODE_STATUS_RETURN("renderMaxSliceSize");
+ mRenderMaxSliceSize = false;
+ }
+
+ // Intra-frame QP control forces a bitrate parameter refresh on I/IDR frames.
+ if (mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I))
+ mRenderBitRate = true;
+
+ if (mRenderBitRate) {
+ ret = VideoEncoderBase::renderDynamicBitrate(task);
+ CHECK_ENCODE_STATUS_RETURN("renderDynamicBitrate");
+ }
+
+ // Adaptive intra refresh: only when the refresh type requests it.
+ if (mRenderAIR &&
+ (mComParams.refreshType == VIDEO_ENC_AIR ||
+ mComParams.refreshType == VIDEO_ENC_BOTH)) {
+
+ ret = renderAIR();
+ CHECK_ENCODE_STATUS_RETURN("renderAIR");
+
+ mRenderAIR = false;
+ }
+
+ if (mRenderCIR) {
+
+ ret = renderCIR();
+ CHECK_ENCODE_STATUS_RETURN("renderCIR");
+
+ mRenderCIR = false;
+ }
+
+ if (mRenderFrameRate) {
+
+ ret = VideoEncoderBase::renderDynamicFrameRate();
+ CHECK_ENCODE_STATUS_RETURN("renderDynamicFrameRate");
+
+ mRenderFrameRate = false;
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ // Packed SPS/PPS headers are only sent when the driver advertises support.
+ if (mFrameNum == 0 && (mEncPackedHeaders != VA_ATTRIB_NOT_SUPPORTED)) {
+ ret = renderPackedSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPackedSequenceParams");
+
+ ret = renderPackedPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPackedPictureParams");
+ }
+
+ ret = renderSliceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+ LOG_V( "End\n");
+ return ENCODE_SUCCESS;
+}
+
+
+// Sends the VCM max-slice-size misc parameter to the driver. No-op (success)
+// when the rate-control mode is not VCM.
+Encode_Status VideoEncoderAVC::renderMaxSliceSize() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+        LOG_W ("Not in VCM mode, but call send_max_slice_size\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterMaxSliceSize *maxSliceSizeParam;
+    VABufferID miscParamBufferID;
+
+    vaStatus = vaCreateBuffer(
+        mVADisplay, mVAContext,
+        VAEncMiscParameterBufferType,
+        sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize),
+        1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeMaxSliceSize;
+    maxSliceSizeParam = (VAEncMiscParameterMaxSliceSize *)miscEncParamBuf->data;
+
+    maxSliceSizeParam->max_slice_size = mVideoParamsAVC.maxSliceSize;
+
+    // BUGFIX: log while the buffer is still mapped. maxSliceSizeParam points
+    // into the mapped region; the original dereferenced it after
+    // vaUnmapBuffer(), which is a use-after-unmap read.
+    LOG_I( "max slice size = %d\n", maxSliceSizeParam->max_slice_size);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+// Sends the cyclic-intra-refresh (CIR) misc parameter for this context.
+Encode_Status VideoEncoderAVC::renderCIR(){
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    LOG_I( "%s Begin\n", __FUNCTION__);
+
+    VABufferID miscParamBufferCIRid;
+    VAEncMiscParameterBuffer *misc_param;
+    VAEncMiscParameterCIR *misc_cir_param;
+
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+        VAEncMiscParameterBufferType,
+        sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterCIR),
+        1,
+        NULL,
+        &miscParamBufferCIRid);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferCIRid, (void **)&misc_param);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    misc_param->type = VAEncMiscParameterTypeCIR;
+    misc_cir_param = (VAEncMiscParameterCIR *)misc_param->data;
+    misc_cir_param->cir_num_mbs = mComParams.cirParams.cir_num_mbs;
+    LOG_I( "cir_num_mbs %d \n", misc_cir_param->cir_num_mbs);
+
+    // BUGFIX: capture the vaUnmapBuffer() result. The original discarded it,
+    // so CHECK_VA_STATUS_RETURN re-tested the stale (successful) vaMapBuffer
+    // status and unmap failures went undetected.
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferCIRid);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferCIRid, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+// Sends the adaptive-intra-refresh (AIR) misc parameter for this context.
+Encode_Status VideoEncoderAVC::renderAIR() {
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterAIR *airParams;
+    VABufferID miscParamBufferID;
+
+    // BUGFIX: size the buffer with sizeof(VAEncMiscParameterBuffer), not
+    // sizeof(miscEncParamBuf) -- the latter is only the size of a pointer,
+    // so the allocation was several bytes short of the header struct.
+    vaStatus = vaCreateBuffer(
+        mVADisplay, mVAContext,
+        VAEncMiscParameterBufferType,
+        sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterAIR),
+        1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeAIR;
+    airParams = (VAEncMiscParameterAIR *)miscEncParamBuf->data;
+
+    airParams->air_num_mbs = mComParams.airParams.airMBs;
+    airParams->air_threshold= mComParams.airParams.airThreshold;
+    airParams->air_auto = mComParams.airParams.airAuto;
+
+    // BUGFIX: log before unmapping; airParams points into the mapped region
+    // and the original dereferenced it after vaUnmapBuffer().
+    LOG_I( "airThreshold = %d\n", airParams->air_threshold);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+// Maps a frame size in macroblocks to the smallest supported H.264 level_idc
+// (e.g. 41 == level 4.1). Falls back to 51 with a warning when the frame is
+// larger than any entry in the table.
+int VideoEncoderAVC::calcLevel(int numMbs) {
+    static const struct {
+        int maxMbs;     // exclusive upper bound on frame size in MBs
+        int levelIdc;   // level to report for frames below that bound
+    } kLevelTable[] = {
+        { 1620,  30 },
+        { 3600,  31 },
+        { 5120,  32 },
+        { 8192,  41 },
+        { 8704,  42 },
+        { 22080, 50 },
+        { 36864, 51 },
+    };
+
+    for (size_t i = 0; i < sizeof(kLevelTable) / sizeof(kLevelTable[0]); i++) {
+        if (numMbs < kLevelTable[i].maxMbs) {
+            return kLevelTable[i].levelIdc;
+        }
+    }
+
+    LOG_W("No such level can support that resolution");
+    return 51;
+}
+
+// Builds and renders the H.264 sequence parameter buffer plus the rate-control
+// and frame-rate misc parameter buffers (mSeqParamBuf / mRcParamBuf /
+// mFrameRateParamBuf). mSeqParamBuf is kept alive so
+// renderPackedSequenceParams() can map it later.
+Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferH264 avcSeqParams = VAEncSequenceParameterBufferH264();
+ VAEncMiscParameterBuffer *miscEncRCParamBuf;
+ VAEncMiscParameterBuffer *miscEncFrameRateParamBuf;
+ VAEncMiscParameterRateControl *rcMiscParam;
+ VAEncMiscParameterFrameRate *framerateParam;
+ int level;
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ LOG_V( "Begin\n\n");
+ // Rate-control misc parameter buffer.
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+ 1, NULL,
+ &mRcParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ // Frame-rate misc parameter buffer.
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate),
+ 1, NULL,
+ &mFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl;
+ rcMiscParam = (VAEncMiscParameterRateControl *)miscEncRCParamBuf->data;
+ miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate;
+ framerateParam = (VAEncMiscParameterFrameRate *)miscEncFrameRateParamBuf->data;
+ // set up the sequence params for HW
+ // avcSeqParams.level_idc = mLevel;
+ avcSeqParams.intra_period = mComParams.intraPeriod;
+ avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval;
+ avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod;
+ // Round the frame dimensions up to whole 16x16 macroblocks.
+ avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16;
+ avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16;
+
+ // Pick the smallest level that supports this frame size in MBs.
+ level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs);
+ avcSeqParams.level_idc = level;
+ avcSeqParams.bits_per_second = mComParams.rcParams.bitRate;
+ // Integer frame rate rounded to nearest (num + denom/2) / denom.
+ framerateParam->framerate =
+ (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom;
+ rcMiscParam->initial_qp = mComParams.rcParams.initQP;
+ rcMiscParam->min_qp = mComParams.rcParams.minQP;
+ rcMiscParam->max_qp = mComParams.rcParams.maxQP;
+ if (mComParams.rcParams.enableIntraFrameQPControl) {
+ rcMiscParam->min_qp = mComParams.rcParams.I_minQP;
+ rcMiscParam->max_qp = mComParams.rcParams.I_maxQP;
+ }
+ rcMiscParam->window_size = mComParams.rcParams.windowSize;
+ //target bitrate is sent to libva through Sequence Parameter Buffer
+ rcMiscParam->bits_per_second = 0;
+ rcMiscParam->basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage
+ avcSeqParams.intra_period = mComParams.intraPeriod;
+ //avcSeqParams.vui_flag = 248;
+ avcSeqParams.vui_parameters_present_flag = mVideoParamsAVC.VUIFlag;
+ // VUI timing info: Tc = num_units_in_tick / time_scale (frame doubling).
+ avcSeqParams.num_units_in_tick = frameRateDenom;
+ avcSeqParams.time_scale = 2 * frameRateNum;
+ avcSeqParams.seq_parameter_set_id = 0;
+ // Use caller-supplied cropping when present; otherwise derive cropping
+ // from the padding added by MB alignment.
+ if (mVideoParamsAVC.crop.LeftOffset ||
+ mVideoParamsAVC.crop.RightOffset ||
+ mVideoParamsAVC.crop.TopOffset ||
+ mVideoParamsAVC.crop.BottomOffset) {
+ avcSeqParams.frame_cropping_flag = true;
+ avcSeqParams.frame_crop_left_offset = mVideoParamsAVC.crop.LeftOffset;
+ avcSeqParams.frame_crop_right_offset = mVideoParamsAVC.crop.RightOffset;
+ avcSeqParams.frame_crop_top_offset = mVideoParamsAVC.crop.TopOffset;
+ avcSeqParams.frame_crop_bottom_offset = mVideoParamsAVC.crop.BottomOffset;
+ } else {
+ avcSeqParams.frame_cropping_flag = false;
+
+ if (mComParams.resolution.width & 0xf) {
+ avcSeqParams.frame_cropping_flag = true;
+ uint32_t AWidth = (mComParams.resolution.width + 0xf) & (~0xf);
+ avcSeqParams.frame_crop_right_offset = ( AWidth - mComParams.resolution.width ) / 2;
+ }
+
+ if (mComParams.resolution.height & 0xf) {
+ avcSeqParams.frame_cropping_flag = true;
+ uint32_t AHeight = (mComParams.resolution.height + 0xf) & (~0xf);
+ avcSeqParams.frame_crop_bottom_offset = ( AHeight - mComParams.resolution.height ) / 2;
+ }
+ }
+
+ // Optional sample-aspect-ratio signalling via VUI (Extended_SAR).
+ if(avcSeqParams.vui_parameters_present_flag && (mVideoParamsAVC.SAR.SarWidth || mVideoParamsAVC.SAR.SarHeight)) {
+ avcSeqParams.vui_fields.bits.aspect_ratio_info_present_flag = true;
+ avcSeqParams.aspect_ratio_idc = 0xff /* Extended_SAR */;
+ avcSeqParams.sar_width = mVideoParamsAVC.SAR.SarWidth;
+ avcSeqParams.sar_height = mVideoParamsAVC.SAR.SarHeight;
+ }
+
+ // Two reference frames are needed only when B frames are in use.
+ avcSeqParams.max_num_ref_frames = 1;
+
+ if(avcSeqParams.ip_period > 1)
+ avcSeqParams.max_num_ref_frames = 2;
+
+ LOG_V("===h264 sequence params===\n");
+ LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id);
+ LOG_I( "level_idc = %d\n", (uint32_t)avcSeqParams.level_idc);
+ LOG_I( "intra_period = %d\n", avcSeqParams.intra_period);
+ LOG_I( "idr_interval = %d\n", avcSeqParams.intra_idr_period);
+ LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs);
+ LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs);
+ LOG_I( "bitrate = %d\n", rcMiscParam->bits_per_second);
+ LOG_I( "frame_rate = %d\n", framerateParam->framerate);
+ LOG_I( "initial_qp = %d\n", rcMiscParam->initial_qp);
+ LOG_I( "min_qp = %d\n", rcMiscParam->min_qp);
+ LOG_I( "basic_unit_size = %d\n", rcMiscParam->basic_unit_size);
+ LOG_I( "bDirect8x8Inference = %d\n",mVideoParamsAVC.bDirect8x8Inference);
+
+ // Not sure whether these settings work for all drivers
+ avcSeqParams.seq_fields.bits.frame_mbs_only_flag = 1;
+ avcSeqParams.seq_fields.bits.pic_order_cnt_type = 0;
+ avcSeqParams.seq_fields.bits.direct_8x8_inference_flag = mVideoParamsAVC.bDirect8x8Inference;
+
+ avcSeqParams.seq_fields.bits.log2_max_frame_num_minus4 = 0;
+ avcSeqParams.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 2;
+// avcSeqParams.time_scale = 900;
+// avcSeqParams.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_sacle */
+ // Not sure whether these settings work for all drivers
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mRcParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaUnmapBuffer(mVADisplay, mFrameRateParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(avcSeqParams), 1, &avcSeqParams,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ return ENCODE_SUCCESS;
+}
+
+// Bit-packs the SPS from the (still live) mSeqParamBuf and submits it as a
+// packed-header parameter + data buffer pair.
+Encode_Status VideoEncoderAVC::renderPackedSequenceParams(EncodeTask *) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncSequenceParameterBufferH264 *avcSeqParams;
+    VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
+    unsigned char *packed_seq_buffer = NULL;
+    unsigned int length_in_bits;
+
+    LOG_V("Begin\n");
+
+    vaStatus = vaMapBuffer(mVADisplay, mSeqParamBuf, (void **)&avcSeqParams);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // BUGFIX: build_packed_seq_buffer() heap-allocates the bitstream, but the
+    // original leaked it on every early-return path below; free it before the
+    // CHECK macros can return.
+    length_in_bits = build_packed_seq_buffer(&packed_seq_buffer, mComParams.profile, avcSeqParams);
+    packed_header_param_buffer.type = VAEncPackedHeaderSequence;
+    packed_header_param_buffer.bit_length = length_in_bits;
+    packed_header_param_buffer.has_emulation_bytes = 0;
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+        VAEncPackedHeaderParameterBufferType,
+        sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
+        &packed_seq_header_param_buf_id);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_seq_buffer);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+        VAEncPackedHeaderDataBufferType,
+        (length_in_bits + 7) / 8, 1, packed_seq_buffer,
+        &packed_seq_buf_id);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_seq_buffer);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_header_param_buf_id, 1);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_seq_buffer);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_seq_buf_id, 1);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_seq_buffer);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    vaStatus = vaUnmapBuffer(mVADisplay, mSeqParamBuf);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_seq_buffer);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    free(packed_seq_buffer);
+
+    LOG_V("End\n");
+
+    // All checks passed, so report success explicitly instead of returning
+    // the raw VAStatus through the Encode_Status return type.
+    return ENCODE_SUCCESS;
+}
+
+// Builds and renders the H.264 picture parameter buffer (mPicParamBuf) for
+// this encode task. With auto-reference the driver manages the reference
+// list; otherwise one short-term reference surface is installed explicitly.
+Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    // Value-initialized: all fields start at zero.
+    VAEncPictureParameterBufferH264 avcPicParams = VAEncPictureParameterBufferH264();
+    uint32_t RefFrmIdx;
+
+    LOG_V( "Begin\n\n");
+    // set picture params for HW
+    if (mAutoReference == false) {
+        // Invalidate the whole reference list, then install the single
+        // short-term reference used for this frame.
+        for (RefFrmIdx = 0; RefFrmIdx < 16; RefFrmIdx++) {
+            avcPicParams.ReferenceFrames[RefFrmIdx].picture_id = VA_INVALID_ID;
+            avcPicParams.ReferenceFrames[RefFrmIdx].flags = VA_PICTURE_H264_INVALID;
+        }
+        avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface;
+        avcPicParams.ReferenceFrames[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        avcPicParams.CurrPic.picture_id= task->rec_surface;
+        // Not sure whether these settings work for all drivers
+        avcPicParams.CurrPic.TopFieldOrderCnt = mFrameNum * 2;
+
+        avcPicParams.pic_fields.bits.transform_8x8_mode_flag = 0;
+        avcPicParams.seq_parameter_set_id = 0;
+        avcPicParams.pic_parameter_set_id = 0;
+
+        avcPicParams.pic_init_qp = 26;
+        avcPicParams.num_ref_idx_l0_active_minus1 = 0;
+        avcPicParams.num_ref_idx_l1_active_minus1 = 0;
+
+        avcPicParams.pic_fields.bits.weighted_pred_flag = 0;
+        avcPicParams.pic_fields.bits.weighted_bipred_idc = 0;
+        avcPicParams.pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+        // CLEANUP: the original also zeroed idr_pic_flag,
+        // entropy_coding_mode_flag, reference_pic_flag, last_picture and
+        // frame_num here, and set transform_8x8_mode_flag twice; those stores
+        // were dead -- every one is unconditionally overwritten below (or on
+        // the very next lines) -- so they have been removed.
+        avcPicParams.frame_num = mFrameNum;
+        avcPicParams.pic_fields.bits.reference_pic_flag = 1;
+        // Not sure whether these settings work for all drivers
+    }else {
+        // Driver-managed references: hand over the whole surface pool.
+        avcPicParams.CurrPic.picture_id= VA_INVALID_SURFACE;
+        for(uint32_t i =0; i< mAutoReferenceSurfaceNum; i++)
+            avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i];
+    }
+
+    avcPicParams.pic_fields.bits.idr_pic_flag = (mFrameNum == 0);
+    avcPicParams.pic_fields.bits.entropy_coding_mode_flag = mVideoParamsAVC.bEntropyCodingCABAC;
+    avcPicParams.coded_buf = task->coded_buffer;
+    avcPicParams.last_picture = 0;
+
+    LOG_V("======h264 picture params======\n");
+    LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id);
+    LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id);
+    LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf);
+    //LOG_I( "picture_width = %d\n", avcPicParams.picture_width);
+    //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height);
+
+    vaStatus = vaCreateBuffer(
+        mVADisplay, mVAContext,
+        VAEncPictureParameterBufferType,
+        sizeof(avcPicParams),
+        1,&avcPicParams,
+        &mPicParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Bit-packs the PPS from mPicParamBuf and submits it as a packed-header
+// parameter + data buffer pair.
+Encode_Status VideoEncoderAVC::renderPackedPictureParams(EncodeTask *) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncPictureParameterBufferH264 *avcPicParams;
+    VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
+    unsigned char *packed_pic_buffer = NULL;
+    unsigned int length_in_bits;
+
+    LOG_V("Begin\n");
+
+    vaStatus = vaMapBuffer(mVADisplay, mPicParamBuf, (void **)&avcPicParams);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // build_packed_pic_buffer() heap-allocates the bitstream; free it on every
+    // exit path below (the original leaked it on failures).
+    length_in_bits = build_packed_pic_buffer(&packed_pic_buffer, avcPicParams);
+    packed_header_param_buffer.type = VAEncPackedHeaderPicture;
+    packed_header_param_buffer.bit_length = length_in_bits;
+    packed_header_param_buffer.has_emulation_bytes = 0;
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+        VAEncPackedHeaderParameterBufferType,
+        sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
+        &packed_pic_header_param_buf_id);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_pic_buffer);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+        VAEncPackedHeaderDataBufferType,
+        (length_in_bits + 7) / 8, 1, packed_pic_buffer,
+        &packed_pic_buf_id);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_pic_buffer);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_header_param_buf_id, 1);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_pic_buffer);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &packed_pic_buf_id, 1);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_pic_buffer);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    // BUGFIX: unmap the picture-parameter buffer mapped above. The original
+    // unmapped mSeqParamBuf here, leaving mPicParamBuf mapped for good.
+    vaStatus = vaUnmapBuffer(mVADisplay, mPicParamBuf);
+    if (vaStatus != VA_STATUS_SUCCESS)
+        free(packed_pic_buffer);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    free(packed_pic_buffer);
+
+    LOG_V("End\n");
+
+    // All checks passed; report success through the proper return type.
+    return ENCODE_SUCCESS;
+}
+
+// Builds and renders one VAEncSliceParameterBufferH264 per slice. The frame
+// is cut into sliceNum horizontal MB-row bands; the first (maxSliceNum %
+// sliceNum) slices get one extra MB row so the whole frame is covered.
+Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    uint32_t sliceNum = 0;
+    uint32_t sliceIndex = 0;
+    uint32_t sliceHeightInMB = 0;
+    uint32_t maxSliceNum = 0;      // frame height in MB rows = max slice count
+    uint32_t minSliceNum = 0;
+    uint32_t actualSliceHeightInMB = 0;
+    uint32_t startRowInMB = 0;
+    uint32_t modulus = 0;
+    uint32_t RefFrmIdx;
+
+    LOG_V( "Begin\n\n");
+
+    maxSliceNum = (mComParams.resolution.height + 15) / 16;
+    minSliceNum = 1;
+
+    if (task->type == FTYPE_I || task->type == FTYPE_IDR) {
+        sliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
+    } else {
+        sliceNum = mVideoParamsAVC.sliceNum.pSliceNum;
+    }
+
+    // Clamp the requested slice count to [1, MB rows].
+    if (sliceNum < minSliceNum) {
+        LOG_W("Slice Number is too small");
+        sliceNum = minSliceNum;
+    }
+
+    if (sliceNum > maxSliceNum) {
+        LOG_W("Slice Number is too big");
+        sliceNum = maxSliceNum;
+    }
+
+    mSliceNum= sliceNum;
+    modulus = maxSliceNum % sliceNum;
+    sliceHeightInMB = (maxSliceNum - modulus) / sliceNum ;
+
+    vaStatus = vaCreateBuffer(
+        mVADisplay, mVAContext,
+        VAEncSliceParameterBufferType,
+        sizeof(VAEncSliceParameterBufferH264),
+        sliceNum, NULL,
+        &mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    VAEncSliceParameterBufferH264 *sliceParams, *currentSlice;
+
+    vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+    if(!sliceParams)
+        return ENCODE_NULL_PTR;
+    // BUGFIX: zero ALL sliceNum elements. The original memset covered only
+    // sizeof(VAEncSliceParameterBufferH264) -- one element -- leaving the
+    // remaining slices' unassigned fields uninitialized. (A redundant second
+    // null check of sliceParams was also removed.)
+    memset(sliceParams, 0, sliceNum * sizeof(VAEncSliceParameterBufferH264));
+
+    currentSlice = sliceParams;
+    startRowInMB = 0;
+    for (sliceIndex = 0; sliceIndex < sliceNum; sliceIndex++) {
+        currentSlice = sliceParams + sliceIndex;
+        actualSliceHeightInMB = sliceHeightInMB;
+        if (sliceIndex < modulus) {
+            actualSliceHeightInMB ++;   // distribute the leftover MB rows
+        }
+
+        // starting MB row number for this slice, suppose macroblock 16x16
+        currentSlice->macroblock_address = startRowInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16;
+        // slice height measured in MB
+        currentSlice->num_macroblocks = actualSliceHeightInMB * ((mComParams.resolution.width + 0xf) & ~0xf) / 16;
+        if(task->type == FTYPE_I||task->type == FTYPE_IDR)
+            currentSlice->slice_type = 2;
+        else if(task->type == FTYPE_P)
+            currentSlice->slice_type = 0;
+        else if(task->type == FTYPE_B)
+            currentSlice->slice_type = 1;
+        currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking;
+
+        // This is a temporary fix suggested by Binglin for bad encoding quality issue
+        // TODO: We need a long term design for this field
+        //currentSlice->slice_flags.bits.uses_long_term_ref = 0;
+        //currentSlice->slice_flags.bits.is_long_term_ref = 0;
+
+        LOG_V("======AVC slice params======\n");
+        LOG_I( "slice_index = %d\n", (int) sliceIndex);
+        LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address);
+        LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks);
+        LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type);
+        LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc);
+
+        // Not sure whether these settings work for all drivers
+        currentSlice->pic_parameter_set_id = 0;
+        currentSlice->pic_order_cnt_lsb = mFrameNum * 2;
+        currentSlice->direct_spatial_mv_pred_flag = 0;
+        currentSlice->num_ref_idx_l0_active_minus1 = 0; /* FIXME: ??? */
+        currentSlice->num_ref_idx_l1_active_minus1 = 0;
+        currentSlice->cabac_init_idc = 0;
+        currentSlice->slice_qp_delta = 0;
+        // NOTE(review): this overwrites the mComParams.disableDeblocking value
+        // assigned (and logged) above -- the configured setting is silently
+        // discarded. Kept as-is to preserve driver-visible behavior; confirm
+        // which value is intended.
+        currentSlice->disable_deblocking_filter_idc = 0;
+        currentSlice->slice_alpha_c0_offset_div2 = 2;
+        currentSlice->slice_beta_offset_div2 = 2;
+        currentSlice->idr_pic_id = 0;
+        for (RefFrmIdx = 0; RefFrmIdx < 32; RefFrmIdx++) {
+            currentSlice->RefPicList0[RefFrmIdx].picture_id = VA_INVALID_ID;
+            currentSlice->RefPicList0[RefFrmIdx].flags = VA_PICTURE_H264_INVALID;
+        }
+        currentSlice->RefPicList0[0].picture_id = task->ref_surface;
+        currentSlice->RefPicList0[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        // Not sure whether these settings work for all drivers
+
+        startRowInMB += actualSliceHeightInMB;
+    }
+
+    vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h
new file mode 100644
index 0000000..87c9407
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.h
@@ -0,0 +1,73 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_AVC_H__
+#define __VIDEO_ENCODER_AVC_H__
+
+#include "VideoEncoderBase.h"
+
+// H.264/AVC encoder built on the libva-based VideoEncoderBase. Adds AVC
+// specific parameter/config handling, NAL-unit-level output formatting and
+// packed SPS/PPS header submission.
+class VideoEncoderAVC : public VideoEncoderBase {
+
+public:
+ VideoEncoderAVC();
+ ~VideoEncoderAVC() {};
+
+ virtual Encode_Status start();
+
+ // AVC-specific set/get for parameters and runtime configuration.
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig);
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig);
+
+protected:
+
+ // Issues all per-frame libva render calls for one encode task.
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ // Formats coded output (codec data / single NALU / length-prefixed).
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+ virtual Encode_Status updateFrameInfo(EncodeTask* task);
+private:
+ // Local Methods
+
+ // Bitstream parsing helpers over the coded-buffer segments.
+ Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset, uint32_t status);
+ Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize, uint32_t status);
+ // Output formatting variants used by getExtFormatOutput().
+ Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer);
+ Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode);
+ Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer);
+ Encode_Status outputNaluLengthsPrefixed(VideoEncOutputBuffer *outBuffer);
+
+ // Per-frame libva parameter submission helpers.
+ Encode_Status renderMaxSliceSize();
+ Encode_Status renderAIR();
+ Encode_Status renderCIR();
+ Encode_Status renderSequenceParams(EncodeTask *task);
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderSliceParams(EncodeTask *task);
+ // Smallest H.264 level_idc supporting a frame of numMbs macroblocks.
+ int calcLevel(int numMbs);
+ Encode_Status renderPackedSequenceParams(EncodeTask *task);
+ Encode_Status renderPackedPictureParams(EncodeTask *task);
+
+public:
+
+ VideoParamsAVC mVideoParamsAVC;   // current AVC encode parameters
+ uint32_t mSliceNum;               // slice count used for the current frame
+ // libva buffer IDs for packed SPS/PPS/SEI headers and their payloads.
+ VABufferID packed_seq_header_param_buf_id;
+ VABufferID packed_seq_buf_id;
+ VABufferID packed_pic_header_param_buf_id;
+ VABufferID packed_pic_buf_id;
+ VABufferID packed_sei_header_param_buf_id; /* the SEI buffer */
+ VABufferID packed_sei_buf_id;
+
+};
+
+#endif /* __VIDEO_ENCODER_AVC_H__ */
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
new file mode 100644
index 0000000..b3fd3c2
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -0,0 +1,1928 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderBase.h"
+#include "IntelMetadataBuffer.h"
+#include <va/va_tpi.h>
+#include <va/va_android.h>
+
+// Constructor: reset all encoder state in the initializer list, apply
+// codec-independent defaults, then open and initialize a libva display.
+// On vaInitialize() failure mInitialized is cleared so start() refuses
+// to run against a dead VA display.
+VideoEncoderBase::VideoEncoderBase()
+ :mInitialized(true)
+ ,mStarted(false)
+ ,mVADisplay(NULL)
+ ,mVAContext(VA_INVALID_ID)
+ ,mVAConfig(VA_INVALID_ID)
+ ,mVAEntrypoint(VAEntrypointEncSlice)
+ ,mNewHeader(false)
+ ,mRenderMaxSliceSize(false)
+ ,mRenderQP (false)
+ ,mRenderAIR(false)
+ ,mRenderCIR(false)
+ ,mRenderFrameRate(false)
+ ,mRenderBitRate(false)
+ ,mRenderHrd(false)
+ ,mRenderMultiTemporal(false)
+ ,mForceKFrame(false)
+ ,mSeqParamBuf(0)
+ ,mPicParamBuf(0)
+ ,mSliceParamBuf(0)
+ ,mAutoRefSurfaces(NULL)
+ ,mRefSurface(VA_INVALID_SURFACE)
+ ,mRecSurface(VA_INVALID_SURFACE)
+ ,mFrameNum(0)
+ ,mCodedBufSize(0)
+ ,mAutoReference(false)
+ ,mAutoReferenceSurfaceNum(4)
+ ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED)
+ ,mSliceSizeOverflow(false)
+ ,mCurOutputTask(NULL)
+ ,mOutCodedBuffer(0)
+ ,mOutCodedBufferPtr(NULL)
+ ,mCurSegment(NULL)
+ ,mOffsetInSeg(0)
+ ,mTotalSize(0)
+ ,mTotalSizeCopied(0)
+ ,mFrameSkipped(false)
+ ,mSupportedSurfaceMemType(0)
+ ,mVASurfaceMappingAction(0)
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ ,mSessionFlag(0)
+#endif
+ {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ // here the display can be any value, use following one
+ // just for consistence purpose, so don't define it
+ unsigned int display = 0x18C34078;
+ int majorVersion = -1;
+ int minorVersion = -1;
+
+ setDefaultParams();
+
+ LOG_V("vaGetDisplay \n");
+ mVADisplay = vaGetDisplay(&display);
+ if (mVADisplay == NULL) {
+ LOG_E("vaGetDisplay failed.");
+ }
+
+ vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
+ LOG_V("vaInitialize \n");
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
+ // Remember the failure; start() checks mInitialized before encoding.
+ mInitialized = false;
+ }
+}
+
+// Destructor: tear down any active encode session via stop(), then close
+// the libva display opened in the constructor.  stop() is a no-op when
+// the encoder was never started, so calling it unconditionally is safe.
+VideoEncoderBase::~VideoEncoderBase() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ stop();
+
+ vaStatus = vaTerminate(mVADisplay);
+ LOG_V( "vaTerminate\n");
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
+ } else {
+ mVADisplay = NULL;
+ }
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+ // Release the cross-process buffer-sharing context tied to this session.
+ IntelMetadataBuffer::ClearContext(mSessionFlag, false);
+#endif
+}
+
+// start(): create the VA config, context, reconstructed/reference (or
+// auto-reference) surfaces and the coded-buffer pool.  Must be called
+// after setParameters() and before encode().
+// Returns ENCODE_SUCCESS, ENCODE_ALREADY_INIT if already started, or a
+// driver/not-supported error.
+//
+// BUGFIX(review): the packed-header / max-ref-frames attributes were read
+// back from vaAttrib[3]/vaAttrib[4], which are uninitialized at that
+// point (only vaAttribNumber entries of vaAttrib are filled).  The values
+// actually returned by vaGetConfigAttributes live in vaAttrib_tmp[]; use
+// those instead.
+Encode_Status VideoEncoderBase::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ if (!mInitialized) {
+ LOGE("Encoder Initialize fail can not start");
+ return ENCODE_DRIVER_FAIL;
+ }
+
+ if (mStarted) {
+ LOG_V("Encoder has been started\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ // Non-NV12 input either goes through a GFX color conversion (IMG
+ // graphics stack) or is rejected outright.
+ if (mComParams.rawFormat != RAW_FORMAT_NV12)
+#ifdef IMG_GFX
+ mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT;
+#else
+ return ENCODE_NOT_SUPPORTED;
+#endif
+
+ if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){
+ LOGE("Unsupported resolution width %d, height %d\n",
+ mComParams.resolution.width, mComParams.resolution.height);
+ return ENCODE_NOT_SUPPORTED;
+ }
+ queryAutoReferenceConfig(mComParams.profile);
+
+ // Query what the driver supports, then build the attribute list
+ // actually passed to vaCreateConfig from the supported subset.
+ VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax];
+ int vaAttribNumber = 0;
+ vaAttrib_tmp[0].type = VAConfigAttribRTFormat;
+ vaAttrib_tmp[1].type = VAConfigAttribRateControl;
+ vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference;
+ vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders;
+ vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames;
+ vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt;
+
+ vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile,
+ VAEntrypointEncSlice, &vaAttrib_tmp[0], 6);
+ CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
+
+ if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat;
+ vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420;
+ vaAttribNumber++;
+ }
+
+ vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl;
+ vaAttrib[vaAttribNumber].value = mComParams.rcMode;
+ vaAttribNumber++;
+
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference;
+ vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
+ vaAttribNumber++;
+
+ if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders;
+ vaAttrib[vaAttribNumber].value = vaAttrib_tmp[3].value;
+ vaAttribNumber++;
+ mEncPackedHeaders = vaAttrib_tmp[3].value;
+ }
+
+ if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames;
+ vaAttrib[vaAttribNumber].value = vaAttrib_tmp[4].value;
+ vaAttribNumber++;
+ mEncMaxRefFrames = vaAttrib_tmp[4].value;
+ }
+
+ if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED)
+ {
+ vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt;
+ vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer;
+ vaAttribNumber++;
+ }
+
+ LOG_V( "======VA Configuration======\n");
+ LOG_I( "profile = %d\n", mComParams.profile);
+ LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
+ LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
+ LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
+ LOG_I( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
+ LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
+ LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
+ LOG_I( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
+ LOG_I( "vaAttribNumber is %d\n", vaAttribNumber);
+ LOG_I( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer);
+
+ LOG_V( "vaCreateConfig\n");
+
+ vaStatus = vaCreateConfig(
+ mVADisplay, mComParams.profile, mVAEntrypoint,
+ &vaAttrib[0], vaAttribNumber, &(mVAConfig));
+// &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports
+ CHECK_VA_STATUS_RETURN("vaCreateConfig");
+
+ querySupportedSurfaceMemTypes();
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ // Following three features are only enabled in VCM mode
+ mRenderMaxSliceSize = true;
+ mRenderAIR = true;
+ mRenderBitRate = true;
+ }
+
+ LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
+
+ // Surface alignment differs per mode: 16x16 for the classic ref/rec
+ // pair, driver-managed for VP8 auto-reference, 64x32 for AVC
+ // auto-reference (Merrifield stride requirement).
+ uint32_t stride_aligned, height_aligned;
+ if(mAutoReference == false){
+ stride_aligned = (mComParams.resolution.width + 15) & ~15;
+ height_aligned = (mComParams.resolution.height + 15) & ~15;
+ }else{
+ // this alignment is used for AVC. For vp8 encode, driver will handle the alignment
+ if(mComParams.profile == VAProfileVP8Version0_3)
+ {
+ stride_aligned = mComParams.resolution.width;
+ height_aligned = mComParams.resolution.height;
+ mVASurfaceMappingAction |= MAP_ACTION_COPY;
+ }
+ else
+ {
+ stride_aligned = (mComParams.resolution.width + 63) & ~63; //on Merr, stride must be 64 aligned.
+ height_aligned = (mComParams.resolution.height + 31) & ~31;
+ mVASurfaceMappingAction |= MAP_ACTION_ALIGN64;
+ }
+ }
+
+ if(mAutoReference == false){
+ mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+ mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+
+ }else {
+ mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
+ for(uint32_t i = 0; i < mAutoReferenceSurfaceNum; i ++)
+ mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
+ }
+ CHECK_VA_STATUS_RETURN("vaCreateSurfaces");
+
+ //Prepare all Surfaces to be added into Context
+ uint32_t contextSurfaceCnt;
+ if(mAutoReference == false )
+ contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
+ else
+ contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
+
+ VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
+ int32_t index = -1;
+ android::List<VASurfaceMap *>::iterator map_node;
+
+ for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++)
+ {
+ contextSurfaces[++index] = (*map_node)->getVASurface();
+ (*map_node)->setTracked();
+ }
+
+ if(mAutoReference == false){
+ contextSurfaces[++index] = mRefSurface;
+ contextSurfaces[++index] = mRecSurface;
+ } else {
+ for (uint32_t i=0; i < mAutoReferenceSurfaceNum; i++)
+ contextSurfaces[++index] = mAutoRefSurfaces[i];
+ }
+
+ //Initialize and save the VA context ID
+ LOG_V( "vaCreateContext\n");
+ vaStatus = vaCreateContext(mVADisplay, mVAConfig,
+#ifdef IMG_GFX
+ mComParams.resolution.width,
+ mComParams.resolution.height,
+#else
+ stride_aligned,
+ height_aligned,
+#endif
+ VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt,
+ &(mVAContext));
+ CHECK_VA_STATUS_RETURN("vaCreateContext");
+
+ delete [] contextSurfaces;
+
+ LOG_I("Success to create libva context width %d, height %d\n",
+ mComParams.resolution.width, mComParams.resolution.height);
+
+ uint32_t maxSize = 0;
+ ret = getMaxOutSize(&maxSize);
+ CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
+
+ // Create CodedBuffer for output
+ VABufferID VACodedBuffer;
+
+ for(uint32_t i = 0; i <mComParams.codedBufNum; i++) {
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncCodedBufferType,
+ mCodedBufSize,
+ 1, NULL,
+ &VACodedBuffer);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
+
+ mVACodedBufferList.push_back(VACodedBuffer);
+ }
+
+ if (ret == ENCODE_SUCCESS)
+ mStarted = true;
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+// encode(): submit one raw input frame.  Acquires a source VA surface and
+// a coded buffer from the pool (blocking / timed / non-blocking per
+// 'timeout'), assembles an EncodeTask, submits it to the driver via
+// vaBeginPicture/sendEncodeCommand/vaEndPicture, and queues the task for
+// getOutput().  On any failure after the coded buffer was taken, the
+// buffer is returned to the pool.
+//
+// BUGFIX(review): the timed wait for a free coded buffer used the wrong
+// condition variable (mEncodeTask_Cond, which is signalled when a task is
+// queued) while holding mCodedBuffer_Lock; coded-buffer availability is
+// signalled on mCodedBuffer_Cond, so the timed wait could never be woken
+// by a returned buffer.  Wait on mCodedBuffer_Cond instead.
+Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ if (!mStarted) {
+ LOG_E("Encoder has not initialized yet\n");
+ return ENCODE_NOT_INIT;
+ }
+
+ CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+ //======Prepare all resources encoder needed=====.
+
+ //Prepare encode vaSurface
+ VASurfaceID sid = VA_INVALID_SURFACE;
+ ret = manageSrcSurface(inBuffer, &sid);
+ CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
+
+ //Prepare CodedBuffer
+ mCodedBuffer_Lock.lock();
+ if(mVACodedBufferList.empty()){
+ if(timeout == FUNC_BLOCK)
+ mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
+ else if (timeout > 0) {
+ if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Time out wait for Coded buffer.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
+ }
+ else {//Nonblock
+ mCodedBuffer_Lock.unlock();
+ LOG_E("Coded buffer is not ready now.\n");
+ return ENCODE_DEVICE_BUSY;
+ }
+ }
+
+ // Re-check after waking: stop() may have drained the pool.
+ if(mVACodedBufferList.empty()){
+ mCodedBuffer_Lock.unlock();
+ return ENCODE_DEVICE_BUSY;
+ }
+ VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
+ mVACodedBufferList.erase(mVACodedBufferList.begin());
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
+
+ //All resources are ready, start to assemble EncodeTask
+ EncodeTask* task = new EncodeTask();
+
+ task->completed = false;
+ task->enc_surface = sid;
+ task->coded_buffer = coded_buf;
+ task->timestamp = inBuffer->timeStamp;
+ task->priv = inBuffer->priv;
+
+ //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
+ task->type = inBuffer->type;
+ task->flag = inBuffer->flag;
+ PrepareFrameInfo(task);
+
+ if(mAutoReference == false){
+ //Setup ref /rec frames
+ //TODO: B frame support, temporary use same logic
+ switch (inBuffer->type) {
+ case FTYPE_UNKNOWN:
+ case FTYPE_IDR:
+ case FTYPE_I:
+ case FTYPE_P:
+ {
+ // Swap rec/ref unless the previous frame was skipped (its
+ // reconstructed surface never became a valid reference).
+ if(!mFrameSkipped) {
+ VASurfaceID tmpSurface = mRecSurface;
+ mRecSurface = mRefSurface;
+ mRefSurface = tmpSurface;
+ }
+
+ task->ref_surface = mRefSurface;
+ task->rec_surface = mRecSurface;
+
+ break;
+ }
+ case FTYPE_B:
+ default:
+ LOG_V("Something wrong, B frame may not be supported in this mode\n");
+ ret = ENCODE_NOT_SUPPORTED;
+ goto CLEAN_UP;
+ }
+ }else {
+ task->ref_surface = VA_INVALID_SURFACE;
+ task->rec_surface = VA_INVALID_SURFACE;
+ }
+ //======Start Encoding, add task to list======
+ LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
+
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
+
+ ret = sendEncodeCommand(task);
+ CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
+
+ vaStatus = vaEndPicture(mVADisplay, mVAContext);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
+
+ LOG_V("Add Task %p into Encode Task list\n", task);
+ mEncodeTask_Lock.lock();
+ mEncodeTaskList.push_back(task);
+ mEncodeTask_Cond.signal();
+ mEncodeTask_Lock.unlock();
+
+ mFrameNum ++;
+
+ LOG_V("encode return Success\n");
+
+ return ENCODE_SUCCESS;
+
+CLEAN_UP:
+
+ delete task;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("encode return error=%x\n", ret);
+
+ return ret;
+}
+
+/*
+ 1. Firstly check if one task is outputting data, if yes, continue outputting, if not try to get one from list.
+ 2. Due to block/non-block/block with timeout 3 modes, if task is not completed, then sync surface, if yes,
+ start output data
+ 3. Use variable curoutputtask to record task which is getOutput() working on to avoid push again when get failure
+ on non-block/block with timeout modes.
+ 4. if complete all output data, curoutputtask should be set NULL
+*/
+// getOutput(): retrieve encoded data for the oldest submitted task.
+// Supports block (FUNC_BLOCK), timed, and non-block modes; a task that is
+// not yet complete is kept in mCurOutputTask so a later call can resume.
+// On success the coded data is copied into outBuffer (possibly across
+// multiple calls for large frames); on error the task is dropped and its
+// coded buffer is returned to the pool.
+Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ bool useLocalBuffer = false;
+
+ CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+ // No task in progress: pull one from the queue, waiting per 'timeout'.
+ if (mCurOutputTask == NULL) {
+ mEncodeTask_Lock.lock();
+ if(mEncodeTaskList.empty()) {
+ LOG_V("getOutput CurrentTask is NULL\n");
+ if(timeout == FUNC_BLOCK) {
+ LOG_V("waiting for task....\n");
+ mEncodeTask_Cond.wait(mEncodeTask_Lock);
+ } else if (timeout > 0) {
+ LOG_V("waiting for task in %i ms....\n", timeout);
+ if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
+ mEncodeTask_Lock.unlock();
+ LOG_E("Time out wait for encode task.\n");
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ } else {//Nonblock
+ mEncodeTask_Lock.unlock();
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ }
+
+ // Re-check after waking: stop() may have drained the task list.
+ if(mEncodeTaskList.empty()){
+ mEncodeTask_Lock.unlock();
+ return ENCODE_DATA_NOT_READY;
+ }
+ mCurOutputTask = *(mEncodeTaskList.begin());
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ mEncodeTask_Lock.unlock();
+ }
+
+ //sync/query/wait task if not completed
+ if (mCurOutputTask->completed == false) {
+ VASurfaceStatus vaSurfaceStatus;
+
+ if (timeout == FUNC_BLOCK) {
+ //block mode, direct sync surface to output data
+
+ mOutCodedBuffer = mCurOutputTask->coded_buffer;
+
+ // Check frame skip
+ // Need encoding to be completed before calling query surface below to
+ // get the right skip frame flag for current frame
+ // It is a requirement of video driver
+ // vaSyncSurface syncs the wrong frame when rendering the same surface multiple times,
+ // so use vaMapbuffer instead
+ LOG_I ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer);
+ if (mOutCodedBufferPtr == NULL) {
+ // vaMapBuffer on the coded buffer blocks until encoding done.
+ vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer");
+ CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
+ }
+
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
+ CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
+ mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+
+ mCurOutputTask->completed = true;
+
+ } else {
+ //For both block with timeout and non-block mode, query surface, if ready, output data
+ LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
+
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
+ if (vaSurfaceStatus & VASurfaceReady) {
+ mOutCodedBuffer = mCurOutputTask->coded_buffer;
+ mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+ mCurOutputTask->completed = true;
+ //if need to call SyncSurface again ?
+
+ } else {//not encode complet yet, but keep all context and return directly
+ // mCurOutputTask is retained so the next call resumes here.
+ return ENCODE_DATA_NOT_READY;
+ }
+
+ }
+
+ }
+
+ //start to output data
+ ret = prepareForOutput(outBuffer, &useLocalBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
+
+ //copy all flags to outBuffer
+ outBuffer->offset = 0;
+ outBuffer->flag = mCurOutputTask->flag;
+ outBuffer->type = mCurOutputTask->type;
+ outBuffer->timeStamp = mCurOutputTask->timestamp;
+ outBuffer->priv = mCurOutputTask->priv;
+
+ if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
+ ret = outputAllData(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+ }else {
+ // Codec-specific output formats (NALU-separated etc.) are handled
+ // by the derived class.
+ ret = getExtFormatOutput(outBuffer);
+ CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
+ }
+
+ LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize);
+
+ ret = cleanupForOutput();
+ CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
+
+ LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
+
+ return ENCODE_SUCCESS;
+
+CLEAN_UP:
+
+ // Error path: free a locally-allocated output buffer, unmap the coded
+ // buffer, drop the task and return its coded buffer to the pool.
+ if (outBuffer->data && (useLocalBuffer == true)) {
+ delete[] outBuffer->data;
+ outBuffer->data = NULL;
+ useLocalBuffer = false;
+ }
+
+ if (mOutCodedBufferPtr != NULL) {
+ vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+ mOutCodedBufferPtr = NULL;
+ mCurSegment = NULL;
+ }
+
+ delete mCurOutputTask;
+ mCurOutputTask = NULL;
+ // NOTE(review): if we reach here before mOutCodedBuffer was assigned
+ // for this task, a stale/zero buffer ID is pushed back — verify.
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(mOutCodedBuffer);
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("getOutput return error=%x\n", ret);
+ return ret;
+}
+
+// flush(): reset per-stream state.  Only the frame counter is reset here;
+// queued tasks and coded buffers are left untouched (they are drained by
+// stop()).
+void VideoEncoderBase::flush() {
+
+ LOG_V( "Begin\n");
+
+ // reset the properities
+ mFrameNum = 0;
+
+ LOG_V( "end\n");
+}
+
+// stop(): tear down the encode session — free auto-reference surfaces,
+// drain the coded-buffer pool and pending task list (waking any waiters),
+// release source surface maps, then destroy the VA context and config.
+// Always resets the output-path bookkeeping before returning, even if a
+// VA destroy call fails (via the CLEAN_UP label).  Idempotent: returns
+// early when the encoder was never started.
+Encode_Status VideoEncoderBase::stop() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ LOG_V( "Begin\n");
+
+ // It is possible that above pointers have been allocated
+ // before we set mStarted to true
+ if (!mStarted) {
+ LOG_V("Encoder has been stopped\n");
+ return ENCODE_SUCCESS;
+ }
+ if (mAutoRefSurfaces) {
+ delete[] mAutoRefSurfaces;
+ mAutoRefSurfaces = NULL;
+ }
+
+ // Drain the coded-buffer pool; broadcast wakes encode() waiters so
+ // they see the empty list and bail out with DEVICE_BUSY.
+ // NOTE(review): broadcast is issued after unlock — confirm the waiter
+ // re-checks the list (it does in encode()).
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.clear();
+ mCodedBuffer_Lock.unlock();
+ mCodedBuffer_Cond.broadcast();
+
+ //Delete all uncompleted tasks
+ mEncodeTask_Lock.lock();
+ while(! mEncodeTaskList.empty())
+ {
+ delete *mEncodeTaskList.begin();
+ mEncodeTaskList.erase(mEncodeTaskList.begin());
+ }
+ mEncodeTask_Lock.unlock();
+ mEncodeTask_Cond.broadcast();
+
+ //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
+ LOG_V( "Rlease Src Surface Map\n");
+ while(! mSrcSurfaceMapList.empty())
+ {
+ delete (*mSrcSurfaceMapList.begin());
+ mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+ }
+
+ LOG_V( "vaDestroyContext\n");
+ if (mVAContext != VA_INVALID_ID) {
+ vaStatus = vaDestroyContext(mVADisplay, mVAContext);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+ }
+
+ LOG_V( "vaDestroyConfig\n");
+ if (mVAConfig != VA_INVALID_ID) {
+ vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
+ CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
+ }
+
+CLEAN_UP:
+
+ // Reset output-path state unconditionally so a later start() begins clean.
+ mStarted = false;
+ mSliceSizeOverflow = false;
+ mCurOutputTask= NULL;
+ mOutCodedBuffer = 0;
+ mCurSegment = NULL;
+ mOffsetInSeg =0;
+ mTotalSize = 0;
+ mTotalSizeCopied = 0;
+ mFrameSkipped = false;
+ mSupportedSurfaceMemType = 0;
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+// prepareForOutput(): map the coded buffer (first call for a frame),
+// walk the VACodedBufferSegment chain to compute the total frame size and
+// slice-overflow status, allocate a local output buffer when the caller
+// did not provide one, and position mCurSegment/mOffsetInSeg at the first
+// byte of valid data.
+//
+// BUGFIX(review): outBuffer->bufferSize previously reported
+// mTotalSize + 100 while only mTotalSize - mTotalSizeCopied + 100 bytes
+// were allocated; the overstated capacity could let outputAllData()
+// write past the allocation.  bufferSize now matches the allocation.
+Encode_Status VideoEncoderBase::prepareForOutput(
+ VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VACodedBufferSegment *vaCodedSeg = NULL;
+ uint32_t status = 0;
+
+ LOG_V( "begin\n");
+ // Won't check parameters here as the caller already checked them
+ // mCurSegment is NULL means it is first time to be here after finishing encoding a frame
+ if (mCurSegment == NULL) {
+ if (mOutCodedBufferPtr == NULL) {
+ vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+ CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
+ }
+
+ LOG_I ("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
+
+ mTotalSize = 0;
+ mOffsetInSeg = 0;
+ mTotalSizeCopied = 0;
+ vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr;
+ mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr;
+
+ // Sum segment sizes and collect status flags across the whole chain.
+ while (1) {
+
+ mTotalSize += vaCodedSeg->size;
+ status = vaCodedSeg->status;
+#ifndef IMG_GFX
+ // Some drivers prepend 0xFF padding bytes; skip up to 16 of them.
+ uint8_t *pTemp;
+ uint32_t ii;
+ pTemp = (uint8_t*)vaCodedSeg->buf;
+ for(ii = 0; ii < 16;){
+ if (*(pTemp + ii) == 0xFF)
+ ii++;
+ else
+ break;
+ }
+ if (ii > 0) {
+ mOffsetInSeg = ii;
+ }
+#endif
+ if (!mSliceSizeOverflow) {
+ mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
+ }
+
+ if (vaCodedSeg->next == NULL)
+ break;
+
+ vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
+ }
+ }
+
+ // We will support two buffer allocation mode,
+ // one is application allocates the buffer and passes to encode,
+ // the other is encode allocate memory
+
+ //means app doesn't allocate the buffer, so _encode will allocate it.
+ if (outBuffer->data == NULL) {
+ *useLocalBuffer = true;
+ outBuffer->data = new uint8_t[mTotalSize - mTotalSizeCopied + 100];
+ if (outBuffer->data == NULL) {
+ LOG_E( "outBuffer->data == NULL\n");
+ return ENCODE_NO_MEMORY;
+ }
+ // Report exactly what was allocated above.
+ outBuffer->bufferSize = mTotalSize - mTotalSizeCopied + 100;
+ outBuffer->dataSize = 0;
+ }
+
+ // Clear all flag for every call
+ outBuffer->flag = 0;
+ if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
+
+ if (!mCurSegment)
+ return ENCODE_FAIL;
+
+ if (mCurSegment->size < mOffsetInSeg) {
+ LOG_E("mCurSegment->size < mOffsetInSeg\n");
+ return ENCODE_FAIL;
+ }
+
+ // Make sure we have data in current segment
+ if (mCurSegment->size == mOffsetInSeg) {
+ if (mCurSegment->next != NULL) {
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ } else {
+ LOG_V("No more data available\n");
+ outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+ outBuffer->dataSize = 0;
+ mCurSegment = NULL;
+ return ENCODE_NO_REQUEST_DATA;
+ }
+ }
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+// cleanupForOutput(): once every byte of the current frame has been
+// copied out (mCurSegment == NULL), unmap the coded buffer, free the
+// finished task and return its coded buffer to the pool, waking any
+// encode() call waiting for a buffer.  No-op while output is still in
+// progress.
+Encode_Status VideoEncoderBase::cleanupForOutput() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ //mCurSegment is NULL means all data has been copied out
+ if (mCurSegment == NULL && mOutCodedBufferPtr) {
+ vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+ mOutCodedBufferPtr = NULL;
+ mTotalSize = 0;
+ mOffsetInSeg = 0;
+ mTotalSizeCopied = 0;
+
+ delete mCurOutputTask;
+ mCurOutputTask = NULL;
+ mCodedBuffer_Lock.lock();
+ mVACodedBufferList.push_back(mOutCodedBuffer);
+ mCodedBuffer_Cond.signal();
+ mCodedBuffer_Lock.unlock();
+
+ LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
+ }
+ return ENCODE_SUCCESS;
+}
+
+// queryProfileLevelConfig(): report whether 'profile' supports the
+// encode-slice entrypoint on display 'dpy'.
+// Returns ENCODE_SUCCESS when supported, ENCODE_NOT_SUPPORTED otherwise.
+//
+// BUGFIX(review): the entrypoint array was a fixed VAEntrypoint[8]; the
+// libva contract requires the caller to size it via
+// vaMaxNumEntrypoints(dpy), so a driver exposing more than 8 entrypoints
+// would overflow the stack buffer.  Allocate the required size instead.
+Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ int i, entryPtrNum;
+
+ if(profile == VAProfileH264Main) //need to be fixed
+ return ENCODE_NOT_SUPPORTED;
+
+ int maxEntryNum = vaMaxNumEntrypoints(dpy);
+ if (maxEntryNum <= 0)
+ return ENCODE_NOT_SUPPORTED;
+
+ VAEntrypoint *entryPtr = new VAEntrypoint[maxEntryNum];
+
+ vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_E("Failed vaQueryConfigEntrypoints, vaStatus = %d\n", vaStatus);
+ delete[] entryPtr;
+ return ENCODE_DRIVER_FAIL;
+ }
+
+ for(i=0; i<entryPtrNum; i++){
+ if(entryPtr[i] == VAEntrypointEncSlice) {
+ delete[] entryPtr;
+ return ENCODE_SUCCESS;
+ }
+ }
+
+ delete[] entryPtr;
+ return ENCODE_NOT_SUPPORTED;
+}
+
+// queryAutoReferenceConfig(): probe the driver for the auto-reference
+// attribute and set mAutoReference accordingly.  If the query call fails,
+// the value stays VA_ATTRIB_NOT_SUPPORTED and auto-reference is disabled,
+// so vaStatus is intentionally not checked.  Always returns
+// ENCODE_SUCCESS.
+Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAConfigAttrib attrib_list;
+ attrib_list.type = VAConfigAttribEncAutoReference;
+ attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
+
+ vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
+ if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
+ mAutoReference = false;
+ else
+ mAutoReference = true;
+
+ return ENCODE_SUCCESS;
+}
+
+// querySupportedSurfaceMemTypes(): ask the driver which surface memory
+// types the current VA config supports and cache the bitmask in
+// mSupportedSurfaceMemType.  Two-pass query: first call obtains the
+// attribute count, second fills the array.
+//
+// BUGFIX(review): the attribute array was released with scalar 'delete'
+// after 'new[]' (undefined behavior), and leaked when the second query
+// failed.  Use delete[] and free on the failure path.
+Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+ unsigned int num = 0;
+
+ VASurfaceAttrib* attribs = NULL;
+
+ //get attribs number
+ vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
+ CHECK_VA_STATUS_RETURN("vaGetSurfaceAttributes");
+
+ if (num == 0)
+ return ENCODE_SUCCESS;
+
+ attribs = new VASurfaceAttrib[num];
+
+ vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ LOG_E("Failed vaQuerySurfaceAttributes, vaStatus = %d\n", vaStatus);
+ delete[] attribs;
+ return ENCODE_DRIVER_FAIL;
+ }
+
+ for(uint32_t i = 0; i < num; i ++) {
+ if (attribs[i].type == VASurfaceAttribMemoryType) {
+ mSupportedSurfaceMemType = attribs[i].value.value.i;
+ break;
+ }
+ else
+ continue;
+ }
+
+ delete[] attribs;
+
+ return ENCODE_SUCCESS;
+}
+
+// outputAllData(): copy the remaining coded data (from mCurSegment at
+// mOffsetInSeg onward) into outBuffer->data, walking the segment chain.
+// If the caller's buffer fills first, returns ENCODE_BUFFER_TOO_SMALL
+// with PARTIALFRAME set and leaves mCurSegment/mOffsetInSeg positioned
+// for the next call; on the last segment returns ENCODE_SUCCESS with
+// ENDOFFRAME set and clears mCurSegment.
+Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
+
+ // Data size been copied for every single call
+ uint32_t sizeCopiedHere = 0;
+ uint32_t sizeToBeCopied = 0;
+
+ CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
+
+ while (1) {
+
+ LOG_I("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
+ LOG_I("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
+ outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
+
+ // Sanity: offsets must never exceed the sizes they index into.
+ if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+ LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
+ return ENCODE_FAIL;
+ }
+
+ // Whole remainder of this segment fits in the caller's buffer?
+ if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
+ sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
+ sizeCopiedHere += sizeToBeCopied;
+ mTotalSizeCopied += sizeToBeCopied;
+ mOffsetInSeg = 0;
+ } else {
+ // Caller's buffer is full: copy what fits, remember position,
+ // report partial frame.
+ sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
+ memcpy(outBuffer->data + sizeCopiedHere,
+ (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
+ mTotalSizeCopied += sizeToBeCopied;
+ mOffsetInSeg += sizeToBeCopied;
+ outBuffer->dataSize = outBuffer->bufferSize;
+ outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+ return ENCODE_BUFFER_TOO_SMALL;
+ }
+
+ if (mCurSegment->next == NULL) {
+ outBuffer->dataSize = sizeCopiedHere;
+ outBuffer->remainingSize = 0;
+ outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+ mCurSegment = NULL;
+ return ENCODE_SUCCESS;
+ }
+
+ mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+ mOffsetInSeg = 0;
+ }
+}
+
+// setDefaultParams(): populate mComParams/mHrdParam with the baseline
+// defaults (H.264 Baseline level 4.1, NV12 input, 30 fps, no rate
+// control) used when the client does not call setParameters().  Called
+// once from the constructor.
+void VideoEncoderBase::setDefaultParams() {
+
+ // Set default value for input parameters
+ mComParams.profile = VAProfileH264Baseline;
+ mComParams.level = 41;
+ mComParams.rawFormat = RAW_FORMAT_NV12;
+ mComParams.frameRate.frameRateNum = 30;
+ mComParams.frameRate.frameRateDenom = 1;
+ mComParams.resolution.width = 0;
+ mComParams.resolution.height = 0;
+ mComParams.intraPeriod = 30;
+ mComParams.rcMode = RATE_CONTROL_NONE;
+ mComParams.rcParams.initQP = 15;
+ mComParams.rcParams.minQP = 0;
+ mComParams.rcParams.maxQP = 0;
+ mComParams.rcParams.I_minQP = 0;
+ mComParams.rcParams.I_maxQP = 0;
+ mComParams.rcParams.bitRate = 640000;
+ mComParams.rcParams.targetPercentage= 0;
+ mComParams.rcParams.windowSize = 0;
+ mComParams.rcParams.disableFrameSkip = 0;
+ // Bits stuffing disabled by default; driver-dependent behavior.
+ mComParams.rcParams.disableBitsStuffing = 1;
+ mComParams.rcParams.enableIntraFrameQPControl = 0;
+ mComParams.rcParams.temporalFrameRate = 0;
+ mComParams.rcParams.temporalID = 0;
+ mComParams.cyclicFrameInterval = 30;
+ mComParams.refreshType = VIDEO_ENC_NONIR;
+ mComParams.airParams.airMBs = 0;
+ mComParams.airParams.airThreshold = 0;
+ mComParams.airParams.airAuto = 1;
+ mComParams.disableDeblocking = 2;
+ mComParams.syncEncMode = false;
+ mComParams.codedBufNum = 2;
+ mComParams.numberOfLayer = 1;
+ mComParams.nPeriodicity = 0;
+ memset(mComParams.nLayerID,0,32*sizeof(uint32_t));
+
+ mHrdParam.bufferSize = 0;
+ mHrdParam.initBufferFullness = 0;
+
+ mStoreMetaDataInBuffers.isEnabled = false;
+}
+
+// setParameters(): apply a pre-start configuration set.  Each parameter
+// struct is validated by its 'size' field before the cast is trusted.
+// Rejected with ENCODE_ALREADY_INIT once the encoder is started (use
+// setConfig() for runtime changes).  Codec-specific types are forwarded
+// to the derived class via derivedSetParams().
+Encode_Status VideoEncoderBase::setParameters(
+ VideoParamConfigSet *videoEncParams) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ LOG_I("Config type = %x\n", (int)videoEncParams->type);
+
+ if (mStarted) {
+ LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
+ return ENCODE_ALREADY_INIT;
+ }
+
+ switch (videoEncParams->type) {
+ case VideoParamsTypeCommon: {
+
+ VideoParamsCommon *paramsCommon =
+ reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+ if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+ // At least two coded buffers are required for pipelining.
+ if(paramsCommon->codedBufNum < 2)
+ paramsCommon->codedBufNum =2;
+ mComParams = *paramsCommon;
+ break;
+ }
+
+ case VideoParamsTypeUpSteamBuffer: {
+
+ VideoParamsUpstreamBuffer *upStreamBuffer =
+ reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
+
+ if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ ret = setUpstreamBuffer(upStreamBuffer);
+ break;
+ }
+
+ case VideoParamsTypeUsrptrBuffer: {
+
+ // usrptr only can be get
+ // this case should not happen
+ break;
+ }
+
+ case VideoParamsTypeHRD: {
+ VideoParamsHRD *hrd =
+ reinterpret_cast <VideoParamsHRD *> (videoEncParams);
+
+ if (hrd->size != sizeof (VideoParamsHRD)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mHrdParam.bufferSize = hrd->bufferSize;
+ mHrdParam.initBufferFullness = hrd->initBufferFullness;
+ // Flag the HRD params to be sent to the driver on next render.
+ mRenderHrd = true;
+
+ break;
+ }
+
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
+
+ break;
+ }
+
+ case VideoParamsTypeTemporalLayer:{
+ VideoParamsTemporalLayer *temporallayer =
+ reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
+
+ if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mComParams.numberOfLayer = temporallayer->numberOfLayer;
+ mComParams.nPeriodicity = temporallayer->nPeriodicity;
+ for(uint32_t i=0;i<temporallayer->nPeriodicity;i++)
+ mComParams.nLayerID[i] = temporallayer->nLayerID[i];
+ mRenderMultiTemporal = true;
+ break;
+ }
+
+ case VideoParamsTypeAVC:
+ case VideoParamsTypeH263:
+ case VideoParamsTypeMP4:
+ case VideoParamsTypeVC1:
+ case VideoParamsTypeVP8: {
+ // Codec-specific parameters are handled by the derived encoder.
+ ret = derivedSetParams(videoEncParams);
+ break;
+ }
+
+ default: {
+ LOG_E ("Wrong ParamType here\n");
+ return ENCODE_INVALID_PARAMS;
+ }
+ }
+ return ret;
+}
+
+/**
+ * Query an encoder parameter set selected by videoEncParams->type.
+ *
+ * The caller allocates the concrete parameter struct, fills in ->type and
+ * ->size, and this routine copies the current encoder state back into it.
+ *
+ * @param videoEncParams caller-allocated parameter struct (must not be NULL)
+ * @return ENCODE_SUCCESS on success, ENCODE_INVALID_PARAMS when the struct
+ *         size or type is wrong, or the status of derivedGetParams() for
+ *         codec-specific parameter types.
+ */
+Encode_Status VideoEncoderBase::getParameters(
+        VideoParamConfigSet *videoEncParams) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+    LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+    switch (videoEncParams->type) {
+        case VideoParamsTypeCommon: {
+
+            VideoParamsCommon *paramsCommon =
+                reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+            if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            *paramsCommon = mComParams;
+            break;
+        }
+
+        case VideoParamsTypeUpSteamBuffer: {
+
+            // Get upstream buffer could happen
+            // but not meaningful a lot
+            break;
+        }
+
+        case VideoParamsTypeUsrptrBuffer: {
+            VideoParamsUsrptrBuffer *usrptrBuffer =
+                reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
+
+            if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            // "Getting" this parameter actually allocates a new VA surface
+            // and returns its mapped user pointer to the caller.
+            ret = getNewUsrptrFromSurface(
+                    usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
+                    usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
+                    &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
+
+            break;
+        }
+
+        case VideoParamsTypeHRD: {
+            VideoParamsHRD *hrd =
+                reinterpret_cast <VideoParamsHRD *> (videoEncParams);
+
+            if (hrd->size != sizeof (VideoParamsHRD)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            hrd->bufferSize = mHrdParam.bufferSize;
+            hrd->initBufferFullness = mHrdParam.initBufferFullness;
+
+            break;
+        }
+
+        case VideoParamsTypeStoreMetaDataInBuffers: {
+            VideoParamsStoreMetaDataInBuffers *metadata =
+                reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+            if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
+
+            break;
+        }
+
+        case VideoParamsTypeProfileLevel: {
+            VideoParamsProfileLevel *profilelevel =
+                reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
+
+            if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            // Hard-coded H.264 level caps: 4.2 for High/Main, 4.1 for Baseline.
+            profilelevel->level = 0;
+            if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
+                profilelevel->isSupported = true;
+                if(profilelevel->profile == VAProfileH264High)
+                    profilelevel->level = 42;
+                else if(profilelevel->profile == VAProfileH264Main)
+                    profilelevel->level = 42;
+                else if(profilelevel->profile == VAProfileH264Baseline)
+                    profilelevel->level = 41;
+                else{
+                    profilelevel->level = 0;
+                    profilelevel->isSupported = false;
+                }
+            }
+            // BUG FIX: the missing break here used to fall through into the
+            // temporal-layer case below, whose size check then rejected a
+            // valid profile/level query with ENCODE_INVALID_PARAMS.
+            break;
+        }
+
+        case VideoParamsTypeTemporalLayer:{
+            VideoParamsTemporalLayer *temporallayer =
+                reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
+
+            if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            temporallayer->numberOfLayer = mComParams.numberOfLayer;
+
+            break;
+        }
+
+        case VideoParamsTypeAVC:
+        case VideoParamsTypeH263:
+        case VideoParamsTypeMP4:
+        case VideoParamsTypeVC1:
+        case VideoParamsTypeVP8: {
+            // BUG FIX: propagate the derived class status instead of
+            // silently discarding it.
+            ret = derivedGetParams(videoEncParams);
+            break;
+        }
+
+        default: {
+            // Unknown type: report an error, consistent with setParameters().
+            LOG_E ("Wrong ParamType here\n");
+            return ENCODE_INVALID_PARAMS;
+        }
+
+    }
+    return ret;
+}
+
+/**
+ * Apply a dynamic (runtime) encoder configuration selected by
+ * videoEncConfig->type. The new value is cached in mComParams (or the
+ * per-temporal-layer table) and, where a mRender* flag is raised, sent to
+ * the driver on the next encode cycle.
+ *
+ * @param videoEncConfig caller-allocated config struct (must not be NULL)
+ * @return ENCODE_SUCCESS, ENCODE_INVALID_PARAMS on a size mismatch or
+ *         out-of-range temporal layer ID, or the derived class status for
+ *         codec-specific config types.
+ */
+Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    // workaround: the mStarted precondition is deliberately disabled
+#if 0
+    if (!mStarted) {
+        LOG_E("Encoder has not initialized yet, can't call setConfig\n");
+        return ENCODE_NOT_INIT;
+    }
+#endif
+
+    switch (videoEncConfig->type) {
+        case VideoConfigTypeFrameRate: {
+            VideoConfigFrameRate *configFrameRate =
+                reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
+
+            if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams.frameRate = configFrameRate->frameRate;
+            mRenderFrameRate = true;
+            break;
+        }
+
+        case VideoConfigTypeBitRate: {
+            VideoConfigBitRate *configBitRate =
+                reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
+
+            if (configBitRate->size != sizeof (VideoConfigBitRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            if(mComParams.numberOfLayer == 1)
+            {
+                mComParams.rcParams = configBitRate->rcParams;
+                mRenderBitRate = true;
+            }
+            else
+            {
+                // BUG FIX: temporalID comes from the caller and indexes the
+                // fixed 3-entry mTemporalLayerBitrateFramerate table; bound
+                // it to avoid an out-of-bounds write.
+                uint32_t layer = configBitRate->rcParams.temporalID;
+                if (layer >= sizeof(mTemporalLayerBitrateFramerate) /
+                             sizeof(mTemporalLayerBitrateFramerate[0])) {
+                    return ENCODE_INVALID_PARAMS;
+                }
+                mTemporalLayerBitrateFramerate[layer].nLayerID = layer;
+                mTemporalLayerBitrateFramerate[layer].bitRate = configBitRate->rcParams.bitRate;
+                mTemporalLayerBitrateFramerate[layer].frameRate = configBitRate->rcParams.temporalFrameRate;
+            }
+            break;
+        }
+
+        case VideoConfigTypeResolution: {
+
+            // Not Implemented
+            break;
+        }
+        case VideoConfigTypeIntraRefreshType: {
+
+            VideoConfigIntraRefreshType *configIntraRefreshType =
+                reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
+
+            if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams.refreshType = configIntraRefreshType->refreshType;
+            break;
+        }
+
+        case VideoConfigTypeCyclicFrameInterval: {
+            VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
+                reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
+            if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
+            break;
+        }
+
+        case VideoConfigTypeAIR: {
+
+            VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
+
+            if (configAIR->size != sizeof (VideoConfigAIR)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mComParams.airParams = configAIR->airParams;
+            mRenderAIR = true;
+            break;
+        }
+        case VideoConfigTypeCIR: {
+
+            VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
+
+            if (configCIR->size != sizeof (VideoConfigCIR)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mComParams.cirParams = configCIR->cirParams;
+            mRenderCIR = true;
+            break;
+        }
+        case VideoConfigTypeAVCIntraPeriod:
+        case VideoConfigTypeNALSize:
+        case VideoConfigTypeIDRRequest:
+        case VideoConfigTypeSliceNum:
+        case VideoConfigTypeVP8:
+        case VideoConfigTypeVP8ReferenceFrame:
+        case VideoConfigTypeVP8MaxFrameSizeRatio:{
+            // Codec-specific configs are handled by the derived encoder.
+            ret = derivedSetConfig(videoEncConfig);
+            break;
+        }
+        default: {
+            LOG_E ("Wrong Config Type here\n");
+            break;
+        }
+    }
+    return ret;
+}
+
+/**
+ * Read back a dynamic encoder configuration selected by
+ * videoEncConfig->type into the caller-supplied struct.
+ *
+ * @param videoEncConfig caller-allocated config struct (must not be NULL)
+ * @return ENCODE_SUCCESS, ENCODE_INVALID_PARAMS on a size mismatch, or the
+ *         derived class status for codec-specific config types.
+ */
+Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    switch (videoEncConfig->type) {
+        case VideoConfigTypeFrameRate: {
+            VideoConfigFrameRate *cfg =
+                    reinterpret_cast<VideoConfigFrameRate *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigFrameRate))
+                return ENCODE_INVALID_PARAMS;
+            cfg->frameRate = mComParams.frameRate;
+            break;
+        }
+
+        case VideoConfigTypeBitRate: {
+            VideoConfigBitRate *cfg =
+                    reinterpret_cast<VideoConfigBitRate *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigBitRate))
+                return ENCODE_INVALID_PARAMS;
+            cfg->rcParams = mComParams.rcParams;
+            break;
+        }
+
+        case VideoConfigTypeResolution:
+            // Not Implemented
+            break;
+
+        case VideoConfigTypeIntraRefreshType: {
+            VideoConfigIntraRefreshType *cfg =
+                    reinterpret_cast<VideoConfigIntraRefreshType *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigIntraRefreshType))
+                return ENCODE_INVALID_PARAMS;
+            cfg->refreshType = mComParams.refreshType;
+            break;
+        }
+
+        case VideoConfigTypeCyclicFrameInterval: {
+            VideoConfigCyclicFrameInterval *cfg =
+                    reinterpret_cast<VideoConfigCyclicFrameInterval *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigCyclicFrameInterval))
+                return ENCODE_INVALID_PARAMS;
+            cfg->cyclicFrameInterval = mComParams.cyclicFrameInterval;
+            break;
+        }
+
+        case VideoConfigTypeAIR: {
+            VideoConfigAIR *cfg = reinterpret_cast<VideoConfigAIR *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigAIR))
+                return ENCODE_INVALID_PARAMS;
+            cfg->airParams = mComParams.airParams;
+            break;
+        }
+
+        case VideoConfigTypeCIR: {
+            VideoConfigCIR *cfg = reinterpret_cast<VideoConfigCIR *>(videoEncConfig);
+            if (cfg->size != sizeof(VideoConfigCIR))
+                return ENCODE_INVALID_PARAMS;
+            cfg->cirParams = mComParams.cirParams;
+            break;
+        }
+
+        case VideoConfigTypeAVCIntraPeriod:
+        case VideoConfigTypeNALSize:
+        case VideoConfigTypeIDRRequest:
+        case VideoConfigTypeSliceNum:
+        case VideoConfigTypeVP8:
+            // Codec-specific configs are answered by the derived encoder.
+            ret = derivedGetConfig(videoEncConfig);
+            break;
+
+        default:
+            LOG_E ("Wrong ParamType here\n");
+            break;
+    }
+    return ret;
+}
+
+// Reset the frame counter when a new header is pending, then refresh the
+// per-frame info (picture type, sync flag) for this encode task.
+void VideoEncoderBase::PrepareFrameInfo(EncodeTask* task) {
+    if (mNewHeader) {
+        mFrameNum = 0;
+    }
+    LOG_I( "mFrameNum = %d ", mFrameNum);
+    updateFrameInfo(task);
+}
+
+// Decide the picture type for this task: an I frame is forced for the very
+// first frame and at every intraPeriod boundary; everything else is P.
+// I frames additionally carry the sync-frame buffer flag.
+Encode_Status VideoEncoderBase::updateFrameInfo(EncodeTask* task) {
+
+    bool intraFrame = (mFrameNum == 0) ||
+            (mComParams.intraPeriod != 0 && (mFrameNum % mComParams.intraPeriod) == 0);
+
+    task->type = intraFrame ? FTYPE_I : FTYPE_P;
+    if (intraFrame) {
+        task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+/**
+ * Compute (and cache in mCodedBufSize) the worst-case encoded frame size
+ * for the current resolution / rate control settings.
+ *
+ * @param maxSize out: the computed size in bytes (must not be NULL)
+ * @return ENCODE_SUCCESS or ENCODE_NULL_PTR.
+ */
+Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
+
+    if (maxSize == NULL) {
+        LOG_E("maxSize == NULL\n");
+        return ENCODE_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
+
+    if (mCodedBufSize > 0) {
+        // Size was computed on a previous call; reuse the cached value.
+        *maxSize = mCodedBufSize;
+        LOG_V ("Already calculate the max encoded size, get the value directly");
+        return ENCODE_SUCCESS;
+    }
+
+    // here, VP8 is different from AVC/H263
+    if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode
+    {
+        // According to VIED suggestions, in CBR mode, coded buffer should be the size of 3 bytes per luma pixel
+        // in CBR_HRD mode, coded buffer size should be 5 * rc_buf_sz * rc_target_bitrate;
+        // now we just hardcode mCodedBufSize as 2M to walk round coded buffer size issue;
+        /*
+        if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode
+            mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000;
+        else // CBR mode
+            mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height;
+        */
+        mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31);
+    }
+    else // for AVC/H263/MPEG4 encode
+    {
+        // base on the rate control mode to calculate the default encoded buffer size
+        if (mComParams.rcMode == VA_RC_NONE) {
+            mCodedBufSize = (size * 400) / (16 * 16);
+            // set to value according to QP
+        } else {
+            mCodedBufSize = mComParams.rcParams.bitRate / 4;
+        }
+
+        // never go below ~400 bytes per macroblock
+        mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16));
+
+        // in case got a very large user input bit rate value;
+        // (size * 12) == size * 1.5 bytes/pixel * 8, done in integer
+        // arithmetic to avoid the old implicit double -> uint32_t
+        // conversion of "size * 1.5 * 8".
+        mCodedBufSize = min(mCodedBufSize, (size * 12));
+        // round up to a 16-byte boundary
+        mCodedBufSize = (mCodedBufSize + 15) &(~15);
+    }
+
+    *maxSize = mCodedBufSize;
+    return ENCODE_SUCCESS;
+}
+
+/**
+ * Allocate a new VA surface of the requested size and hand its mapped user
+ * pointer back to the caller, so the app can fill raw frames directly into
+ * driver-backed memory (zero-copy input path).
+ *
+ * Only callable before start(); only NV12 is accepted. On success the
+ * surface is registered in mSrcSurfaceMapList keyed by the user pointer so
+ * encode() can find it later.
+ *
+ * @param width/height   requested frame dimensions (must be non-zero)
+ * @param format         FOURCC; must be "NV12"
+ * @param expectedSize   minimum acceptable allocation size
+ * @param outsize        out: actual allocated size (image.data_size)
+ * @param stride         out: luma stride (image.pitches[0])
+ * @param usrptr         out: CPU-mapped pointer to the surface
+ * @return ENCODE_SUCCESS, ENCODE_WRONG_STATE if already started,
+ *         ENCODE_NULL_PTR / ENCODE_NOT_SUPPORTED on bad arguments,
+ *         ENCODE_DRIVER_FAIL / ENCODE_FAIL / ENCODE_NO_MEMORY otherwise.
+ */
+Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
+    uint32_t width, uint32_t height, uint32_t format,
+    uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
+
+    Encode_Status ret = ENCODE_FAIL;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    VASurfaceID surface = VA_INVALID_SURFACE;
+    VAImage image;
+    uint32_t index = 0;
+
+    LOG_V( "Begin\n");
+    // If encode session has been configured, we can not request surface creation anymore
+    if (mStarted) {
+        LOG_E( "Already Initialized, can not request VA surface anymore\n");
+        return ENCODE_WRONG_STATE;
+    }
+    // NOTE: width/height are unsigned, so "<= 0" only catches zero.
+    if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) {
+        LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n");
+        return ENCODE_NULL_PTR;
+    }
+
+    // Current only NV12 is supported in VA API
+    // Through format we can get known the number of planes
+    if (format != STRING_TO_FOURCC("NV12")) {
+        LOG_W ("Format is not supported\n");
+        return ENCODE_NOT_SUPPORTED;
+    }
+
+    surface = CreateNewVASurface(mVADisplay, width, height);
+    if (surface == VA_INVALID_SURFACE)
+        return ENCODE_DRIVER_FAIL;
+
+    // Derive an image from the surface and map it to get a CPU pointer.
+    vaStatus = vaDeriveImage(mVADisplay, surface, &image);
+    CHECK_VA_STATUS_RETURN("vaDeriveImage");
+    LOG_V( "vaDeriveImage Done\n");
+    vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // make sure the physical page been allocated
+    // (touch one byte per 4K page; the read+conditional write forces the
+    // kernel to back each page without changing the content)
+    for (index = 0; index < image.data_size; index = index + 4096) {
+        unsigned char tmp =  *(*usrptr + index);
+        if (tmp == 0)
+            *(*usrptr + index) = 0;
+    }
+
+    *outsize = image.data_size;
+    *stride = image.pitches[0];
+
+    LOG_I( "surface = 0x%08x\n",(uint32_t)surface);
+    LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
+    LOG_I("image->pitches[1] = %d\n", image.pitches[1]);
+    LOG_I("image->offsets[0] = %d\n", image.offsets[0]);
+    LOG_I("image->offsets[1] = %d\n", image.offsets[1]);
+    LOG_I("image->num_planes = %d\n", image.num_planes);
+    LOG_I("image->width = %d\n", image.width);
+    LOG_I("image->height = %d\n", image.height);
+    LOG_I ("data_size = %d\n", image.data_size);
+    LOG_I ("usrptr = 0x%p\n", *usrptr);
+
+    // Unmap/destroy the image wrapper; the surface memory itself (and the
+    // user pointer derived from it) stays valid.
+    vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+    vaStatus = vaDestroyImage(mVADisplay, image.image_id);
+    CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+    if (*outsize < expectedSize) {
+        LOG_E ("Allocated buffer size is small than the expected size, destroy the surface");
+        LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
+        vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
+        CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
+        return ENCODE_FAIL;
+    }
+
+    // Register the surface in the source map so encode() can resolve the
+    // user pointer back to this surface.
+    VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+    if (map == NULL) {
+        LOG_E( "new VASurfaceMap failed\n");
+        return ENCODE_NO_MEMORY;
+    }
+
+    map->setVASurface(surface);  //special case, vasuface is set, so nothing do in doMapping
+//    map->setType(MetadataBufferTypeEncoder);
+    map->setValue((intptr_t)*usrptr);
+    ValueInfo vinfo;
+    memset(&vinfo, 0, sizeof(ValueInfo));
+    vinfo.mode = (MemMode)MEM_MODE_USRPTR;
+    vinfo.handle = 0;
+    vinfo.size = 0;
+    vinfo.width = width;
+    vinfo.height = height;
+    vinfo.lumaStride = width;
+    vinfo.chromStride = width;
+    vinfo.format = VA_FOURCC_NV12;
+    vinfo.s3dformat = 0xffffffff;
+    map->setValueInfo(vinfo);
+    map->doMapping();
+
+    mSrcSurfaceMapList.push_back(map);
+
+    ret = ENCODE_SUCCESS;
+
+    return ret;
+}
+
+/**
+ * Pre-register a set of upstream (caller-owned) buffers: each buffer value
+ * is wrapped in a VASurfaceMap, mapped to a VA surface, and cached in
+ * mSrcSurfaceMapList so later encode() calls resolve it without remapping.
+ * Buffers already present in the cache are skipped.
+ *
+ * @param upStreamBuffer buffer list plus mode/display/attrib info
+ * @return ENCODE_SUCCESS, ENCODE_FAIL on an empty list, or the status of
+ *         the last failed doMapping().
+ */
+Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
+
+    Encode_Status status = ENCODE_SUCCESS;
+
+    CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
+    if (upStreamBuffer->bufCnt == 0) {
+        LOG_E("bufCnt == 0\n");
+        return ENCODE_FAIL;
+    }
+
+    for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
+        if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL)  //already mapped
+            continue;
+
+        //wrap upstream buffer into vaSurface
+        VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+
+//        map->setType(MetadataBufferTypeUser);
+        map->setValue(upStreamBuffer->bufList[i]);
+        ValueInfo vinfo;
+        memset(&vinfo, 0, sizeof(ValueInfo));
+        vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
+        vinfo.handle = (intptr_t)upStreamBuffer->display;
+        vinfo.size = 0;
+        // Geometry/stride/format are optional; left zero when no attrib.
+        if (upStreamBuffer->bufAttrib) {
+            vinfo.width = upStreamBuffer->bufAttrib->realWidth;
+            vinfo.height = upStreamBuffer->bufAttrib->realHeight;
+            vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
+            vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
+            vinfo.format = upStreamBuffer->bufAttrib->format;
+        }
+        vinfo.s3dformat = 0xFFFFFFFF;
+        map->setValueInfo(vinfo);
+        status = map->doMapping();
+
+        // Keep successfully mapped entries; drop the wrapper on failure.
+        // NOTE(review): a later failure overwrites 'status' from earlier
+        // successes — only the last iteration's result is reported.
+        if (status == ENCODE_SUCCESS)
+            mSrcSurfaceMapList.push_back(map);
+        else
+           delete map;
+    }
+
+    return status;
+}
+
+/**
+ * Resolve the input buffer of one frame to a VA surface ID (*sid).
+ *
+ * In metadata mode the buffer is an IntelMetadataBuffer to unserialize; in
+ * raw mode the data pointer itself is the lookup key. A cached mapping is
+ * reused when found; otherwise a new VASurfaceMap is created from the
+ * available ValueInfo and added to the cache. Extra values carried by the
+ * metadata are mapped with the same ValueInfo as a side effect.
+ *
+ * @param inBuffer input frame buffer (metadata blob or raw pixels)
+ * @param sid      out: VA surface to encode from
+ * @return ENCODE_SUCCESS, ENCODE_NO_REQUEST_DATA when the metadata can't
+ *         be parsed or no mapping info exists, ENCODE_NULL_PTR on empty
+ *         raw input, or the doMapping() failure status.
+ */
+Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    IntelMetadataBufferType type;
+    intptr_t value;
+    ValueInfo vinfo;
+    ValueInfo *pvinfo = &vinfo;
+    intptr_t *extravalues = NULL;
+    unsigned int extravalues_count = 0;
+
+    IntelMetadataBuffer imb;
+    VASurfaceMap *map = NULL;
+
+    memset(&vinfo, 0, sizeof(ValueInfo));
+    if (mStoreMetaDataInBuffers.isEnabled) {
+        //metadatabuffer mode
+        LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+        if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
+            //fail to parse buffer
+            return ENCODE_NO_REQUEST_DATA;
+        }
+
+        imb.GetType(type);
+        imb.GetValue(value);
+    } else {
+        //raw mode
+        LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+        if (! inBuffer->data || inBuffer->size == 0) {
+            return ENCODE_NULL_PTR;
+        }
+
+        type = IntelMetadataBufferTypeUser;
+        value = (intptr_t)inBuffer->data;
+    }
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    // NOTE(review): in raw mode 'imb' was never unserialized, so the
+    // session flag read here comes from a default-constructed buffer —
+    // confirm this is intended when XPROC sharing is enabled.
+    uint32_t sflag = mSessionFlag;
+    imb.GetSessionFlag(mSessionFlag);
+    if (mSessionFlag != sflag) {
+        //new sharing session, flush buffer sharing cache
+        IntelMetadataBuffer::ClearContext(sflag, false);
+        //flush surfacemap cache
+        LOG_V( "Flush Src Surface Map\n");
+        while(! mSrcSurfaceMapList.empty())
+        {
+            delete (*mSrcSurfaceMapList.begin());
+            mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+        }
+    }
+#endif
+
+    //find if mapped
+    // NOTE(review): LOG_I below formats an intptr_t with "%i" — wrong
+    // width on 64-bit builds; should be cast or use a pointer-sized spec.
+    map = (VASurfaceMap*) findSurfaceMapByValue(value);
+
+    if (map) {
+        //has mapped, get surfaceID directly and do all necessary actions
+        LOG_I("direct find surface %d from value %i\n", map->getVASurface(), value);
+        *sid = map->getVASurface();
+        map->doMapping();
+        return ret;
+    }
+
+    //if no found from list, then try to map value with parameters
+    LOG_I("not find surface from cache with value %i, start mapping if enough information\n", value);
+
+    if (mStoreMetaDataInBuffers.isEnabled) {
+
+        //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo
+        if (type == IntelMetadataBufferTypeGrallocSource) {
+            vinfo.mode = MEM_MODE_GFXHANDLE;
+            vinfo.handle = 0;
+            vinfo.size = 0;
+            vinfo.width = mComParams.resolution.width;
+            vinfo.height = mComParams.resolution.height;
+            vinfo.lumaStride = mComParams.resolution.width;
+            vinfo.chromStride = mComParams.resolution.width;
+            vinfo.format = VA_FOURCC_NV12;
+            vinfo.s3dformat = 0xFFFFFFFF;
+        } else {
+            //get all info mapping needs
+            imb.GetValueInfo(pvinfo);
+            imb.GetExtraValues(extravalues, extravalues_count);
+        }
+
+    } else {
+
+        //raw mode
+        vinfo.mode = MEM_MODE_MALLOC;
+        vinfo.handle = 0;
+        vinfo.size = inBuffer->size;
+        vinfo.width = mComParams.resolution.width;
+        vinfo.height = mComParams.resolution.height;
+        vinfo.lumaStride = mComParams.resolution.width;
+        vinfo.chromStride = mComParams.resolution.width;
+        vinfo.format = VA_FOURCC_NV12;
+        vinfo.s3dformat = 0xFFFFFFFF;
+    }
+
+    /* Start mapping, if pvinfo is not NULL, then have enough info to map;
+     *  if extravalues is not NULL, then need to do more times mapping
+     */
+    if (pvinfo){
+        //map according info, and add to surfacemap list
+        map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+        map->setValue(value);
+        map->setValueInfo(*pvinfo);
+        map->setAction(mVASurfaceMappingAction);
+
+        ret = map->doMapping();
+        if (ret == ENCODE_SUCCESS) {
+            LOG_I("surface mapping success, map value %i into surface %d\n", value, map->getVASurface());
+            mSrcSurfaceMapList.push_back(map);
+        } else {
+            delete map;
+            LOG_E("surface mapping failed, wrong info or meet serious error\n");
+            return ret;
+        }
+
+        *sid = map->getVASurface();
+
+    } else {
+        //can't map due to no info
+        LOG_E("surface mapping failed, missing information\n");
+        return ENCODE_NO_REQUEST_DATA;
+    }
+
+    if (extravalues) {
+        //map more using same ValueInfo
+        // (failures here are logged but do not fail the frame — the
+        // primary surface in *sid is already valid)
+        for(unsigned int i=0; i<extravalues_count; i++) {
+            map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
+            map->setValue(extravalues[i]);
+            map->setValueInfo(vinfo);
+
+            ret = map->doMapping();
+            if (ret == ENCODE_SUCCESS) {
+                LOG_I("surface mapping extravalue success, map value %i into surface %d\n", extravalues[i], map->getVASurface());
+                mSrcSurfaceMapList.push_back(map);
+            } else {
+                delete map;
+                map = NULL;
+                LOG_E( "surface mapping extravalue failed, extravalue is %i\n", extravalues[i]);
+            }
+        }
+    }
+
+    return ret;
+}
+
+/**
+ * Submit a VAEncMiscParameterRateControl buffer carrying the current rate
+ * control settings to the driver for this frame.
+ *
+ * For IDR/I frames with intra QP control enabled, the I-frame min/max QP
+ * pair is used and mRenderBitRate stays raised so the next (non-I) frame
+ * re-sends the regular QP range; otherwise the regular pair is sent and
+ * the flag is cleared.
+ *
+ * @param task the encode task being rendered (read for its frame type)
+ * @return ENCODE_SUCCESS or a VA failure via CHECK_VA_STATUS_RETURN.
+ */
+Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) {
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    LOG_V( "Begin\n\n");
+    // disable bits stuffing and skip frame apply to all rate control mode
+
+    VAEncMiscParameterBuffer   *miscEncParamBuf;
+    VAEncMiscParameterRateControl *bitrateControlParam;
+    VABufferID miscParamBufferID;
+
+    // Buffer holds the misc-parameter header plus the rate control payload.
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+            1, NULL,
+            &miscParamBufferID);
+
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
+    bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
+
+    bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
+    bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
+    if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) {
+        bitrateControlParam->min_qp = mComParams.rcParams.I_minQP;
+        bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP;
+        mRenderBitRate = true;
+        LOG_I("apply I min/max qp for IDR or I frame\n");
+    } else {
+        bitrateControlParam->min_qp = mComParams.rcParams.minQP;
+        bitrateControlParam->max_qp = mComParams.rcParams.maxQP;
+        mRenderBitRate = false;
+        LOG_I("revert to original min/max qp after IDR or I frame\n");
+    }
+    bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
+    bitrateControlParam->window_size = mComParams.rcParams.windowSize;
+    bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip;
+    bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing;
+    bitrateControlParam->basic_unit_size = 0;
+
+    LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second);
+    LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp);
+    LOG_I("min_qp = %d\n", bitrateControlParam->min_qp);
+    LOG_I("max_qp = %d\n", bitrateControlParam->max_qp);
+    LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage);
+    LOG_I("window_size = %d\n", bitrateControlParam->window_size);
+    LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip);
+    LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext,
+            &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+
+/**
+ * Submit a VAEncMiscParameterFrameRate buffer carrying the current frame
+ * rate (num/denom rounded to the nearest integer) to the driver. Only
+ * meaningful in VCM rate control mode; a warning + no-op otherwise.
+ *
+ * @return ENCODE_SUCCESS or a VA failure via CHECK_VA_STATUS_RETURN.
+ */
+Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+        LOG_W("Not in VCM mode, but call SendDynamicFramerate\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer   *miscEncParamBuf;
+    VAEncMiscParameterFrameRate *frameRateParam;
+    VABufferID miscParamBufferID;
+
+    // BUG FIX: the buffer must hold the misc-parameter header struct plus
+    // the payload; the old code used sizeof(miscEncParamBuf) — the size of
+    // a *pointer* — under-allocating the VA buffer.
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
+    frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
+    // Round num/denom to the nearest integer fps.
+    // NOTE(review): assumes frameRateDenom != 0 — verify upstream validation.
+    unsigned int framerate =
+        (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
+        / mComParams.frameRate.frameRateDenom;
+    frameRateParam->framerate = framerate;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    // BUG FIX: log the cached local value — the old code dereferenced
+    // frameRateParam after vaUnmapBuffer (use after unmap).
+    LOG_I( "frame rate = %d\n", framerate);
+    return ENCODE_SUCCESS;
+}
+
+/**
+ * Submit a VAEncMiscParameterHRD buffer carrying the HRD (hypothetical
+ * reference decoder) buffer size and initial fullness from mHrdParam.
+ *
+ * @return ENCODE_SUCCESS or a VA failure via CHECK_VA_STATUS_RETURN.
+ */
+Encode_Status VideoEncoderBase::renderHrd() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterHRD *hrdParam;
+    VABufferID miscParamBufferID;
+
+    // BUG FIX: the buffer must hold the misc-parameter header struct plus
+    // the payload; the old code used sizeof(miscEncParamBuf) — the size of
+    // a *pointer* — under-allocating the VA buffer.
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeHRD;
+    hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data;
+
+    hrdParam->buffer_size = mHrdParam.bufferSize;
+    hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+// Linear scan of the cached surface maps for an entry whose key matches
+// 'value'; returns NULL when the value has not been mapped yet.
+VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) {
+
+    android::List<VASurfaceMap *>::iterator it = mSrcSurfaceMapList.begin();
+    for (; it != mSrcSurfaceMapList.end(); ++it) {
+        if ((*it)->getValue() == value)
+            return *it;
+    }
+    return NULL;
+}
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
new file mode 100644
index 0000000..bf1eecf
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.h
@@ -0,0 +1,186 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_BASE_H__
+#define __VIDEO_ENCODER_BASE_H__
+
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "VideoEncoderInterface.h"
+#include "IntelMetadataBuffer.h"
+#include <utils/List.h>
+#include <utils/threads.h>
+#include "VideoEncoderUtils.h"
+
+// Legacy descriptor pairing one input-buffer key with its VA surfaces.
+// NOTE(review): VideoEncoderBase below stores VASurfaceMap* (from
+// VideoEncoderUtils.h), not this struct — confirm it is still needed.
+struct SurfaceMap {
+    VASurfaceID surface;            // primary VA surface for the buffer
+    VASurfaceID surface_backup;     // fallback surface
+    IntelMetadataBufferType type;   // how the buffer value is interpreted
+    int32_t value;                  // lookup key (buffer handle/pointer)
+    ValueInfo vinfo;                // geometry/stride/format of the buffer
+    bool added;                     // whether it was inserted into a list
+};
+
+// One in-flight encode job: the surfaces and coded buffer used for a
+// single frame, plus bookkeeping consumed by getOutput().
+struct EncodeTask {
+    VASurfaceID enc_surface;       // input surface submitted to the driver
+    VASurfaceID ref_surface;       // reference surface for this frame
+    VASurfaceID rec_surface;       // reconstructed-frame surface
+    VABufferID coded_buffer;       // destination for the encoded bitstream
+
+    FrameType type;                // picture type (I/P/IDR/...)
+    int flag;                      // ENCODE_BUFFERFLAG_* bits for the output
+    int64_t timestamp;  //corresponding input frame timestamp
+    void *priv;  //input buffer data
+
+    bool completed;  //if encode task is done complet by HW
+};
+
+/**
+ * Common base class for the libva-backed encoders (AVC/H263/MP4/VP8).
+ * Implements session lifecycle, parameter/config plumbing, input surface
+ * mapping and coded-buffer management; codec-specific work is delegated to
+ * the pure-virtual derived* / sendEncodeCommand hooks.
+ *
+ * NOTE(review): the base of IVideoEncoder defaults to private inheritance
+ * here — confirm whether "public IVideoEncoder" was intended.
+ */
+class VideoEncoderBase : IVideoEncoder {
+
+public:
+    VideoEncoderBase();
+    virtual ~VideoEncoderBase();
+
+    virtual Encode_Status start(void);
+    virtual void flush(void);
+    virtual Encode_Status stop(void);
+    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
+
+    /*
+    * getOutput can be called several time for a frame (such as first time codec data, and second time others)
+    * encoder will provide encoded data according to the format (whole frame, codec_data, sigle NAL etc)
+    * If the buffer passed to encoded is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL
+    * and caller should provide a big enough buffer and call again
+    */
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
+
+    virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig);
+    virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
+    virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
+
+protected:
+    // Codec-specific hooks implemented by the derived encoder classes.
+    virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0;
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0;
+    virtual Encode_Status updateFrameInfo(EncodeTask* task) ;
+
+    // Helpers that push dynamic parameters to the driver via misc buffers.
+    Encode_Status renderDynamicFrameRate();
+    Encode_Status renderDynamicBitrate(EncodeTask* task);
+    Encode_Status renderHrd();
+    Encode_Status queryProfileLevelConfig(VADisplay dpy, VAProfile profile);
+
+private:
+    void setDefaultParams(void);
+    Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer);
+    Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format,
+            uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr);
+    VASurfaceMap* findSurfaceMapByValue(intptr_t value);
+    Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid);
+    void PrepareFrameInfo(EncodeTask* task);
+
+    Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
+    Encode_Status cleanupForOutput();
+    Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
+    Encode_Status queryAutoReferenceConfig(VAProfile profile);
+    Encode_Status querySupportedSurfaceMemTypes();
+    Encode_Status copySurfaces(VASurfaceID srcId, VASurfaceID destId);
+    VASurfaceID CreateSurfaceFromExternalBuf(int32_t value, ValueInfo& vinfo);
+
+protected:
+
+    bool mInitialized;
+    bool mStarted;
+    VADisplay mVADisplay;
+    VAContextID mVAContext;
+    VAConfigID mVAConfig;
+    VAEntrypoint mVAEntrypoint;
+
+
+    VideoParamsCommon mComParams;
+    VideoParamsHRD mHrdParam;
+    VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers;
+
+    bool mNewHeader;
+
+    // mRender* flags mark parameters that must be (re)sent to the driver
+    // on the next encode cycle.
+    bool mRenderMaxSliceSize; //Max Slice Size
+    bool mRenderQP;
+    bool mRenderAIR;
+    bool mRenderCIR;
+    bool mRenderFrameRate;
+    bool mRenderBitRate;
+    bool mRenderHrd;
+    bool mRenderMaxFrameSize;
+    bool mRenderMultiTemporal;
+    bool mForceKFrame;
+
+    VABufferID mSeqParamBuf;
+    VABufferID mRcParamBuf;
+    VABufferID mFrameRateParamBuf;
+    VABufferID mPicParamBuf;
+    VABufferID mSliceParamBuf;
+    VASurfaceID* mAutoRefSurfaces;
+
+    android::List <VASurfaceMap *> mSrcSurfaceMapList;  //all mapped surface info list from input buffer
+    android::List <EncodeTask *> mEncodeTaskList;  //all encode tasks list
+    android::List <VABufferID> mVACodedBufferList;  //all available codedbuffer list
+
+    VASurfaceID mRefSurface;        //reference surface, only used in base
+    VASurfaceID mRecSurface;        //reconstructed surface, only used in base
+    uint32_t mFrameNum;
+    uint32_t mCodedBufSize;
+    bool mAutoReference;
+    uint32_t mAutoReferenceSurfaceNum;
+    uint32_t mEncPackedHeaders;
+    uint32_t mEncMaxRefFrames;
+
+    bool mSliceSizeOverflow;
+
+    //Current Outputting task
+    EncodeTask *mCurOutputTask;
+
+    //Current outputting CodedBuffer status
+    VABufferID mOutCodedBuffer;
+    bool mCodedBufferMapped;
+    uint8_t *mOutCodedBufferPtr;
+    VACodedBufferSegment *mCurSegment;
+    uint32_t mOffsetInSeg;
+    uint32_t mTotalSize;
+    uint32_t mTotalSizeCopied;
+    android::Mutex               mCodedBuffer_Lock, mEncodeTask_Lock;
+    android::Condition           mCodedBuffer_Cond, mEncodeTask_Cond;
+
+    bool mFrameSkipped;
+
+    //supported surface memory types
+    int mSupportedSurfaceMemType;
+
+    //VASurface mapping extra action
+    int mVASurfaceMappingAction;
+
+    // For Temporal Layer Bitrate FrameRate settings
+    VideoConfigTemperalLayerBitrateFramerate mTemporalLayerBitrateFramerate[3];
+
+#ifdef INTEL_VIDEO_XPROC_SHARING
+    uint32_t mSessionFlag;
+#endif
+};
+#endif /* __VIDEO_ENCODER_BASE_H__ */
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
new file mode 100644
index 0000000..d89d93a
--- /dev/null
+++ b/videoencoder/VideoEncoderDef.h
@@ -0,0 +1,731 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_DEF_H__
+#define __VIDEO_ENCODER_DEF_H__
+
+#include <stdint.h>
+
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+// NOTE(review): function-like macros evaluate their arguments more than once;
+// do not pass expressions with side effects, e.g. min(i++, j).
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+typedef int32_t Encode_Status;
+
+// Video encode error code
+enum {
+ ENCODE_INVALID_SURFACE = -11,
+ ENCODE_NO_REQUEST_DATA = -10,
+ ENCODE_WRONG_STATE = -9,
+ ENCODE_NOTIMPL = -8,
+ ENCODE_NO_MEMORY = -7,
+ ENCODE_NOT_INIT = -6,
+ ENCODE_DRIVER_FAIL = -5,
+ ENCODE_INVALID_PARAMS = -4,
+ ENCODE_NOT_SUPPORTED = -3,
+ ENCODE_NULL_PTR = -2,
+ ENCODE_FAIL = -1,
+ ENCODE_SUCCESS = 0,
+ ENCODE_ALREADY_INIT = 1,
+ ENCODE_SLICESIZE_OVERFLOW = 2,
+ ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data
+ ENCODE_DEVICE_BUSY = 4,
+ ENCODE_DATA_NOT_READY = 5,
+};
+
+typedef enum {
+ OUTPUT_EVERYTHING = 0, //Output whatever driver generates
+ OUTPUT_CODEC_DATA = 1,
+ OUTPUT_FRAME_DATA = 2, //Equal to OUTPUT_EVERYTHING when no header along with the frame data
+ OUTPUT_ONE_NAL = 4,
+ OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8,
+ OUTPUT_LENGTH_PREFIXED = 16,
+ OUTPUT_CODEDBUFFER = 32,
+ OUTPUT_NALULENGTHS_PREFIXED = 64,
+ OUTPUT_BUFFER_LAST
+} VideoOutputFormat;
+
+typedef enum {
+ RAW_FORMAT_NONE = 0,
+ RAW_FORMAT_YUV420 = 1,
+ RAW_FORMAT_YUV422 = 2,
+ RAW_FORMAT_YUV444 = 4,
+ RAW_FORMAT_NV12 = 8,
+ RAW_FORMAT_RGBA = 16,
+ RAW_FORMAT_OPAQUE = 32,
+ RAW_FORMAT_PROTECTED = 0x80000000,
+ RAW_FORMAT_LAST
+} VideoRawFormat;
+
+typedef enum {
+ RATE_CONTROL_NONE = 1,
+ RATE_CONTROL_CBR = 2,
+ RATE_CONTROL_VBR = 4,
+ RATE_CONTROL_VCM = 8,
+ RATE_CONTROL_LAST
+} VideoRateControl;
+
+typedef enum {
+ PROFILE_MPEG2SIMPLE = 0,
+ PROFILE_MPEG2MAIN,
+ PROFILE_MPEG4SIMPLE,
+ PROFILE_MPEG4ADVANCEDSIMPLE,
+ PROFILE_MPEG4MAIN,
+ PROFILE_H264BASELINE,
+ PROFILE_H264MAIN,
+ PROFILE_H264HIGH,
+ PROFILE_VC1SIMPLE,
+ PROFILE_VC1MAIN,
+ PROFILE_VC1ADVANCED,
+ PROFILE_H263BASELINE
+} VideoProfile;
+
+typedef enum {
+ AVC_DELIMITER_LENGTHPREFIX = 0,
+ AVC_DELIMITER_ANNEXB
+} AVCDelimiterType;
+
+typedef enum {
+ VIDEO_ENC_NONIR, // Non intra refresh
+ VIDEO_ENC_CIR, // Cyclic intra refresh
+ VIDEO_ENC_AIR, // Adaptive intra refresh
+ VIDEO_ENC_BOTH,
+ VIDEO_ENC_LAST
+} VideoIntraRefreshType;
+
+enum VideoBufferSharingMode {
+ BUFFER_SHARING_NONE = 1, //Means non shared buffer mode
+ BUFFER_SHARING_CI = 2,
+ BUFFER_SHARING_V4L2 = 4,
+ BUFFER_SHARING_SURFACE = 8,
+ BUFFER_SHARING_USRPTR = 16,
+ BUFFER_SHARING_GFXHANDLE = 32,
+ BUFFER_SHARING_KBUFHANDLE = 64,
+ BUFFER_LAST
+};
+
+typedef enum {
+ FTYPE_UNKNOWN = 0, // Unknown
+ FTYPE_I = 1, // General I-frame type
+ FTYPE_P = 2, // General P-frame type
+ FTYPE_B = 3, // General B-frame type
+ FTYPE_SI = 4, // H.263 SI-frame type
+ FTYPE_SP = 5, // H.263 SP-frame type
+ FTYPE_EI = 6, // H.264 EI-frame type
+ FTYPE_EP = 7, // H.264 EP-frame type
+ FTYPE_S = 8, // MPEG-4 S-frame type
+ FTYPE_IDR = 9, // IDR-frame type
+}FrameType;
+
+//function call mode
+#define FUNC_BLOCK 0xFFFFFFFF
+#define FUNC_NONBLOCK 0
+
+// Output buffer flag
+#define ENCODE_BUFFERFLAG_ENDOFFRAME 0x00000001
+#define ENCODE_BUFFERFLAG_PARTIALFRAME 0x00000002
+#define ENCODE_BUFFERFLAG_SYNCFRAME 0x00000004
+#define ENCODE_BUFFERFLAG_CODECCONFIG 0x00000008
+#define ENCODE_BUFFERFLAG_DATACORRUPT 0x00000010
+#define ENCODE_BUFFERFLAG_DATAINVALID 0x00000020
+#define ENCODE_BUFFERFLAG_SLICEOVERFOLOW 0x00000040
+#define ENCODE_BUFFERFLAG_ENDOFSTREAM 0x00000080
+#define ENCODE_BUFFERFLAG_NSTOPFRAME 0x00000100
+
+typedef struct {
+ uint8_t *data;
+ uint32_t bufferSize; //buffer size
+ uint32_t dataSize; //actual size
+ uint32_t offset; //buffer offset
+ uint32_t remainingSize;
+ int flag; //Key frame, Codec Data etc
+ VideoOutputFormat format; //output format
+ int64_t timeStamp; //reserved
+ FrameType type;
+ void *priv; //indicate corresponding input data
+} VideoEncOutputBuffer;
+
+typedef struct {
+ uint8_t *data;
+ uint32_t size;
+ bool bufAvailable; //To indicate whether this buffer can be reused
+ int64_t timeStamp; //reserved
+ FrameType type; //frame type expected to be encoded
+ int flag; // flag to indicate buffer property
+ void *priv; //indicate corresponding input data
+} VideoEncRawBuffer;
+
+struct VideoEncSurfaceBuffer {
+ VASurfaceID surface;
+ uint8_t *usrptr;
+ uint32_t index;
+ bool bufAvailable;
+ VideoEncSurfaceBuffer *next;
+};
+
+struct CirParams {
+ uint32_t cir_num_mbs;
+
+ // Member-wise assignment; self-assignment is treated as a harmless no-op.
+ CirParams &operator=(const CirParams &other) {
+ if (this != &other) {
+ cir_num_mbs = other.cir_num_mbs;
+ }
+ return *this;
+ }
+};
+
+struct AirParams {
+ uint32_t airMBs;
+ uint32_t airThreshold;
+ uint32_t airAuto;
+
+ // Member-wise assignment; self-assignment is treated as a harmless no-op.
+ AirParams &operator=(const AirParams &other) {
+ if (this != &other) {
+ airMBs = other.airMBs;
+ airThreshold = other.airThreshold;
+ airAuto = other.airAuto;
+ }
+ return *this;
+ }
+};
+
+struct VideoFrameRate {
+ uint32_t frameRateNum;
+ uint32_t frameRateDenom;
+
+ VideoFrameRate &operator=(const VideoFrameRate &other) {
+ if (this == &other) return *this;
+
+ this->frameRateNum = other.frameRateNum;
+ this->frameRateDenom = other.frameRateDenom;
+ return *this;
+ }
+};
+
+struct VideoResolution {
+ uint32_t width;
+ uint32_t height;
+
+ VideoResolution &operator=(const VideoResolution &other) {
+ if (this == &other) return *this;
+
+ this->width = other.width;
+ this->height = other.height;
+ return *this;
+ }
+};
+
+// Rate-control settings handed to the driver (bitrate, QP bounds, HRD window).
+// I_minQP/I_maxQP bound intra-frame QP separately from the general min/maxQP.
+struct VideoRateControlParams {
+ uint32_t bitRate;
+ uint32_t initQP;
+ uint32_t minQP;
+ uint32_t maxQP;
+ uint32_t I_minQP;
+ uint32_t I_maxQP;
+ uint32_t windowSize;
+ uint32_t targetPercentage;
+ uint32_t disableFrameSkip;
+ uint32_t disableBitsStuffing;
+ uint32_t enableIntraFrameQPControl;
+ uint32_t temporalFrameRate;
+ uint32_t temporalID;
+
+ // Member-wise copy of every field above.
+ VideoRateControlParams &operator=(const VideoRateControlParams &other) {
+ if (this == &other) return *this;
+
+ this->bitRate = other.bitRate;
+ this->initQP = other.initQP;
+ this->minQP = other.minQP;
+ this->maxQP = other.maxQP;
+ this->I_minQP = other.I_minQP;
+ this->I_maxQP = other.I_maxQP;
+ this->windowSize = other.windowSize;
+ this->targetPercentage = other.targetPercentage;
+ this->disableFrameSkip = other.disableFrameSkip;
+ this->disableBitsStuffing = other.disableBitsStuffing;
+ this->enableIntraFrameQPControl = other.enableIntraFrameQPControl;
+ this->temporalFrameRate = other.temporalFrameRate;
+ this->temporalID = other.temporalID;
+
+ return *this;
+ }
+};
+
+struct SliceNum {
+ uint32_t iSliceNum;
+ uint32_t pSliceNum;
+
+ SliceNum &operator=(const SliceNum &other) {
+ if (this == &other) return *this;
+
+ this->iSliceNum = other.iSliceNum;
+ this->pSliceNum= other.pSliceNum;
+ return *this;
+ }
+};
+
+typedef struct {
+ uint32_t realWidth;
+ uint32_t realHeight;
+ uint32_t lumaStride;
+ uint32_t chromStride;
+ uint32_t format;
+} ExternalBufferAttrib;
+
+struct Cropping {
+ uint32_t LeftOffset;
+ uint32_t RightOffset;
+ uint32_t TopOffset;
+ uint32_t BottomOffset;
+
+ Cropping &operator=(const Cropping &other) {
+ if (this == &other) return *this;
+
+ this->LeftOffset = other.LeftOffset;
+ this->RightOffset = other.RightOffset;
+ this->TopOffset = other.TopOffset;
+ this->BottomOffset = other.BottomOffset;
+ return *this;
+ }
+};
+
+struct SamplingAspectRatio {
+ uint16_t SarWidth;
+ uint16_t SarHeight;
+
+ SamplingAspectRatio &operator=(const SamplingAspectRatio &other) {
+ if (this == &other) return *this;
+
+ this->SarWidth = other.SarWidth;
+ this->SarHeight = other.SarHeight;
+ return *this;
+ }
+};
+
+// Type tag identifying the concrete VideoParamConfigSet subclass carried in a
+// get/setParameters or get/setConfig call.
+enum VideoParamConfigType {
+ VideoParamsTypeStartUnused = 0x01000000,
+ VideoParamsTypeCommon,
+ VideoParamsTypeAVC,
+ VideoParamsTypeH263,
+ VideoParamsTypeMP4,
+ VideoParamsTypeVC1,
+ VideoParamsTypeUpSteamBuffer,
+ VideoParamsTypeUsrptrBuffer,
+ VideoParamsTypeHRD,
+ VideoParamsTypeStoreMetaDataInBuffers,
+ VideoParamsTypeProfileLevel,
+ VideoParamsTypeVP8,
+ VideoParamsTypeTemporalLayer,
+
+ VideoConfigTypeFrameRate,
+ VideoConfigTypeBitRate,
+ VideoConfigTypeResolution,
+ VideoConfigTypeIntraRefreshType,
+ VideoConfigTypeAIR,
+ VideoConfigTypeCyclicFrameInterval,
+ VideoConfigTypeAVCIntraPeriod,
+ VideoConfigTypeNALSize,
+ VideoConfigTypeIDRRequest,
+ VideoConfigTypeSliceNum,
+ VideoConfigTypeVP8,
+ VideoConfigTypeVP8ReferenceFrame,
+ VideoConfigTypeCIR,
+ VideoConfigTypeVP8MaxFrameSizeRatio,
+ // NOTE(review): "Temperal" is a misspelling of "Temporal", kept as-is
+ // because callers reference this enumerator by name.
+ VideoConfigTypeTemperalLayerBitrateFramerate,
+
+ VideoParamsConfigExtension
+};
+
+struct VideoParamConfigSet {
+ VideoParamConfigType type;
+ uint32_t size;
+
+ VideoParamConfigSet &operator=(const VideoParamConfigSet &other) {
+ if (this == &other) return *this;
+ this->type = other.type;
+ this->size = other.size;
+ return *this;
+ }
+};
+
+// Common encoder parameters shared by all codecs (profile, resolution, frame
+// rate, rate control, intra refresh, temporal layers).
+struct VideoParamsCommon : VideoParamConfigSet {
+
+ VAProfile profile;
+ uint8_t level;
+ VideoRawFormat rawFormat;
+ VideoResolution resolution;
+ VideoFrameRate frameRate;
+ int32_t intraPeriod;
+ VideoRateControl rcMode;
+ VideoRateControlParams rcParams;
+ VideoIntraRefreshType refreshType;
+ int32_t cyclicFrameInterval;
+ AirParams airParams;
+ CirParams cirParams;
+ uint32_t disableDeblocking;
+ bool syncEncMode;
+ //CodedBuffer properties
+ uint32_t codedBufNum;
+ uint32_t numberOfLayer;
+ uint32_t nPeriodicity;
+ uint32_t nLayerID[32];
+
+ VideoParamsCommon() {
+ type = VideoParamsTypeCommon;
+ size = sizeof(VideoParamsCommon);
+ }
+
+ VideoParamsCommon &operator=(const VideoParamsCommon &other) {
+ if (this == &other) return *this;
+
+ VideoParamConfigSet::operator=(other);
+ this->profile = other.profile;
+ this->level = other.level;
+ this->rawFormat = other.rawFormat;
+ this->resolution = other.resolution;
+ this->frameRate = other.frameRate;
+ this->intraPeriod = other.intraPeriod;
+ this->rcMode = other.rcMode;
+ this->rcParams = other.rcParams;
+ this->refreshType = other.refreshType;
+ this->cyclicFrameInterval = other.cyclicFrameInterval;
+ this->airParams = other.airParams;
+ // BUG FIX: cirParams, nPeriodicity and nLayerID were previously not
+ // copied here, so assignment silently dropped the CIR and
+ // temporal-layer settings of the source object.
+ this->cirParams = other.cirParams;
+ this->disableDeblocking = other.disableDeblocking;
+ this->syncEncMode = other.syncEncMode;
+ this->codedBufNum = other.codedBufNum;
+ this->numberOfLayer = other.numberOfLayer;
+ this->nPeriodicity = other.nPeriodicity;
+ for (uint32_t i = 0; i < 32; i++)
+ this->nLayerID[i] = other.nLayerID[i];
+ return *this;
+ }
+};
+
+// H.264/AVC-specific encoder parameters (slice layout, reference handling,
+// entropy coding, cropping and aspect-ratio signalling).
+struct VideoParamsAVC : VideoParamConfigSet {
+ uint32_t basicUnitSize; //for rate control
+ uint8_t VUIFlag;
+ int32_t maxSliceSize;
+ uint32_t idrInterval;
+ uint32_t ipPeriod;
+ uint32_t refFrames;
+ SliceNum sliceNum;
+ AVCDelimiterType delimiterType;
+ Cropping crop;
+ SamplingAspectRatio SAR;
+ uint32_t refIdx10ActiveMinus1;
+ uint32_t refIdx11ActiveMinus1;
+ bool bFrameMBsOnly;
+ bool bMBAFF;
+ bool bEntropyCodingCABAC;
+ bool bWeightedPPrediction;
+ uint32_t weightedBipredicitonMode;
+ bool bConstIpred ;
+ bool bDirect8x8Inference;
+ bool bDirectSpatialTemporal;
+ uint32_t cabacInitIdc;
+
+ VideoParamsAVC() {
+ type = VideoParamsTypeAVC;
+ size = sizeof(VideoParamsAVC);
+ }
+
+ // Member-wise copy. The nested crop and SAR structs are delegated to
+ // their own assignment operators instead of being copied field by field.
+ VideoParamsAVC &operator=(const VideoParamsAVC &other) {
+ if (this != &other) {
+ VideoParamConfigSet::operator=(other);
+ basicUnitSize = other.basicUnitSize;
+ VUIFlag = other.VUIFlag;
+ maxSliceSize = other.maxSliceSize;
+ idrInterval = other.idrInterval;
+ ipPeriod = other.ipPeriod;
+ refFrames = other.refFrames;
+ sliceNum = other.sliceNum;
+ delimiterType = other.delimiterType;
+ crop = other.crop;
+ SAR = other.SAR;
+ refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1;
+ refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1;
+ bFrameMBsOnly = other.bFrameMBsOnly;
+ bMBAFF = other.bMBAFF;
+ bEntropyCodingCABAC = other.bEntropyCodingCABAC;
+ bWeightedPPrediction = other.bWeightedPPrediction;
+ weightedBipredicitonMode = other.weightedBipredicitonMode;
+ bConstIpred = other.bConstIpred;
+ bDirect8x8Inference = other.bDirect8x8Inference;
+ bDirectSpatialTemporal = other.bDirectSpatialTemporal;
+ cabacInitIdc = other.cabacInitIdc;
+ }
+ return *this;
+ }
+};
+
+struct VideoParamsUpstreamBuffer : VideoParamConfigSet {
+
+ VideoParamsUpstreamBuffer() {
+ type = VideoParamsTypeUpSteamBuffer;
+ size = sizeof(VideoParamsUpstreamBuffer);
+ }
+
+ VideoBufferSharingMode bufferMode;
+ intptr_t *bufList;
+ uint32_t bufCnt;
+ ExternalBufferAttrib *bufAttrib;
+ void *display;
+};
+
+struct VideoParamsUsrptrBuffer : VideoParamConfigSet {
+
+ VideoParamsUsrptrBuffer() {
+ type = VideoParamsTypeUsrptrBuffer;
+ size = sizeof(VideoParamsUsrptrBuffer);
+ }
+
+ //input
+ uint32_t width;
+ uint32_t height;
+ uint32_t format;
+ uint32_t expectedSize;
+
+ //output
+ uint32_t actualSize;
+ uint32_t stride;
+ uint8_t *usrPtr;
+};
+
+struct VideoParamsHRD : VideoParamConfigSet {
+
+ VideoParamsHRD() {
+ type = VideoParamsTypeHRD;
+ size = sizeof(VideoParamsHRD);
+ }
+
+ uint32_t bufferSize;
+ uint32_t initBufferFullness;
+};
+
+struct VideoParamsStoreMetaDataInBuffers : VideoParamConfigSet {
+
+ VideoParamsStoreMetaDataInBuffers() {
+ type = VideoParamsTypeStoreMetaDataInBuffers;
+ size = sizeof(VideoParamsStoreMetaDataInBuffers);
+ }
+
+ bool isEnabled;
+};
+
+struct VideoParamsProfileLevel : VideoParamConfigSet {
+
+ VideoParamsProfileLevel() {
+ type = VideoParamsTypeProfileLevel;
+ size = sizeof(VideoParamsProfileLevel);
+ }
+
+ VAProfile profile;
+ uint32_t level;
+ bool isSupported;
+};
+
+struct VideoParamsTemporalLayer : VideoParamConfigSet {
+
+ VideoParamsTemporalLayer() {
+ type = VideoParamsTypeTemporalLayer;
+ size = sizeof(VideoParamsTemporalLayer);
+ }
+
+ uint32_t numberOfLayer;
+ uint32_t nPeriodicity;
+ uint32_t nLayerID[32];
+};
+
+
+struct VideoConfigFrameRate : VideoParamConfigSet {
+
+ VideoConfigFrameRate() {
+ type = VideoConfigTypeFrameRate;
+ size = sizeof(VideoConfigFrameRate);
+ }
+
+ VideoFrameRate frameRate;
+};
+
+struct VideoConfigBitRate : VideoParamConfigSet {
+
+ VideoConfigBitRate() {
+ type = VideoConfigTypeBitRate;
+ size = sizeof(VideoConfigBitRate);
+ }
+
+ VideoRateControlParams rcParams;
+};
+
+struct VideoConfigAVCIntraPeriod : VideoParamConfigSet {
+
+ VideoConfigAVCIntraPeriod() {
+ type = VideoConfigTypeAVCIntraPeriod;
+ size = sizeof(VideoConfigAVCIntraPeriod);
+ }
+
+ uint32_t idrInterval; //How many Intra frame will have a IDR frame
+ uint32_t intraPeriod;
+ uint32_t ipPeriod;
+};
+
+struct VideoConfigNALSize : VideoParamConfigSet {
+
+ VideoConfigNALSize() {
+ type = VideoConfigTypeNALSize;
+ size = sizeof(VideoConfigNALSize);
+ }
+
+ uint32_t maxSliceSize;
+};
+
+struct VideoConfigResolution : VideoParamConfigSet {
+
+ VideoConfigResolution() {
+ type = VideoConfigTypeResolution;
+ size = sizeof(VideoConfigResolution);
+ }
+
+ VideoResolution resolution;
+};
+
+struct VideoConfigIntraRefreshType : VideoParamConfigSet {
+
+ VideoConfigIntraRefreshType() {
+ type = VideoConfigTypeIntraRefreshType;
+ size = sizeof(VideoConfigIntraRefreshType);
+ }
+
+ VideoIntraRefreshType refreshType;
+};
+
+struct VideoConfigCyclicFrameInterval : VideoParamConfigSet {
+
+ VideoConfigCyclicFrameInterval() {
+ type = VideoConfigTypeCyclicFrameInterval;
+ size = sizeof(VideoConfigCyclicFrameInterval);
+ }
+
+ int32_t cyclicFrameInterval;
+};
+
+struct VideoConfigCIR : VideoParamConfigSet {
+
+ VideoConfigCIR() {
+ type = VideoConfigTypeCIR;
+ size = sizeof(VideoConfigCIR);
+ }
+
+ CirParams cirParams;
+};
+
+struct VideoConfigAIR : VideoParamConfigSet {
+
+ VideoConfigAIR() {
+ type = VideoConfigTypeAIR;
+ size = sizeof(VideoConfigAIR);
+ }
+
+ AirParams airParams;
+};
+
+struct VideoConfigSliceNum : VideoParamConfigSet {
+
+ VideoConfigSliceNum() {
+ type = VideoConfigTypeSliceNum;
+ size = sizeof(VideoConfigSliceNum);
+ }
+
+ SliceNum sliceNum;
+};
+
+struct VideoParamsVP8 : VideoParamConfigSet {
+
+ uint32_t profile;
+ uint32_t error_resilient;
+ uint32_t num_token_partitions;
+ uint32_t kf_auto;
+ uint32_t kf_min_dist;
+ uint32_t kf_max_dist;
+ uint32_t min_qp;
+ uint32_t max_qp;
+ uint32_t init_qp;
+ uint32_t rc_undershoot;
+ uint32_t rc_overshoot;
+ uint32_t hrd_buf_size;
+ uint32_t hrd_buf_initial_fullness;
+ uint32_t hrd_buf_optimal_fullness;
+ uint32_t max_frame_size_ratio;
+
+ VideoParamsVP8() {
+ type = VideoParamsTypeVP8;
+ size = sizeof(VideoParamsVP8);
+ }
+};
+
+struct VideoConfigVP8 : VideoParamConfigSet {
+
+ uint32_t force_kf;
+ uint32_t refresh_entropy_probs;
+ uint32_t value;
+ unsigned char sharpness_level;
+
+ VideoConfigVP8 () {
+ type = VideoConfigTypeVP8;
+ size = sizeof(VideoConfigVP8);
+ }
+};
+
+struct VideoConfigVP8ReferenceFrame : VideoParamConfigSet {
+
+ uint32_t no_ref_last;
+ uint32_t no_ref_gf;
+ uint32_t no_ref_arf;
+ uint32_t refresh_last;
+ uint32_t refresh_golden_frame;
+ uint32_t refresh_alternate_frame;
+
+ VideoConfigVP8ReferenceFrame () {
+ type = VideoConfigTypeVP8ReferenceFrame;
+ size = sizeof(VideoConfigVP8ReferenceFrame);
+ }
+};
+
+struct VideoConfigVP8MaxFrameSizeRatio : VideoParamConfigSet {
+
+ VideoConfigVP8MaxFrameSizeRatio() {
+ type = VideoConfigTypeVP8MaxFrameSizeRatio;
+ size = sizeof(VideoConfigVP8MaxFrameSizeRatio);
+ }
+
+ uint32_t max_frame_size_ratio;
+};
+
+struct VideoConfigTemperalLayerBitrateFramerate : VideoParamConfigSet {
+
+ VideoConfigTemperalLayerBitrateFramerate() {
+ type = VideoConfigTypeTemperalLayerBitrateFramerate;
+ size = sizeof(VideoConfigTemperalLayerBitrateFramerate);
+ }
+
+ uint32_t nLayerID;
+ uint32_t bitRate;
+ uint32_t frameRate;
+};
+
+#endif /* __VIDEO_ENCODER_DEF_H__ */
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
new file mode 100644
index 0000000..b9f3a9c
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -0,0 +1,178 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderH263.h"
+#include <va/va_tpi.h>
+
+VideoEncoderH263::VideoEncoderH263() {
+ // NOTE(review): PROFILE_H263BASELINE is a libmix VideoProfile value cast
+ // into VAProfile here; presumably the base class translates it to a real
+ // libva profile before use -- confirm in VideoEncoderBase.
+ mComParams.profile = (VAProfile)PROFILE_H263BASELINE;
+ // Driver-managed (auto) reference mode uses two surfaces.
+ mAutoReferenceSurfaceNum = 2;
+}
+
+// Submits the H.263 parameter buffers for one encode task: sequence params
+// (first frame only), then picture and slice params for every frame.
+// Returns ENCODE_SUCCESS or the first failing render step's error code.
+Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ // Sequence parameters only need to be sent once, before the first frame.
+ if (mFrameNum == 0) {
+ ret = renderSequenceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ ret = renderSliceParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+ LOG_V( "End\n");
+ return ENCODE_SUCCESS;
+}
+
+
+// Builds and renders the H.263 sequence parameter buffer (bitrate, frame
+// rate, QP bounds, intra period) from mComParams. The EncodeTask argument is
+// unused. Returns ENCODE_SUCCESS or ENCODE_DRIVER_FAIL on a libva error.
+Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncSequenceParameterBufferH263 h263SequenceParam = VAEncSequenceParameterBufferH263();
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ LOG_V( "Begin\n\n");
+ //set up the sequence params for HW
+ h263SequenceParam.bits_per_second= mComParams.rcParams.bitRate;
+ // Integer frames/sec, rounded to nearest by adding denom/2 before dividing.
+ h263SequenceParam.frame_rate =
+ (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom; //hard-coded, driver need;
+ h263SequenceParam.initial_qp = mComParams.rcParams.initQP;
+ h263SequenceParam.min_qp = mComParams.rcParams.minQP;
+ h263SequenceParam.intra_period = mComParams.intraPeriod;
+
+ //h263_seq_param.fixed_vop_rate = 30;
+
+ LOG_V("===h263 sequence params===\n");
+ LOG_I( "bitrate = %d\n", h263SequenceParam.bits_per_second);
+ LOG_I( "frame_rate = %d\n", h263SequenceParam.frame_rate);
+ LOG_I( "initial_qp = %d\n", h263SequenceParam.initial_qp);
+ LOG_I( "min_qp = %d\n", h263SequenceParam.min_qp);
+ LOG_I( "intra_period = %d\n\n", h263SequenceParam.intra_period);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSequenceParameterBufferType,
+ sizeof(h263SequenceParam),
+ 1, &h263SequenceParam,
+ &mSeqParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+// Builds and renders the per-frame H.263 picture parameter buffer: reference
+// and reconstructed surfaces, the task's coded buffer, resolution, and the
+// picture type (intra for FTYPE_I, predictive otherwise).
+// Returns ENCODE_SUCCESS or ENCODE_DRIVER_FAIL on a libva error.
+Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncPictureParameterBufferH263 h263PictureParams = VAEncPictureParameterBufferH263();
+
+ LOG_V( "Begin\n\n");
+
+ // set picture params for HW
+ if(mAutoReference == false){
+ // Caller-managed references: surfaces come with the task.
+ h263PictureParams.reference_picture = task->ref_surface;
+ h263PictureParams.reconstructed_picture = task->rec_surface;
+ }else {
+ // Auto-reference mode: use the pre-allocated surfaces owned by the base
+ // class ([0] = reference, [1] = reconstructed).
+ h263PictureParams.reference_picture = mAutoRefSurfaces[0];
+ h263PictureParams.reconstructed_picture = mAutoRefSurfaces[1];
+ }
+
+ h263PictureParams.coded_buf = task->coded_buffer;
+ h263PictureParams.picture_width = mComParams.resolution.width;
+ h263PictureParams.picture_height = mComParams.resolution.height;
+ h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+ LOG_V("======h263 picture params======\n");
+ LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture);
+ LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture);
+ LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf);
+// LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+ LOG_I( "picture_width = %d\n", h263PictureParams.picture_width);
+ LOG_I( "picture_height = %d\n",h263PictureParams.picture_height);
+ LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type);
+
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncPictureParameterBufferType,
+ sizeof(h263PictureParams),
+ 1,&h263PictureParams,
+ &mPicParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf , 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V( "end\n");
+ return ENCODE_SUCCESS;
+}
+
+// Builds and renders a single slice parameter buffer covering the whole
+// picture. The buffer is created empty, mapped, filled in place, unmapped and
+// rendered. Returns ENCODE_SUCCESS or ENCODE_DRIVER_FAIL on a libva error.
+Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) {
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ uint32_t sliceHeight;
+ uint32_t sliceHeightInMB;
+
+ LOG_V("Begin\n\n");
+
+ // Round the picture height up to a multiple of 16 (one macroblock row),
+ // then express the single slice's height in macroblocks.
+ sliceHeight = mComParams.resolution.height;
+ sliceHeight += 15;
+ sliceHeight &= (~15);
+ sliceHeightInMB = sliceHeight / 16;
+
+ // Create the buffer with no initial data; it is filled via map/unmap below.
+ vaStatus = vaCreateBuffer(
+ mVADisplay, mVAContext,
+ VAEncSliceParameterBufferType,
+ sizeof(VAEncSliceParameterBuffer),
+ 1, NULL, &mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ VAEncSliceParameterBuffer *sliceParams;
+ vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
+ CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+ // starting MB row number for this slice
+ sliceParams->start_row_number = 0;
+ // slice height measured in MB
+ sliceParams->slice_height = sliceHeightInMB;
+ sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
+ sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+ LOG_V("======h263 slice params======\n");
+ LOG_I("start_row_number = %d\n", (int) sliceParams->start_row_number);
+ LOG_I("slice_height_in_mb = %d\n", (int) sliceParams->slice_height);
+ LOG_I("slice.is_intra = %d\n", (int) sliceParams->slice_flags.bits.is_intra);
+
+ vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
+ CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+ LOG_V("end\n");
+ return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h
new file mode 100644
index 0000000..4d0e7a2
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.h
@@ -0,0 +1,57 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_H263_H__
+#define __VIDEO_ENCODER_H263_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * H.263 Encoder class, derived from VideoEncoderBase
+ */
+class VideoEncoderH263: public VideoEncoderBase {
+public:
+ VideoEncoderH263();
+ virtual ~VideoEncoderH263() {};
+
+protected:
+ // Sends sequence (first frame only), picture and slice parameter buffers
+ // for one encode task to the driver.
+ virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+ // H.263 has no codec-specific parameters or runtime configs, so the
+ // derived hooks below are intentional no-ops.
+ virtual Encode_Status derivedSetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetParams(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedGetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ virtual Encode_Status derivedSetConfig(VideoParamConfigSet *) {
+ return ENCODE_SUCCESS;
+ }
+ // Extended output formats (e.g. codec data extraction) are not supported
+ // for H.263.
+ virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *) {
+ return ENCODE_NOT_SUPPORTED;
+ }
+ //virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
+ // Local Methods
+private:
+ Encode_Status renderSequenceParams(EncodeTask *task);
+ Encode_Status renderPictureParams(EncodeTask *task);
+ Encode_Status renderSliceParams(EncodeTask *task);
+};
+
+#endif /* __VIDEO_ENCODER_H263_H__ */
+
diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp
new file mode 100644
index 0000000..e4ea968
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.cpp
@@ -0,0 +1,76 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoEncoderMP4.h"
+#include "VideoEncoderH263.h"
+#include "VideoEncoderAVC.h"
+#include "VideoEncoderVP8.h"
+#ifndef IMG_GFX
+#include "PVSoftMPEG4Encoder.h"
+#endif
+#include "VideoEncoderHost.h"
+#include <string.h>
+#include <cutils/properties.h>
+#include <wrs_omxil_core/log.h>
+
+int32_t gLogLevel = 0;
+
+// Factory: maps a MIME type string (case-insensitive) to a newly allocated
+// encoder instance. Returns NULL for a NULL or unrecognized MIME type.
+// The caller owns the result and must free it with releaseVideoEncoder().
+IVideoEncoder *createVideoEncoder(const char *mimeType) {
+
+ char logLevelProp[PROPERTY_VALUE_MAX];
+
+ // Debug verbosity is set at runtime via the "libmix.debug" system property.
+ if (property_get("libmix.debug", logLevelProp, NULL)) {
+ gLogLevel = atoi(logLevelProp);
+ LOGD("Debug level is %d", gLogLevel);
+ }
+
+ if (mimeType == NULL) {
+ LOGE("NULL mime type");
+ return NULL;
+ }
+
+ if (strcasecmp(mimeType, "video/avc") == 0 ||
+ strcasecmp(mimeType, "video/h264") == 0) {
+ VideoEncoderAVC *p = new VideoEncoderAVC();
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/h263") == 0) {
+// IMG graphics builds use the hardware encoder; other builds fall back to the
+// PV software encoder.
+#ifdef IMG_GFX
+ VideoEncoderH263 *p = new VideoEncoderH263();
+#else
+ PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.h263.encoder");
+#endif
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/mpeg4") == 0 ||
+ strcasecmp(mimeType, "video/mp4v-es") == 0) {
+#ifdef IMG_GFX
+ VideoEncoderMP4 *p = new VideoEncoderMP4();
+#else
+ PVSoftMPEG4Encoder *p = new PVSoftMPEG4Encoder("OMX.google.mpeg4.encoder");
+#endif
+ return (IVideoEncoder *)p;
+ } else if (strcasecmp(mimeType, "video/x-vnd.on2.vp8") == 0) {
+ VideoEncoderVP8 *p = new VideoEncoderVP8();
+ return (IVideoEncoder *)p;
+ } else {
+ LOGE ("Unknown mime type: %s", mimeType);
+ }
+ return NULL;
+}
+
+// Destroys an encoder created by createVideoEncoder().
+// Safe to call with NULL: deleting a null pointer is a no-op in C++.
+void releaseVideoEncoder(IVideoEncoder *p) {
+    delete p;
+}
+
diff --git a/videoencoder/VideoEncoderHost.h b/videoencoder/VideoEncoderHost.h
new file mode 100644
index 0000000..ad5df6e
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.h
@@ -0,0 +1,25 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_ENCODER_HOST_H_
+#define VIDEO_ENCODER_HOST_H_
+
+#include "VideoEncoderInterface.h"
+
+IVideoEncoder *createVideoEncoder(const char *mimeType);
+void releaseVideoEncoder(IVideoEncoder *p);
+
+#endif /* VIDEO_ENCODER_HOST_H_ */
\ No newline at end of file
diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h
new file mode 100644
index 0000000..00604ce
--- /dev/null
+++ b/videoencoder/VideoEncoderInterface.h
@@ -0,0 +1,37 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef VIDEO_ENCODER_INTERFACE_H_
+#define VIDEO_ENCODER_INTERFACE_H_
+
+#include "VideoEncoderDef.h"
+
+/**
+ * Abstract interface implemented by every libmix video encoder.
+ * All methods return an Encode_Status code (ENCODE_SUCCESS on success).
+ */
+class IVideoEncoder {
+public:
+ virtual ~IVideoEncoder() {};
+ virtual Encode_Status start(void) = 0;
+ virtual Encode_Status stop(void) = 0;
+ // Discards any pending input/output state.
+ virtual void flush(void) = 0;
+ // timeout takes FUNC_BLOCK or FUNC_NONBLOCK (defined in VideoEncoderDef.h).
+ virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
+ virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
+ // The VideoParamConfigSet argument's type/size fields select which
+ // parameter or config struct is being read or written.
+ virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0;
+ virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0;
+ // Reports the worst-case coded output size for the current configuration.
+ virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0;
+};
+
+#endif /* VIDEO_ENCODER_INTERFACE_H_ */
diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
new file mode 100644
index 0000000..c38eb94
--- /dev/null
+++ b/videoencoder/VideoEncoderLog.h
@@ -0,0 +1,61 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_LOG_H__
+#define __VIDEO_ENCODER_LOG_H__
+
+// Logging and status-check helpers shared by all encoder translation units.
+#define LOG_TAG "VideoEncoder"
+
+#include <wrs_omxil_core/log.h>
+
+// Thin aliases over the Android log macros so call sites stay short.
+#define LOG_V ALOGV
+#define LOG_D ALOGD
+#define LOG_I ALOGI
+#define LOG_W ALOGW
+#define LOG_E ALOGE
+
+// Runtime log level; defined in one of the encoder sources.
+extern int32_t gLogLevel;
+// NOTE: the CHECK_* macros below rely on locals in the *caller* scope
+// ('vaStatus' or 'ret', and a CLEAN_UP label for the *_CLEANUP variants),
+// and FUNC must be a string literal (it is concatenated into the format).
+#define CHECK_VA_STATUS_RETURN(FUNC)\
+    if (vaStatus != VA_STATUS_SUCCESS) {\
+        LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+        return ENCODE_DRIVER_FAIL;\
+    }
+
+#define CHECK_VA_STATUS_GOTO_CLEANUP(FUNC)\
+    if (vaStatus != VA_STATUS_SUCCESS) {\
+        LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+        ret = ENCODE_DRIVER_FAIL; \
+        goto CLEAN_UP;\
+    }
+
+#define CHECK_ENCODE_STATUS_RETURN(FUNC)\
+    if (ret != ENCODE_SUCCESS) { \
+        LOG_E(FUNC"Failed. ret = 0x%08x\n", ret); \
+        return ret; \
+    }
+
+#define CHECK_ENCODE_STATUS_CLEANUP(FUNC)\
+    if (ret != ENCODE_SUCCESS) { \
+        LOG_E(FUNC"Failed, ret = 0x%08x\n", ret); \
+        goto CLEAN_UP;\
+    }
+
+#define CHECK_NULL_RETURN_IFFAIL(POINTER)\
+    if (POINTER == NULL) { \
+        LOG_E("Invalid pointer\n"); \
+        return ENCODE_NULL_PTR;\
+    }
+#endif /* __VIDEO_ENCODER_LOG_H__ */
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
new file mode 100644
index 0000000..b414c1d
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -0,0 +1,281 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "VideoEncoderLog.h"
+#include "VideoEncoderMP4.h"
+#include <va/va_tpi.h>
+
+// Constructor: selects the MPEG-4 Simple profile and sets
+// profile_and_level_indication to 3; two surfaces are reserved for the
+// driver-managed (auto) reference case.
+VideoEncoderMP4::VideoEncoderMP4()
+    :mProfileLevelIndication(3)
+    ,mFixedVOPTimeIncrement(0) {
+    mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE;
+    mAutoReferenceSurfaceNum = 2;
+}
+
+// Scan the coded buffer for the first VOP (00 00 01 B6) or GOV (00 00 01 B3)
+// start code.  *headerSize receives the number of bytes that precede it,
+// i.e. the size of the sequence-header / codec-config data, or 0 when no
+// start code is present.  Returns ENCODE_FAIL only for a too-small buffer.
+Encode_Status VideoEncoderMP4::getHeaderPos(
+        uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) {
+
+    uint32_t bytesLeft = bufSize;
+
+    *headerSize = 0;
+    CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+    if (bufSize < 4) {
+        // need at least one full 4-byte start code
+        LOG_E("Buffer size too small\n");
+        return ENCODE_FAIL;
+    }
+
+    // Walk forward through the buffer.  The loop runs while bytesLeft >= 4
+    // so the final 4-byte window is also compared; the previous '> 4' test
+    // silently missed a start code located in the last 4 bytes.
+    while (bytesLeft >= 4 &&
+            (memcmp("\x00\x00\x01\xB6", &inBuffer[bufSize - bytesLeft], 4) &&
+             memcmp("\x00\x00\x01\xB3", &inBuffer[bufSize - bytesLeft], 4))) {
+        --bytesLeft;
+    }
+
+    if (bytesLeft < 4) {
+        LOG_E("NO header found\n");
+        *headerSize = 0;
+    } else {
+        *headerSize = bufSize - bytesLeft;
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+// Copy the codec-config data (everything before the first VOP/GOV start
+// code in the current coded segment) into outBuffer.  Fails with
+// ENCODE_NO_REQUEST_DATA when no header is present and with
+// ENCODE_BUFFER_TOO_SMALL when the caller's buffer cannot hold it
+// (remainingSize then reports the required size).
+Encode_Status VideoEncoderMP4::outputConfigData(
+        VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t headerSize = 0;
+
+    ret = getHeaderPos((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+            mCurSegment->size - mOffsetInSeg, &headerSize);
+    CHECK_ENCODE_STATUS_RETURN("getHeaderPos");
+    if (headerSize == 0) {
+        outBuffer->dataSize = 0;
+        mCurSegment = NULL;
+        return ENCODE_NO_REQUEST_DATA;
+    }
+
+    if (headerSize <= outBuffer->bufferSize) {
+        memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+        // advance segment bookkeeping past the consumed header bytes
+        mTotalSizeCopied += headerSize;
+        mOffsetInSeg += headerSize;
+        outBuffer->dataSize = headerSize;
+        outBuffer->remainingSize = 0;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+    } else {
+        // we need a big enough buffer, otherwise we won't output anything
+        outBuffer->dataSize = 0;
+        outBuffer->remainingSize = headerSize;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+        LOG_E("Buffer size too small\n");
+        return ENCODE_BUFFER_TOO_SMALL;
+    }
+
+    return ret;
+}
+
+// MPEG-4-specific output handler: only OUTPUT_CODEC_DATA is supported here;
+// every other format is rejected with ENCODE_FAIL.
+Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    LOG_V("Begin\n");
+    CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+    switch (outBuffer->format) {
+        case OUTPUT_CODEC_DATA: {
+            // Output the codec config data
+            ret = outputConfigData(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputCodecData");
+            break;
+        }
+        default:
+            LOG_E("Invalid buffer mode for MPEG-4:2\n");
+            ret = ENCODE_FAIL;
+            break;
+    }
+
+    LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+
+// Success path falls through to the label as well; only the CHECK_* macro
+// above jumps here on error.
+CLEAN_UP:
+
+    LOG_V("End\n");
+    return ret;
+}
+
+// Build and submit the VAEncSequenceParameterBufferMPEG4 (VOL-level
+// parameters: resolution, frame rate, bitrate, QP bounds, intra period)
+// to the VA context.  The EncodeTask argument is unused here.
+Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = VAEncSequenceParameterBufferMPEG4();
+
+    uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+    uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+    LOG_V( "Begin\n\n");
+    // set up the sequence params for HW
+    mp4SequenceParams.profile_and_level_indication = mProfileLevelIndication;
+    mp4SequenceParams.video_object_layer_width= mComParams.resolution.width;
+    mp4SequenceParams.video_object_layer_height= mComParams.resolution.height;
+    // (num + denom/2) / denom rounds the rational frame rate to nearest int
+    mp4SequenceParams.vop_time_increment_resolution =
+            (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+    mp4SequenceParams.fixed_vop_time_increment= mFixedVOPTimeIncrement;
+    mp4SequenceParams.bits_per_second= mComParams.rcParams.bitRate;
+    mp4SequenceParams.frame_rate =
+            (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+    mp4SequenceParams.initial_qp = mComParams.rcParams.initQP;
+    mp4SequenceParams.min_qp = mComParams.rcParams.minQP;
+    mp4SequenceParams.intra_period = mComParams.intraPeriod;
+    //mpeg4_seq_param.fixed_vop_rate = 30;
+
+    LOG_V("===mpeg4 sequence params===\n");
+    LOG_I("profile_and_level_indication = %d\n", (uint32_t)mp4SequenceParams.profile_and_level_indication);
+    LOG_I("intra_period = %d\n", mp4SequenceParams.intra_period);
+    LOG_I("video_object_layer_width = %d\n", mp4SequenceParams.video_object_layer_width);
+    LOG_I("video_object_layer_height = %d\n", mp4SequenceParams.video_object_layer_height);
+    LOG_I("vop_time_increment_resolution = %d\n", mp4SequenceParams.vop_time_increment_resolution);
+    LOG_I("fixed_vop_rate = %d\n", mp4SequenceParams.fixed_vop_rate);
+    LOG_I("fixed_vop_time_increment = %d\n", mp4SequenceParams.fixed_vop_time_increment);
+    LOG_I("bitrate = %d\n", mp4SequenceParams.bits_per_second);
+    LOG_I("frame_rate = %d\n", mp4SequenceParams.frame_rate);
+    LOG_I("initial_qp = %d\n", mp4SequenceParams.initial_qp);
+    LOG_I("min_qp = %d\n", mp4SequenceParams.min_qp);
+    LOG_I("intra_period = %d\n\n", mp4SequenceParams.intra_period);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSequenceParameterBufferType,
+            sizeof(mp4SequenceParams),
+            1, &mp4SequenceParams,
+            &mSeqParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Build and submit the VAEncPictureParameterBufferMPEG4 for one frame:
+// reference/reconstructed surfaces (task-provided, or the driver-managed
+// pair when mAutoReference is set), coded buffer, and picture type.
+Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = VAEncPictureParameterBufferMPEG4();
+    LOG_V( "Begin\n\n");
+    // set picture params for HW
+    if(mAutoReference == false){
+        mpeg4_pic_param.reference_picture = task->ref_surface;
+        mpeg4_pic_param.reconstructed_picture = task->rec_surface;
+    }else {
+        mpeg4_pic_param.reference_picture = mAutoRefSurfaces[0];
+        mpeg4_pic_param.reconstructed_picture = mAutoRefSurfaces[1];
+    }
+
+    mpeg4_pic_param.coded_buf = task->coded_buffer;
+    mpeg4_pic_param.picture_width = mComParams.resolution.width;
+    mpeg4_pic_param.picture_height = mComParams.resolution.height;
+    // frame counter doubles as the VOP time increment
+    mpeg4_pic_param.vop_time_increment= mFrameNum;
+    mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+    LOG_V("======mpeg4 picture params======\n");
+    LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture);
+    LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture);
+    LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf);
+//    LOG_I("coded_buf_index = %d\n", mCodedBufIndex);
+    LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width);
+    LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height);
+    LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment);
+    LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncPictureParameterBufferType,
+            sizeof(mpeg4_pic_param),
+            1,&mpeg4_pic_param,
+            &mPicParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+
+// Build and submit a single VAEncSliceParameterBuffer covering the whole
+// frame (one slice, height rounded up to a macroblock multiple).
+Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    uint32_t sliceHeight;
+    uint32_t sliceHeightInMB;
+
+    // Value-initialize so fields and flag bits not set below are zero
+    // instead of indeterminate stack data (consistent with the sequence
+    // and picture parameter buffers above).
+    VAEncSliceParameterBuffer sliceParams = VAEncSliceParameterBuffer();
+
+    LOG_V( "Begin\n\n");
+
+    // round frame height up to a multiple of 16 (macroblock size)
+    sliceHeight = mComParams.resolution.height;
+    sliceHeight += 15;
+    sliceHeight &= (~15);
+    sliceHeightInMB = sliceHeight / 16;
+
+    sliceParams.start_row_number = 0;
+    sliceParams.slice_height = sliceHeightInMB;
+    sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
+    sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+    LOG_V("======mpeg4 slice params======\n");
+    LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number);
+    LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height);
+    LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSliceParameterBufferType,
+            sizeof(VAEncSliceParameterBuffer),
+            1, &sliceParams,
+            &mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Submit all parameter buffers for one frame: sequence params on the very
+// first frame only, then picture and slice params every frame.
+Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) {
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n");
+
+    if (mFrameNum == 0) {
+        ret = renderSequenceParams(task);
+        CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+    }
+
+    ret = renderPictureParams(task);
+    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+    ret = renderSliceParams(task);
+    // label fixed: previously logged "renderPictureParams" for a
+    // renderSliceParams failure, misdirecting error triage
+    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+    LOG_V( "End\n");
+    return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h
new file mode 100644
index 0000000..2691aab
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.h
@@ -0,0 +1,61 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER__MPEG4_H__
+#define __VIDEO_ENCODER__MPEG4_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * MPEG-4:2 Encoder class, derived from VideoEncoderBase
+ */
+class VideoEncoderMP4: public VideoEncoderBase {
+public:
+    VideoEncoderMP4();
+    virtual ~VideoEncoderMP4() {};
+
+//    Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+
+protected:
+    // Submits sequence/picture/slice parameter buffers for one frame.
+    virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+    // MPEG-4 has no codec-specific params/config; these are no-ops.
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *) {
+        return ENCODE_SUCCESS;
+    }
+    // Handles OUTPUT_CODEC_DATA requests (sequence header extraction).
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+    //virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
+    // Local Methods
+private:
+    // Locates the first VOP/GOV start code in a coded buffer.
+    Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize);
+    // Copies the codec-config (header) bytes into the output buffer.
+    Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer);
+    Encode_Status renderSequenceParams(EncodeTask *task);
+    Encode_Status renderPictureParams(EncodeTask *task);
+    Encode_Status renderSliceParams(EncodeTask *task);
+
+    // Value written to profile_and_level_indication in the sequence params.
+    unsigned char mProfileLevelIndication;
+    // Value written to fixed_vop_time_increment in the sequence params.
+    uint32_t mFixedVOPTimeIncrement;
+};
+
+#endif /* __VIDEO_ENCODER__MPEG4_H__ */
diff --git a/videoencoder/VideoEncoderUtils.cpp b/videoencoder/VideoEncoderUtils.cpp
new file mode 100644
index 0000000..8b55bb0
--- /dev/null
+++ b/videoencoder/VideoEncoderUtils.cpp
@@ -0,0 +1,808 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "VideoEncoderLog.h"
+#include "VideoEncoderUtils.h"
+#include <va/va_android.h>
+#include <va/va_drmcommon.h>
+
+#ifdef IMG_GFX
+#include <hal/hal_public.h>
+#include <hardware/gralloc.h>
+
+//#define GFX_DUMP
+
+#define OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar 0x7FA00E00
+
+static hw_module_t const *gModule = NULL;
+static gralloc_module_t *gAllocMod = NULL; /* get by force hw_module_t */
+static alloc_device_t *gAllocDev = NULL;
+
+// Resolve the gralloc HAL module and cache it in the file-scope globals
+// gModule/gAllocMod.  Returns 0 on success, -1 on failure.
+static int gfx_init(void) {
+
+    int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule);
+    if (err) {
+        LOG_E("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+        return -1;
+    } else
+        LOG_V("hw_get_module returned\n");
+    gAllocMod = (gralloc_module_t *)gModule;
+
+    return 0;
+}
+
+// Allocate a gralloc buffer, lazily opening the alloc device on first use.
+// On success *handle receives the buffer handle and *stride its row stride
+// (in pixels, per gralloc convention).  Returns 0 on success.
+static int gfx_alloc(uint32_t w, uint32_t h, int format,
+        int usage, buffer_handle_t* handle, int32_t* stride) {
+
+    int err;
+
+    if (!gAllocDev) {
+        if (!gModule) {
+            if (gfx_init()) {
+                LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+                return -1;
+            }
+        }
+
+        err = gralloc_open(gModule, &gAllocDev);
+        if (err) {
+            LOG_E("FATAL: gralloc open failed\n");
+            return -1;
+        }
+    }
+
+    err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride);
+    if (err) {
+        LOG_E("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n",
+                w, h, format, usage, err, strerror(-err));
+    }
+
+    return err;
+}
+
+// Free a buffer previously obtained from gfx_alloc; opens the alloc
+// device on demand like gfx_alloc does.  Returns 0 on success.
+static int gfx_free(buffer_handle_t handle) {
+
+    int err;
+
+    if (!gAllocDev) {
+        if (!gModule) {
+            if (gfx_init()) {
+                LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+                return -1;
+            }
+        }
+
+        err = gralloc_open(gModule, &gAllocDev);
+        if (err) {
+            LOG_E("FATAL: gralloc open failed\n");
+            return -1;
+        }
+    }
+
+    err = gAllocDev->free(gAllocDev, handle);
+    if (err) {
+        LOG_E("free(...) failed %d (%s)\n", err, strerror(-err));
+    }
+
+    return err;
+}
+
+// Lock a gralloc buffer for CPU access; on success *vaddr receives the
+// mapped address.  Returns 0 on success, -1 on failure.
+static int gfx_lock(buffer_handle_t handle, int usage,
+        int left, int top, int width, int height, void** vaddr) {
+
+    int err;
+
+    if (!gAllocMod) {
+        if (gfx_init()) {
+            LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+            return -1;
+        }
+    }
+
+    err = gAllocMod->lock(gAllocMod, handle, usage,
+            left, top, width, height, vaddr);
+
+    if (err){
+        LOG_E("lock(...) failed %d (%s).\n", err, strerror(-err));
+        return -1;
+    }
+
+    // Only read *vaddr after a successful lock: the original logged it
+    // unconditionally, reading an uninitialized pointer on failure.
+    LOG_V("gfx_lock: handle is %x, usage is %x, vaddr is %x.\n", (unsigned int)handle, usage, (unsigned int)*vaddr);
+    LOG_V("lock returned with address %p\n", *vaddr);
+
+    return err;
+}
+
+// Release a CPU lock taken by gfx_lock.  Returns 0 on success, -1 on failure.
+static int gfx_unlock(buffer_handle_t handle) {
+
+    int err;
+
+    if (!gAllocMod) {
+        if (gfx_init()) {
+            LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+            return -1;
+        }
+    }
+
+    err = gAllocMod->unlock(gAllocMod, handle);
+    if (err) {
+        LOG_E("unlock(...) failed %d (%s)", err, strerror(-err));
+        return -1;
+    } else
+        LOG_V("unlock returned\n");
+
+    return err;
+}
+
+// Blit (and implicitly color-convert) one gralloc buffer into another via
+// the IMG gralloc extension.  The two trailing int parameters are unused.
+// Returns 0 on success, -1 on failure.
+static int gfx_Blit(buffer_handle_t src, buffer_handle_t dest,
+        int w, int h, int , int )
+{
+    int err;
+
+    if (!gAllocMod) {
+        if (gfx_init()) {
+            LOG_E("can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+            return -1;
+        }
+    }
+
+    IMG_gralloc_module_public_t* GrallocMod = (IMG_gralloc_module_public_t*)gModule;
+
+// Merrifield and older chips expose different Blit entry points.
+#ifdef MRFLD_GFX
+    err = GrallocMod->Blit(GrallocMod, src, dest, w, h, 0, 0, 0, 0);
+#else
+    err = GrallocMod->Blit2(GrallocMod, src, dest, w, h, 0, 0);
+#endif
+
+    if (err) {
+        LOG_E("Blit(...) failed %d (%s)", err, strerror(-err));
+        return -1;
+    } else
+        LOG_V("Blit returned\n");
+
+    return err;
+}
+
+// Fill vinfo (width/height/lumaStride/format) straight from the IMG native
+// handle, applying the per-platform stride alignment rules; returns
+// ENCODE_NOT_SUPPORTED for pixel formats the encoder cannot consume.
+Encode_Status GetGfxBufferInfo(intptr_t handle, ValueInfo& vinfo){
+
+    /* only support OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
+                    HAL_PIXEL_FORMAT_NV12
+                    HAL_PIXEL_FORMAT_BGRA_8888
+                    HAL_PIXEL_FORMAT_RGBA_8888
+                    HAL_PIXEL_FORMAT_RGBX_8888
+                    HAL_PIXEL_FORMAT_BGRX_8888 */
+    IMG_native_handle_t* h = (IMG_native_handle_t*) handle;
+
+    vinfo.width = h->iWidth;
+    vinfo.height = h->iHeight;
+    vinfo.lumaStride = h->iWidth;
+
+    LOG_I("GetGfxBufferInfo: gfx iWidth=%d, iHeight=%d, iFormat=%x in handle structure\n", h->iWidth, h->iHeight, h->iFormat);
+
+    if (h->iFormat == HAL_PIXEL_FORMAT_NV12) {
+    #ifdef MRFLD_GFX
+        // camera-owned buffers use a wider (64-byte) alignment on MRFLD
+        if((h->usage & GRALLOC_USAGE_HW_CAMERA_READ) || (h->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) )
+            vinfo.lumaStride = (h->iWidth + 63) & ~63; //64 aligned
+        else
+            vinfo.lumaStride = (h->iWidth + 31) & ~31; //32 aligned
+    #else //on CTP
+        if (h->iWidth > 512)
+            vinfo.lumaStride = (h->iWidth + 63) & ~63;  //64 aligned
+        else
+            vinfo.lumaStride = 512;
+    #endif
+    } else if ((h->iFormat == HAL_PIXEL_FORMAT_BGRA_8888)||
+                  (h->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)||
+                  (h->iFormat == HAL_PIXEL_FORMAT_RGBX_8888)||
+                  (h->iFormat == HAL_PIXEL_FORMAT_BGRX_8888)) {
+        vinfo.lumaStride = (h->iWidth + 31) & ~31;
+    } else if (h->iFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar) {
+        //nothing to do
+    } else
+        return ENCODE_NOT_SUPPORTED;
+
+    vinfo.format = h->iFormat;
+
+    LOG_I(" Actual Width=%d, Height=%d, Stride=%d\n\n", vinfo.width, vinfo.height, vinfo.lumaStride);
+    return ENCODE_SUCCESS;
+}
+
+#ifdef GFX_DUMP
+// Debug-only helper: dump the raw pixel contents of a gfx buffer to a file
+// (sized as 4 bytes/pixel, i.e. the RGBA dump case).
+void DumpGfx(intptr_t handle, char* filename) {
+    ValueInfo vinfo;
+    void* vaddr[3];
+    FILE* fp;
+    int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN;
+
+    GetGfxBufferInfo(handle, vinfo);
+    if (gfx_lock((buffer_handle_t)handle, usage, 0, 0, vinfo.width, vinfo.height, &vaddr[0]) != 0)
+        return; // was "return ENCODE_DRIVER_FAIL;" — invalid in a void function
+    fp = fopen(filename, "wb");
+    if (fp != NULL) { // fopen was previously unchecked before fwrite
+        fwrite(vaddr[0], 1, vinfo.lumaStride * vinfo.height * 4, fp);
+        fclose(fp);
+        LOG_I("dump %d bytes data to %s\n", vinfo.lumaStride * vinfo.height * 4, filename);
+    } else {
+        LOG_E("failed to open %s for dump\n", filename);
+    }
+    gfx_unlock((buffer_handle_t)handle);
+
+    return;
+}
+#endif
+
+#endif
+
+// Driver-private libva entry points (not in the public libva headers):
+// used to obtain a kernel buffer handle and layout info for a VASurface.
+extern "C" {
+VAStatus vaLockSurface(VADisplay dpy,
+    VASurfaceID surface,
+    unsigned int *fourcc,
+    unsigned int *luma_stride,
+    unsigned int *chroma_u_stride,
+    unsigned int *chroma_v_stride,
+    unsigned int *luma_offset,
+    unsigned int *chroma_u_offset,
+    unsigned int *chroma_v_offset,
+    unsigned int *buffer_name,
+    void **buffer
+);
+
+VAStatus vaUnlockSurface(VADisplay dpy,
+    VASurfaceID surface
+);
+}
+
+// Constructor: records the VA display and the HW-supported external surface
+// memory types; all mapping state starts empty/invalid until doMapping().
+VASurfaceMap::VASurfaceMap(VADisplay display, int hwcap) {
+
+    mVADisplay = display;
+    mSupportedSurfaceMemType = hwcap;
+    mValue = 0;
+    mVASurface = VA_INVALID_SURFACE;
+    mTracked = false;
+    mAction = 0;
+    memset(&mVinfo, 0, sizeof(ValueInfo));
+#ifdef IMG_GFX
+    mGfxHandle = NULL;
+#endif
+}
+
+// Destructor: destroys the VA surface only when this map owns it
+// (mTracked false) and frees any gfx buffer allocated for color convert.
+VASurfaceMap::~VASurfaceMap() {
+
+    if (!mTracked && (mVASurface != VA_INVALID_SURFACE))
+        vaDestroySurfaces(mVADisplay, &mVASurface, 1);
+
+#ifdef IMG_GFX
+    if (mGfxHandle)
+        gfx_free(mGfxHandle);
+#endif
+}
+
+// Create/prepare the VA surface for mValue according to mAction:
+//  - COLORCONVERT: allocate an NV12 gfx buffer and Blit into it (IMG only)
+//  - ALIGN64/COPY: allocate a fresh 64-aligned surface and memcpy into it
+//  - otherwise map the caller's buffer (gfx/kbuf/surface/malloc) directly.
+// Idempotent with respect to surface creation: skipped once mVASurface is set.
+Encode_Status VASurfaceMap::doMapping() {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    if (mVASurface == VA_INVALID_SURFACE) {
+
+        int width = mVASurfaceWidth = mVinfo.width;
+        int height = mVASurfaceHeight = mVinfo.height;
+        int stride = mVASurfaceStride = mVinfo.lumaStride;
+
+        if (mAction & MAP_ACTION_COLORCONVERT) {
+
+            //only support gfx buffer
+            if (mVinfo.mode != MEM_MODE_GFXHANDLE)
+                return ENCODE_NOT_SUPPORTED;
+
+        #ifdef IMG_GFX //only enable on IMG chip
+
+            //do not trust valueinfo for gfx case, directly get from structure
+            ValueInfo tmp;
+
+            ret = GetGfxBufferInfo(mValue, tmp);
+            CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+            width = tmp.width;
+            height = tmp.height;
+            stride = tmp.lumaStride;
+
+            if (HAL_PIXEL_FORMAT_NV12 == tmp.format || OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar == tmp.format)
+                mAction &= ~MAP_ACTION_COLORCONVERT;
+            else {
+                //allocate new gfx buffer if format is not NV12
+                int usage = GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
+
+                //use same size with original and HAL_PIXEL_FORMAT_NV12 format
+                if (gfx_alloc(width, height, HAL_PIXEL_FORMAT_NV12, usage, &mGfxHandle, &stride) != 0)
+                    return ENCODE_DRIVER_FAIL;
+
+                LOG_I("Create an new gfx buffer handle 0x%p for color convert, width=%d, height=%d, stride=%d\n",
+                        mGfxHandle, width, height, stride);
+            }
+
+        #else
+            return ENCODE_NOT_SUPPORTED;
+        #endif
+        }
+
+        if (mAction & MAP_ACTION_ALIGN64 && stride % 64 != 0) {
+            //check if stride is not 64 aligned, must allocate new 64 aligned vasurface
+            stride = (stride + 63 ) & ~63;
+            mAction |= MAP_ACTION_COPY;
+        }
+
+        if(mAction & MAP_ACTION_ALIGN64 && width <= 320 && height <= 240) {
+            mAction |= MAP_ACTION_COPY;
+        }
+
+        if (mAction & MAP_ACTION_COPY) { //must allocate new vasurface(EXternalMemoryNULL, uncached)
+            //allocate new vasurface
+            mVASurface = CreateNewVASurface(mVADisplay, stride, height);
+            if (mVASurface == VA_INVALID_SURFACE)
+                return ENCODE_DRIVER_FAIL;
+            mVASurfaceWidth = mVASurfaceStride = stride;
+            mVASurfaceHeight = height;
+            // use the file's LOG_I alias (was raw LOGI, deprecated macro)
+            LOG_I("create new vaSurface for MAP_ACTION_COPY\n");
+        } else {
+        #ifdef IMG_GFX
+            if (mGfxHandle != NULL) {
+                //map new gfx handle to vasurface
+                ret = MappingGfxHandle((intptr_t)mGfxHandle);
+                CHECK_ENCODE_STATUS_RETURN("MappingGfxHandle");
+                LOG_I("map new allocated gfx handle to vaSurface\n");
+            } else
+        #endif
+            {
+                //map original value to vasurface
+                ret = MappingToVASurface();
+                CHECK_ENCODE_STATUS_RETURN("MappingToVASurface");
+            }
+        }
+    }
+
+    if (mAction & MAP_ACTION_COLORCONVERT) {
+        ret = doActionColConv();
+        CHECK_ENCODE_STATUS_RETURN("doActionColConv");
+    }
+
+    if (mAction & MAP_ACTION_COPY) {
+        //keep src color format is NV12, then do copy
+        ret = doActionCopy();
+        CHECK_ENCODE_STATUS_RETURN("doActionCopy");
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+// Dispatch to the memory-mode-specific mapping routine that wraps the
+// caller's buffer (mValue) in a VA surface.  ION/V4L2/USRPTR/CI modes are
+// not supported.
+Encode_Status VASurfaceMap::MappingToVASurface() {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    if (mVASurface != VA_INVALID_SURFACE) {
+        LOG_I("VASurface is already set before, nothing to do here\n");
+        return ENCODE_SUCCESS;
+    }
+    LOG_I("MappingToVASurface mode=%d, value=%p\n", mVinfo.mode, (void*)mValue);
+
+    const char *mode = NULL;
+    switch (mVinfo.mode) {
+        case MEM_MODE_SURFACE:
+            mode = "SURFACE";
+            ret = MappingSurfaceID(mValue);
+            break;
+        case MEM_MODE_GFXHANDLE:
+            mode = "GFXHANDLE";
+            ret = MappingGfxHandle(mValue);
+            break;
+        case MEM_MODE_KBUFHANDLE:
+            mode = "KBUFHANDLE";
+            ret = MappingKbufHandle(mValue);
+            break;
+        case MEM_MODE_MALLOC:
+        case MEM_MODE_NONECACHE_USRPTR:
+            mode = "MALLOC or NONCACHE_USRPTR";
+            ret = MappingMallocPTR(mValue);
+            break;
+        case MEM_MODE_ION:
+        case MEM_MODE_V4L2:
+        case MEM_MODE_USRPTR:
+        case MEM_MODE_CI:
+        default:
+            LOG_I("UnSupported memory mode 0x%08x", mVinfo.mode);
+            return ENCODE_NOT_SUPPORTED;
+    }
+
+    LOG_I("%s: Format=%x, lumaStride=%d, width=%d, height=%d\n", mode, mVinfo.format, mVinfo.lumaStride, mVinfo.width, mVinfo.height);
+    LOG_I("vaSurface 0x%08x is created for value = 0x%p\n", mVASurface, (void*)mValue);
+
+    return ret;
+}
+
+// Map a foreign VASurfaceID (owned by the display in mVinfo.handle): lock it
+// to extract its kernel buffer handle, then wrap that handle in a surface on
+// our own VA display.  Note mVinfo.mode is rewritten to MEM_MODE_KBUFHANDLE.
+Encode_Status VASurfaceMap::MappingSurfaceID(intptr_t value) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VASurfaceID surface;
+
+    //try to get kbufhandle from SurfaceID
+    uint32_t fourCC = 0;
+    uint32_t lumaStride = 0;
+    uint32_t chromaUStride = 0;
+    uint32_t chromaVStride = 0;
+    uint32_t lumaOffset = 0;
+    uint32_t chromaUOffset = 0;
+    uint32_t chromaVOffset = 0;
+    uint32_t kBufHandle = 0;
+
+    vaStatus = vaLockSurface(
+            (VADisplay)mVinfo.handle, (VASurfaceID)value,
+            &fourCC, &lumaStride, &chromaUStride, &chromaVStride,
+            &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL);
+
+    CHECK_VA_STATUS_RETURN("vaLockSurface");
+    LOG_I("Surface incoming = 0x%p\n", (void*)value);
+    LOG_I("lumaStride = %d, chromaUStride = %d, chromaVStride=%d\n", lumaStride, chromaUStride, chromaVStride);
+    LOG_I("lumaOffset = %d, chromaUOffset = %d, chromaVOffset = %d\n", lumaOffset, chromaUOffset, chromaVOffset);
+    LOG_I("kBufHandle = 0x%08x, fourCC = %d\n", kBufHandle, fourCC);
+
+    vaStatus = vaUnlockSurface((VADisplay)mVinfo.handle, (VASurfaceID)value);
+    CHECK_VA_STATUS_RETURN("vaUnlockSurface");
+
+    mVinfo.mode = MEM_MODE_KBUFHANDLE;
+    // NV12 size: luma plane + half-size interleaved chroma plane
+    mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5;
+
+    mVASurface = CreateSurfaceFromExternalBuf(kBufHandle, mVinfo);
+    if (mVASurface == VA_INVALID_SURFACE)
+        return ENCODE_INVALID_SURFACE;
+
+    mVASurfaceWidth = mVinfo.width;
+    mVASurfaceHeight = mVinfo.height;
+    mVASurfaceStride = mVinfo.lumaStride;
+    return ENCODE_SUCCESS;
+}
+
+// Wrap a gralloc handle in a VA surface.  On IMG platforms the geometry is
+// re-read from the native handle itself (GetGfxBufferInfo) rather than
+// trusting the caller-supplied ValueInfo.
+Encode_Status VASurfaceMap::MappingGfxHandle(intptr_t value) {
+
+    LOG_I("MappingGfxHandle %p......\n", (void*)value);
+    LOG_I("format = 0x%08x, lumaStride = %d in ValueInfo\n", mVinfo.format, mVinfo.lumaStride);
+
+    //default value for all HW platforms, maybe not accurate
+    mVASurfaceWidth = mVinfo.width;
+    mVASurfaceHeight = mVinfo.height;
+    mVASurfaceStride = mVinfo.lumaStride;
+
+#ifdef IMG_GFX
+    Encode_Status ret;
+    ValueInfo tmp;
+
+    ret = GetGfxBufferInfo(value, tmp);
+    CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+    mVASurfaceWidth = tmp.width;
+    mVASurfaceHeight = tmp.height;
+    mVASurfaceStride = tmp.lumaStride;
+#endif
+
+    LOG_I("Mapping vasurface Width=%d, Height=%d, Stride=%d\n", mVASurfaceWidth, mVASurfaceHeight, mVASurfaceStride);
+
+    ValueInfo vinfo;
+    memset(&vinfo, 0, sizeof(ValueInfo));
+    vinfo.mode = MEM_MODE_GFXHANDLE;
+    vinfo.width = mVASurfaceWidth;
+    vinfo.height = mVASurfaceHeight;
+    vinfo.lumaStride = mVASurfaceStride;
+    mVASurface = CreateSurfaceFromExternalBuf(value, vinfo);
+    if (mVASurface == VA_INVALID_SURFACE)
+        return ENCODE_INVALID_SURFACE;
+
+    return ENCODE_SUCCESS;
+}
+
+// Wrap a kernel buffer handle in a VA surface using the caller's ValueInfo;
+// size is computed for NV12 (stride * height * 1.5).
+Encode_Status VASurfaceMap::MappingKbufHandle(intptr_t value) {
+
+    LOG_I("MappingKbufHandle value=%p\n", (void*)value);
+
+    mVinfo.size = mVinfo.lumaStride * mVinfo.height * 1.5;
+    mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo);
+    if (mVASurface == VA_INVALID_SURFACE)
+        return ENCODE_INVALID_SURFACE;
+
+    mVASurfaceWidth = mVinfo.width;
+    mVASurfaceHeight = mVinfo.height;
+    mVASurfaceStride = mVinfo.lumaStride;
+
+    return ENCODE_SUCCESS;
+}
+
+// Wrap a malloc'd (or non-cached user pointer) buffer in a VA surface
+// using the caller's ValueInfo as-is.
+Encode_Status VASurfaceMap::MappingMallocPTR(intptr_t value) {
+
+    mVASurface = CreateSurfaceFromExternalBuf(value, mVinfo);
+    if (mVASurface == VA_INVALID_SURFACE)
+        return ENCODE_INVALID_SURFACE;
+
+    mVASurfaceWidth = mVinfo.width;
+    mVASurfaceHeight = mVinfo.height;
+    mVASurfaceStride = mVinfo.lumaStride;
+
+    return ENCODE_SUCCESS;
+}
+
+//always copy with same color format NV12
+// Copy the source buffer (malloc pointer or gfx handle) into mVASurface:
+// derive a VAImage from the surface, map it, and copy the Y plane then the
+// interleaved UV plane row by row, honoring the source/destination pitches.
+Encode_Status VASurfaceMap::doActionCopy() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    uint32_t width = 0, height = 0, stride = 0;
+    uint8_t *pSrcBuffer, *pDestBuffer;
+    intptr_t handle = 0;
+
+    LOG_I("Copying Src Buffer data to VASurface\n");
+
+    if (mVinfo.mode != MEM_MODE_MALLOC && mVinfo.mode != MEM_MODE_GFXHANDLE) {
+        LOG_E("Not support copy in mode %d", mVinfo.mode);
+        return ENCODE_NOT_SUPPORTED;
+    }
+
+    LOG_I("Src Buffer information\n");
+    LOG_I("Mode = %d, width = %d, stride = %d, height = %d\n",
+            mVinfo.mode, mVinfo.width, mVinfo.lumaStride, mVinfo.height);
+
+    uint32_t srcY_offset, srcUV_offset;
+    uint32_t srcY_pitch, srcUV_pitch;
+
+    if (mVinfo.mode == MEM_MODE_MALLOC) {
+        width = mVinfo.width;
+        height = mVinfo.height;
+        stride = mVinfo.lumaStride;
+        pSrcBuffer = (uint8_t*) mValue;
+        srcY_offset = 0;
+        srcUV_offset = stride * height;
+        srcY_pitch = stride;
+        srcUV_pitch = stride;
+    } else {
+
+    #ifdef IMG_GFX //only enable on IMG chips
+        int usage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN;
+
+        //do not trust valueinfo, directly get from structure
+        Encode_Status ret;
+        ValueInfo tmp;
+
+        // prefer the color-converted buffer when one was allocated
+        if (mGfxHandle)
+            handle = (intptr_t) mGfxHandle;
+        else
+            handle = mValue;
+
+        ret = GetGfxBufferInfo(handle, tmp);
+        CHECK_ENCODE_STATUS_RETURN("GetGfxBufferInfo");
+        width = tmp.width;
+        height = tmp.height;
+        stride = tmp.lumaStride;
+
+        //only support HAL_PIXEL_FORMAT_NV12 & OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
+        if (HAL_PIXEL_FORMAT_NV12 != tmp.format && OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar != tmp.format) {
+            LOG_E("Not support gfx buffer format %x", tmp.format);
+            return ENCODE_NOT_SUPPORTED;
+        }
+
+        srcY_offset = 0;
+        srcUV_offset = stride * height;
+        srcY_pitch = stride;
+        srcUV_pitch = stride;
+
+        //lock gfx handle with buffer real size
+        void* vaddr[3];
+        if (gfx_lock((buffer_handle_t) handle, usage, 0, 0, width, height, &vaddr[0]) != 0)
+            return ENCODE_DRIVER_FAIL;
+        pSrcBuffer = (uint8_t*)vaddr[0];
+    #else
+
+        return ENCODE_NOT_SUPPORTED;
+    #endif
+    }
+
+
+    // NOTE(review): from here on the gfx buffer (if any) is still locked;
+    // the early returns below (CHECK_VA_STATUS_RETURN and the size check)
+    // skip gfx_unlock — confirm whether this lock leak needs fixing.
+    VAImage destImage;
+    vaStatus = vaDeriveImage(mVADisplay, mVASurface, &destImage);
+    CHECK_VA_STATUS_RETURN("vaDeriveImage");
+    vaStatus = vaMapBuffer(mVADisplay, destImage.buf, (void **)&pDestBuffer);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    LOG_I("\nDest VASurface information\n");
+    LOG_I("pitches[0] = %d\n", destImage.pitches[0]);
+    LOG_I("pitches[1] = %d\n", destImage.pitches[1]);
+    LOG_I("offsets[0] = %d\n", destImage.offsets[0]);
+    LOG_I("offsets[1] = %d\n", destImage.offsets[1]);
+    LOG_I("num_planes = %d\n", destImage.num_planes);
+    LOG_I("width = %d\n", destImage.width);
+    LOG_I("height = %d\n", destImage.height);
+
+    if (width > destImage.width || height > destImage.height) {
+        LOG_E("src buffer is bigger than destination buffer\n");
+        return ENCODE_INVALID_PARAMS;
+    }
+
+    uint8_t *srcY, *dstY;
+    uint8_t *srcU, *srcV;
+    uint8_t *srcUV, *dstUV;
+
+    srcY = pSrcBuffer + srcY_offset;
+    dstY = pDestBuffer + destImage.offsets[0];
+    srcUV = pSrcBuffer + srcUV_offset;
+    dstUV = pDestBuffer + destImage.offsets[1];
+
+    // Y plane: full height
+    for (uint32_t i = 0; i < height; i++) {
+        memcpy(dstY, srcY, width);
+        srcY += srcY_pitch;
+        dstY += destImage.pitches[0];
+    }
+
+    // interleaved UV plane: half height (NV12)
+    for (uint32_t i = 0; i < height / 2; i++) {
+        memcpy(dstUV, srcUV, width);
+        srcUV += srcUV_pitch;
+        dstUV += destImage.pitches[1];
+    }
+
+    vaStatus = vaUnmapBuffer(mVADisplay, destImage.buf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+    vaStatus = vaDestroyImage(mVADisplay, destImage.image_id);
+    CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+#ifdef IMG_GFX
+    if (mVinfo.mode == MEM_MODE_GFXHANDLE) {
+        //unlock gfx handle
+        gfx_unlock((buffer_handle_t) handle);
+    }
+#endif
+    LOG_I("Copying Src Buffer data to VASurface Complete\n");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VASurfaceMap::doActionColConv() {
+
+#ifdef IMG_GFX
+ if (mGfxHandle == NULL) {
+ LOG_E("something wrong, why new gfxhandle is not allocated? \n");
+ return ENCODE_FAIL;
+ }
+
+ LOG_I("doActionColConv gfx_Blit width=%d, height=%d\n", mVinfo.width, mVinfo.height);
+ if (gfx_Blit((buffer_handle_t)mValue, mGfxHandle,
+ mVinfo.width, mVinfo.height, 0, 0) != 0)
+ return ENCODE_DRIVER_FAIL;
+
+ #ifdef GFX_DUMP
+ LOG_I("dumpping gfx data.....\n");
+ DumpGfx(mValue, "/data/dump.rgb");
+ DumpGfx((intptr_t)mGfxHandle, "/data/dump.yuv");
+ #endif
+ return ENCODE_SUCCESS;
+
+#else
+ return ENCODE_NOT_SUPPORTED;
+#endif
+}
+
+VASurfaceID VASurfaceMap::CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo) {
+
+ VAStatus vaStatus;
+ VASurfaceAttribExternalBuffers extbuf;
+ VASurfaceAttrib attribs[2];
+ VASurfaceID surface = VA_INVALID_SURFACE;
+ int type;
+ unsigned long data = value;
+
+ extbuf.pixel_format = VA_FOURCC_NV12;
+ extbuf.width = vinfo.width;
+ extbuf.height = vinfo.height;
+ extbuf.data_size = vinfo.size;
+ if (extbuf.data_size == 0)
+ extbuf.data_size = vinfo.lumaStride * vinfo.height * 1.5;
+ extbuf.num_buffers = 1;
+ extbuf.num_planes = 3;
+ extbuf.pitches[0] = vinfo.lumaStride;
+ extbuf.pitches[1] = vinfo.lumaStride;
+ extbuf.pitches[2] = vinfo.lumaStride;
+ extbuf.pitches[3] = 0;
+ extbuf.offsets[0] = 0;
+ extbuf.offsets[1] = vinfo.lumaStride * vinfo.height;
+ extbuf.offsets[2] = extbuf.offsets[1];
+ extbuf.offsets[3] = 0;
+ extbuf.buffers = &data;
+ extbuf.flags = 0;
+ extbuf.private_data = NULL;
+
+ switch(vinfo.mode) {
+ case MEM_MODE_GFXHANDLE:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+ break;
+ case MEM_MODE_KBUFHANDLE:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM;
+ break;
+ case MEM_MODE_MALLOC:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;
+ break;
+ case MEM_MODE_NONECACHE_USRPTR:
+ type = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;
+ extbuf.flags |= VA_SURFACE_EXTBUF_DESC_UNCACHED;
+ break;
+ case MEM_MODE_SURFACE:
+ case MEM_MODE_ION:
+ case MEM_MODE_V4L2:
+ case MEM_MODE_USRPTR:
+ case MEM_MODE_CI:
+ default:
+ //not support
+ return VA_INVALID_SURFACE;
+ }
+
+ if (!(mSupportedSurfaceMemType & type))
+ return VA_INVALID_SURFACE;
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = type;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)&extbuf;
+
+ vaStatus = vaCreateSurfaces(mVADisplay, VA_RT_FORMAT_YUV420, vinfo.width,
+ vinfo.height, &surface, 1, attribs, 2);
+ if (vaStatus != VA_STATUS_SUCCESS){
+ LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus);
+ surface = VA_INVALID_SURFACE;
+ }
+ return surface;
+}
+
+VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height) {
+
+ VAStatus vaStatus;
+ VASurfaceID surface = VA_INVALID_SURFACE;
+ VASurfaceAttrib attribs[2];
+ VASurfaceAttribExternalBuffers extbuf;
+ unsigned long data;
+
+ extbuf.pixel_format = VA_FOURCC_NV12;
+ extbuf.width = width;
+ extbuf.height = height;
+ extbuf.data_size = width * height * 3 / 2;
+ extbuf.num_buffers = 1;
+ extbuf.num_planes = 3;
+ extbuf.pitches[0] = width;
+ extbuf.pitches[1] = width;
+ extbuf.pitches[2] = width;
+ extbuf.pitches[3] = 0;
+ extbuf.offsets[0] = 0;
+ extbuf.offsets[1] = width * height;
+ extbuf.offsets[2] = extbuf.offsets[1];
+ extbuf.offsets[3] = 0;
+ extbuf.buffers = &data;
+ extbuf.flags = 0;
+ extbuf.private_data = NULL;
+
+ attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
+ attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[0].value.type = VAGenericValueTypeInteger;
+ attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
+
+ attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+ attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attribs[1].value.type = VAGenericValueTypePointer;
+ attribs[1].value.value.p = (void *)&extbuf;
+
+ vaStatus = vaCreateSurfaces(display, VA_RT_FORMAT_YUV420, width,
+ height, &surface, 1, attribs, 2);
+ if (vaStatus != VA_STATUS_SUCCESS)
+ LOG_E("vaCreateSurfaces failed. vaStatus = %d\n", vaStatus);
+
+ return surface;
+}
diff --git a/videoencoder/VideoEncoderUtils.h b/videoencoder/VideoEncoderUtils.h
new file mode 100644
index 0000000..05911cd
--- /dev/null
+++ b/videoencoder/VideoEncoderUtils.h
@@ -0,0 +1,85 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_UTILS_H__
+#define __VIDEO_ENCODER_UTILS_H__
+#include <va/va.h>
+#include <va/va_tpi.h>
+#include "VideoEncoderDef.h"
+#include "IntelMetadataBuffer.h"
+#ifdef IMG_GFX
+#include <hardware/gralloc.h>
+#endif
+
+#define MAP_ACTION_COPY 0x00000001 //mem copy
+#define MAP_ACTION_ALIGN64 0x00000002 //align 64
+#define MAP_ACTION_COLORCONVERT 0x00000004 //color convert
+#define MAP_ACTION_RESIZE 0x00000008 //resize
+
/**
 * VASurfaceMap — binds one externally-supplied buffer (gralloc handle, kernel
 * buffer handle, malloc'd pointer, ...) to a VASurface the encoder can read.
 * Depending on the buffer's memory mode and alignment, mapping may involve a
 * plain wrap, a memcpy, a 64-byte realignment, or a gfx color conversion
 * (see the MAP_ACTION_* flags above).
 */
class VASurfaceMap {
public:
    VASurfaceMap(VADisplay display, int hwcap);
    ~VASurfaceMap();

    // Perform the mapping chosen by setAction(); must be called after the
    // value/value-info setters.
    Encode_Status doMapping();
    VASurfaceID getVASurface() {return mVASurface;}
    intptr_t getValue() {return mValue;}
    ValueInfo* getValueInfo() {return &mVinfo;}

    void setVASurface(VASurfaceID surface) {mVASurface = surface;}
    void setValue(intptr_t value) {mValue = value;}
    void setValueInfo(ValueInfo& vinfo) {memcpy(&mVinfo, &vinfo, sizeof(ValueInfo));}
    void setTracked() {mTracked = true;}
    void setAction(int32_t action) {mAction = action;}

private:
    Encode_Status doActionCopy();
    Encode_Status doActionColConv();
    Encode_Status MappingToVASurface();
    Encode_Status MappingSurfaceID(intptr_t value);
    Encode_Status MappingGfxHandle(intptr_t value);
    Encode_Status MappingKbufHandle(intptr_t value);
    Encode_Status MappingMallocPTR(intptr_t value);
    VASurfaceID CreateSurfaceFromExternalBuf(intptr_t value, ValueInfo& vinfo);

    VADisplay mVADisplay;

    // Opaque handle/pointer of the source buffer (interpretation depends on mVinfo.mode).
    intptr_t mValue;

    VASurfaceID mVASurface;
    int32_t mVASurfaceWidth;
    int32_t mVASurfaceHeight;
    int32_t mVASurfaceStride;

// MetadataBufferType mType;

    // Description (mode, dimensions, stride, size) of the source buffer.
    ValueInfo mVinfo;
    // True once ownership/lifetime of this mapping is tracked by the caller.
    bool mTracked;

    // Bitmask of MAP_ACTION_* flags selecting the mapping strategy.
    int32_t mAction;

    // Bitmask of VA_SURFACE_ATTRIB_MEM_TYPE_* values the driver accepts.
    int32_t mSupportedSurfaceMemType;

#ifdef IMG_GFX
    //special for gfx color format converter
    buffer_handle_t mGfxHandle;
#endif
};
+
+VASurfaceID CreateNewVASurface(VADisplay display, int32_t width, int32_t height);
+
+#endif
+
diff --git a/videoencoder/VideoEncoderVP8.cpp b/videoencoder/VideoEncoderVP8.cpp
new file mode 100644
index 0000000..d65b385
--- /dev/null
+++ b/videoencoder/VideoEncoderVP8.cpp
@@ -0,0 +1,521 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderVP8.h"
+#include <va/va_tpi.h>
+#include <va/va_enc_vp8.h>
+
// Construct with safe VP8 defaults; all of these can be overridden later
// through derivedSetParams()/derivedSetConfig().
VideoEncoderVP8::VideoEncoderVP8()
    :VideoEncoderBase() {

    // Sequence-level defaults: profile 0, manual keyframes every 128 frames,
    // full QP range [0,63] starting at 26.
    mVideoParamsVP8.profile = 0;
    mVideoParamsVP8.error_resilient = 0;
    mVideoParamsVP8.num_token_partitions = 4;
    mVideoParamsVP8.kf_auto = 0;
    mVideoParamsVP8.kf_min_dist = 128;
    mVideoParamsVP8.kf_max_dist = 128;
    mVideoParamsVP8.min_qp = 0;
    mVideoParamsVP8.max_qp = 63;
    mVideoParamsVP8.init_qp = 26;
    // Rate-control tolerances (percent) and HRD buffer model defaults.
    mVideoParamsVP8.rc_undershoot = 100;
    mVideoParamsVP8.rc_overshoot = 100;
    mVideoParamsVP8.hrd_buf_size = 1000;
    mVideoParamsVP8.hrd_buf_initial_fullness = 500;
    mVideoParamsVP8.hrd_buf_optimal_fullness = 600;
    // 0 disables the per-frame max-size cap (see renderMaxFrameSizeParams).
    mVideoParamsVP8.max_frame_size_ratio = 0;

    // Per-frame config defaults.
    mVideoConfigVP8.force_kf = 0;
    mVideoConfigVP8.refresh_entropy_probs = 0;
    mVideoConfigVP8.value = 0;
    mVideoConfigVP8.sharpness_level = 2;

    // Reference-frame policy: use and refresh all three reference buffers.
    mVideoConfigVP8ReferenceFrame.no_ref_last = 0;
    mVideoConfigVP8ReferenceFrame.no_ref_gf = 0;
    mVideoConfigVP8ReferenceFrame.no_ref_arf = 0;
    mVideoConfigVP8ReferenceFrame.refresh_last = 1;
    mVideoConfigVP8ReferenceFrame.refresh_golden_frame = 1;
    mVideoConfigVP8ReferenceFrame.refresh_alternate_frame = 1;

    mComParams.profile = VAProfileVP8Version0_3;
}
+
// No VP8-specific resources to release; VideoEncoderBase's destructor
// handles VA context/surface teardown.
VideoEncoderVP8::~VideoEncoderVP8() {
}
+
+Encode_Status VideoEncoderVP8::start() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ LOG_V( "Begin\n");
+
+ ret = VideoEncoderBase::start ();
+ CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start");
+
+ if (mComParams.rcMode == VA_RC_VCM) {
+ mRenderBitRate = false;
+ }
+
+ LOG_V( "end\n");
+ return ret;
+}
+
+
// Build and submit the VP8 sequence parameter buffer (frame geometry,
// keyframe policy, target bitrate, auto-reference surfaces). Called once,
// before the first frame.
Encode_Status VideoEncoderVP8::renderSequenceParams() {
    Encode_Status ret = ENCODE_SUCCESS;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    VAEncSequenceParameterBufferVP8 vp8SeqParam = VAEncSequenceParameterBufferVP8();

    LOG_V( "Begin\n");

    vp8SeqParam.frame_width = mComParams.resolution.width;
    vp8SeqParam.frame_height = mComParams.resolution.height;
    vp8SeqParam.error_resilient = mVideoParamsVP8.error_resilient;
    vp8SeqParam.kf_auto = mVideoParamsVP8.kf_auto;
    vp8SeqParam.kf_min_dist = mVideoParamsVP8.kf_min_dist;
    vp8SeqParam.kf_max_dist = mVideoParamsVP8.kf_max_dist;
    vp8SeqParam.bits_per_second = mComParams.rcParams.bitRate;
    // NOTE(review): copy size is sizeof(mAutoRefSurfaces) * count. That is only
    // correct if mAutoRefSurfaces is a pointer whose sizeof happens to match the
    // element stride intended; if it is an array, this multiplies the WHOLE
    // array size by the count and over-copies. Verify against the base class
    // declaration — likely should be sizeof(VASurfaceID) * mAutoReferenceSurfaceNum.
    memcpy(vp8SeqParam.reference_frames, mAutoRefSurfaces, sizeof(mAutoRefSurfaces) * mAutoReferenceSurfaceNum);

    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncSequenceParameterBufferType,
            sizeof(vp8SeqParam),
            1, &vp8SeqParam,
            &mSeqParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");

    LOG_V( "End\n");
    return ret;
}
+
// Build and submit the per-frame VP8 picture parameter buffer: coded-buffer
// target, keyframe forcing, reference usage/refresh flags.
Encode_Status VideoEncoderVP8::renderPictureParams(EncodeTask *task) {
    Encode_Status ret = ENCODE_SUCCESS;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    VAEncPictureParameterBufferVP8 vp8PicParam = VAEncPictureParameterBufferVP8();
    LOG_V( "Begin\n");

    vp8PicParam.coded_buf = task->coded_buffer;
    vp8PicParam.pic_flags.value = 0;
    vp8PicParam.ref_flags.bits.force_kf = mVideoConfigVP8.force_kf; //0;
    // A forced keyframe has no references; only set no-ref flags otherwise.
    if(!vp8PicParam.ref_flags.bits.force_kf) {
        vp8PicParam.ref_flags.bits.no_ref_last = mVideoConfigVP8ReferenceFrame.no_ref_last;
        vp8PicParam.ref_flags.bits.no_ref_arf = mVideoConfigVP8ReferenceFrame.no_ref_arf;
        vp8PicParam.ref_flags.bits.no_ref_gf = mVideoConfigVP8ReferenceFrame.no_ref_gf;
    }
    vp8PicParam.pic_flags.bits.refresh_entropy_probs = 0;
    // NOTE(review): hardcoded 2 matches the constructor default but ignores any
    // runtime change to mVideoConfigVP8.sharpness_level — confirm intentional.
    vp8PicParam.sharpness_level = 2;
    // NOTE(review): presumably log2 of token partitions (2 -> 4 partitions,
    // consistent with mVideoParamsVP8.num_token_partitions = 4) — verify against
    // the va_enc_vp8.h field semantics.
    vp8PicParam.pic_flags.bits.num_token_partitions = 2;
    vp8PicParam.pic_flags.bits.refresh_last = mVideoConfigVP8ReferenceFrame.refresh_last;
    vp8PicParam.pic_flags.bits.refresh_golden_frame = mVideoConfigVP8ReferenceFrame.refresh_golden_frame;
    vp8PicParam.pic_flags.bits.refresh_alternate_frame = mVideoConfigVP8ReferenceFrame.refresh_alternate_frame;

    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncPictureParameterBufferType,
            sizeof(vp8PicParam),
            1, &vp8PicParam,
            &mPicParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");

    LOG_V( "End\n");
    return ret;
}
+
+Encode_Status VideoEncoderVP8::renderRCParams(uint32_t layer_id, bool total_bitrate)
+{
+ VABufferID rc_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterRateControl *misc_rate_ctrl;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
+ 1,NULL,&rc_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, rc_param_buf,(void **)&misc_param);
+
+ misc_param->type = VAEncMiscParameterTypeRateControl;
+ misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
+ memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
+
+ if(total_bitrate)
+ misc_rate_ctrl->bits_per_second = mComParams.rcParams.bitRate;
+ else
+ {
+ misc_rate_ctrl->rc_flags.bits.temporal_id = layer_id;
+ if(mTemporalLayerBitrateFramerate[layer_id].bitRate != 0)
+ misc_rate_ctrl->bits_per_second = mTemporalLayerBitrateFramerate[layer_id].bitRate;
+ }
+
+ misc_rate_ctrl->target_percentage = 100;
+ misc_rate_ctrl->window_size = 1000;
+ misc_rate_ctrl->initial_qp = mVideoParamsVP8.init_qp;
+ misc_rate_ctrl->min_qp = mVideoParamsVP8.min_qp;
+ misc_rate_ctrl->basic_unit_size = 0;
+ misc_rate_ctrl->max_qp = mVideoParamsVP8.max_qp;
+
+ vaUnmapBuffer(mVADisplay, rc_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &rc_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderFrameRateParams(uint32_t layer_id, bool total_framerate)
+{
+ VABufferID framerate_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterFrameRate * misc_framerate;
+ uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+ uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+ 1,NULL,&framerate_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, framerate_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeFrameRate;
+ misc_framerate = (VAEncMiscParameterFrameRate *)misc_param->data;
+ memset(misc_framerate, 0, sizeof(*misc_framerate));
+
+ if(total_framerate)
+ misc_framerate->framerate = (unsigned int) (frameRateNum + frameRateDenom /2) / frameRateDenom;
+ else
+ {
+ misc_framerate->framerate_flags.bits.temporal_id = layer_id;
+ if(mTemporalLayerBitrateFramerate[layer_id].frameRate != 0)
+ misc_framerate->framerate = mTemporalLayerBitrateFramerate[layer_id].frameRate;
+ }
+
+ vaUnmapBuffer(mVADisplay, framerate_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &framerate_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderHRDParams(void)
+{
+ VABufferID hrd_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterHRD * misc_hrd;
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
+ 1,NULL,&hrd_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, hrd_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeHRD;
+ misc_hrd = (VAEncMiscParameterHRD *)misc_param->data;
+ memset(misc_hrd, 0, sizeof(*misc_hrd));
+ misc_hrd->buffer_size = 1000;
+ misc_hrd->initial_buffer_fullness = 500;
+ misc_hrd->optimal_buffer_fullness = 600;
+ vaUnmapBuffer(mVADisplay, hrd_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &hrd_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderMaxFrameSizeParams(void)
+{
+ VABufferID max_frame_size_param_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterBufferMaxFrameSize * misc_maxframesize;
+ unsigned int frameRateNum = mComParams.frameRate.frameRateNum;
+ unsigned int frameRateDenom = mComParams.frameRate.frameRateDenom;
+ unsigned int frameRate = (unsigned int)(frameRateNum + frameRateDenom /2);
+ unsigned int bitRate = mComParams.rcParams.bitRate;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
+ 1,NULL,&max_frame_size_param_buf);
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+ vaMapBuffer(mVADisplay, max_frame_size_param_buf,(void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeMaxFrameSize;
+ misc_maxframesize = (VAEncMiscParameterBufferMaxFrameSize *)misc_param->data;
+ memset(misc_maxframesize, 0, sizeof(*misc_maxframesize));
+ misc_maxframesize->max_frame_size = (unsigned int)((bitRate/frameRate) * mVideoParamsVP8.max_frame_size_ratio);
+ vaUnmapBuffer(mVADisplay, max_frame_size_param_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay,mVAContext, &max_frame_size_param_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+Encode_Status VideoEncoderVP8::renderLayerStructureParam(void)
+{
+ VABufferID layer_struc_buf;
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ VAEncMiscParameterBuffer *misc_param;
+ VAEncMiscParameterTemporalLayerStructure *misc_layer_struc;
+ uint32_t i;
+
+ vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+ VAEncMiscParameterBufferType,
+ sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterTemporalLayerStructure),
+ 1, NULL, &layer_struc_buf);
+
+ CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+ vaMapBuffer(mVADisplay, layer_struc_buf, (void **)&misc_param);
+ misc_param->type = VAEncMiscParameterTypeTemporalLayerStructure;
+ misc_layer_struc = (VAEncMiscParameterTemporalLayerStructure *)misc_param->data;
+ memset(misc_layer_struc, 0, sizeof(*misc_layer_struc));
+
+ misc_layer_struc->number_of_layers = mComParams.numberOfLayer;
+ misc_layer_struc->periodicity = mComParams.nPeriodicity;
+ LOGE("renderLayerStructureParam misc_layer_struc->number_of_layers is %d",misc_layer_struc->number_of_layers);
+
+ for(i=0;i<mComParams.nPeriodicity;i++)
+ {
+ misc_layer_struc->layer_id[i] = mComParams.nLayerID[i];
+ }
+
+ vaUnmapBuffer(mVADisplay, layer_struc_buf);
+
+ vaStatus = vaRenderPicture(mVADisplay, mVAContext, &layer_struc_buf, 1);
+ CHECK_VA_STATUS_RETURN("vaRenderPicture");;
+
+ return 0;
+}
+
+
+Encode_Status VideoEncoderVP8::sendEncodeCommand(EncodeTask *task) {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+ uint32_t i;
+
+ if (mFrameNum == 0) {
+ ret = renderSequenceParams();
+ ret = renderFrameRateParams(0,true);
+ ret = renderRCParams(0,true);
+ ret = renderHRDParams();
+ ret = renderMaxFrameSizeParams();
+ if(mRenderMultiTemporal)
+ {
+ ret = renderLayerStructureParam();
+ mRenderMultiTemporal = false;
+
+ }
+
+ if(mComParams.numberOfLayer > 1)
+ for(i=0;i<mComParams.numberOfLayer;i++)
+ {
+ ret = renderFrameRateParams(i, false);
+ ret = renderRCParams(i, false);
+ }
+
+ CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+ }
+
+ if (mRenderBitRate){
+ ret = renderRCParams(0,true);
+ CHECK_ENCODE_STATUS_RETURN("renderRCParams");
+
+ mRenderBitRate = false;
+ }
+
+ if (mRenderFrameRate) {
+ ret = renderFrameRateParams(0,true);
+ CHECK_ENCODE_STATUS_RETURN("renderFrameRateParams");
+
+ mRenderFrameRate = false;
+ }
+
+ if (mRenderMaxFrameSize) {
+ ret = renderMaxFrameSizeParams();
+ CHECK_ENCODE_STATUS_RETURN("renderMaxFrameSizeParams");
+
+ mRenderMaxFrameSize = false;
+ }
+
+ ret = renderPictureParams(task);
+ CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+ if(mForceKFrame) {
+ mVideoConfigVP8.force_kf = 0;//rest it as default value
+ mForceKFrame = false;
+ }
+
+ LOG_V( "End\n");
+ return ret;
+}
+
+
+Encode_Status VideoEncoderVP8::derivedSetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsVP8 *encParamsVP8 = reinterpret_cast <VideoParamsVP8*> (videoEncParams);
+
+ if (encParamsVP8->size != sizeof(VideoParamsVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsVP8 = *encParamsVP8;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedGetParams(VideoParamConfigSet *videoEncParams) {
+
+ CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+ VideoParamsVP8 *encParamsVP8 = reinterpret_cast <VideoParamsVP8*> (videoEncParams);
+
+ if (encParamsVP8->size != sizeof(VideoParamsVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encParamsVP8 = mVideoParamsVP8;
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedGetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ int layer_id;
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+
+ switch (videoEncConfig->type)
+ {
+ case VideoConfigTypeVP8:{
+ VideoConfigVP8 *encConfigVP8 =
+ reinterpret_cast<VideoConfigVP8*> (videoEncConfig);
+
+ if (encConfigVP8->size != sizeof(VideoConfigVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encConfigVP8 = mVideoConfigVP8;
+ }
+ break;
+
+ case VideoConfigTypeVP8ReferenceFrame:{
+
+ VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame =
+ reinterpret_cast<VideoConfigVP8ReferenceFrame*> (videoEncConfig);
+
+ if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ *encConfigVP8ReferenceFrame = mVideoConfigVP8ReferenceFrame;
+
+ }
+ break;
+
+ case VideoConfigTypeVP8MaxFrameSizeRatio :{
+
+ VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio =
+ reinterpret_cast<VideoConfigVP8MaxFrameSizeRatio*> (videoEncConfig);
+
+ if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio = mVideoParamsVP8.max_frame_size_ratio;
+ }
+ break;
+
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+
+ return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderVP8::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+
+ int layer_id;
+ CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+
+ switch (videoEncConfig->type)
+ {
+ case VideoConfigTypeVP8:{
+ VideoConfigVP8 *encConfigVP8 =
+ reinterpret_cast<VideoConfigVP8*> (videoEncConfig);
+
+ if (encConfigVP8->size != sizeof(VideoConfigVP8)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoConfigVP8 = *encConfigVP8;
+ }
+ break;
+
+ case VideoConfigTypeVP8ReferenceFrame:{
+ VideoConfigVP8ReferenceFrame *encConfigVP8ReferenceFrame =
+ reinterpret_cast<VideoConfigVP8ReferenceFrame*> (videoEncConfig);
+
+ if (encConfigVP8ReferenceFrame->size != sizeof(VideoConfigVP8ReferenceFrame)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoConfigVP8ReferenceFrame = *encConfigVP8ReferenceFrame;
+
+ }
+ break;
+
+ case VideoConfigTypeVP8MaxFrameSizeRatio:{
+ VideoConfigVP8MaxFrameSizeRatio *encConfigVP8MaxFrameSizeRatio =
+ reinterpret_cast<VideoConfigVP8MaxFrameSizeRatio*> (videoEncConfig);
+
+ if (encConfigVP8MaxFrameSizeRatio->size != sizeof(VideoConfigVP8MaxFrameSizeRatio)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mVideoParamsVP8.max_frame_size_ratio = encConfigVP8MaxFrameSizeRatio->max_frame_size_ratio;
+ mRenderMaxFrameSize = true;
+ }
+ break;
+
+ case VideoConfigTypeIDRRequest:{
+ VideoParamConfigSet *encConfigVP8KFrameRequest =
+ reinterpret_cast<VideoParamConfigSet*> (videoEncConfig);
+
+ mVideoConfigVP8.force_kf = 1;
+ mForceKFrame = true;
+ }
+ break;
+
+ default: {
+ LOG_E ("Invalid Config Type");
+ break;
+ }
+ }
+ return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderVP8.h b/videoencoder/VideoEncoderVP8.h
new file mode 100644
index 0000000..1a4360b
--- /dev/null
+++ b/videoencoder/VideoEncoderVP8.h
@@ -0,0 +1,58 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __VIDEO_ENCODER_VP8_H__
+#define __VIDEO_ENCODER_VP8_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+ * VP8 Encoder class, derived from VideoEncoderBase
+ */
/**
 * VP8 Encoder class, derived from VideoEncoderBase.
 *
 * Implements the VP8-specific parameter/config plumbing and the per-frame
 * VA buffer submission (sequence, picture, RC, HRD, frame-rate, max-frame-size,
 * and temporal-layer-structure misc parameters).
 */
class VideoEncoderVP8: public VideoEncoderBase {
public:
    VideoEncoderVP8();
    virtual ~VideoEncoderVP8();
    virtual Encode_Status start();



protected:
    // Submit all VA parameter buffers for one frame (see the .cpp for order).
    virtual Encode_Status sendEncodeCommand(EncodeTask *task);
    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig);
    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig);
    // VP8 has no extra output formats (e.g. no SPS/PPS-style headers).
    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *) {
        return ENCODE_NOT_SUPPORTED;
    }

    // Local Methods
private:
    Encode_Status renderSequenceParams();
    Encode_Status renderPictureParams(EncodeTask *task);
    Encode_Status renderRCParams(uint32_t layer_id, bool total_bitrate);
    Encode_Status renderHRDParams(void);
    Encode_Status renderFrameRateParams(uint32_t layer_id, bool total_framerate);
    Encode_Status renderMaxFrameSizeParams(void);
    Encode_Status renderLayerStructureParam(void);

    // Per-frame dynamic config (force keyframe, sharpness, ...).
    VideoConfigVP8 mVideoConfigVP8;
    // Sequence-level parameters (QP range, keyframe policy, HRD, ...).
    VideoParamsVP8 mVideoParamsVP8;
    // Reference-frame usage/refresh policy.
    VideoConfigVP8ReferenceFrame mVideoConfigVP8ReferenceFrame;
};
+
+#endif /* __VIDEO_ENCODER_VP8_H__ */
diff --git a/videoencoder/bitstream.h b/videoencoder/bitstream.h
new file mode 100644
index 0000000..c7f919e
--- /dev/null
+++ b/videoencoder/bitstream.h
@@ -0,0 +1,403 @@
+/*
+* Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#ifndef __BITSTREAM_H__
+#define __BITSTREAM_H__
+
+#include <VideoEncoderBase.h>
+#include <assert.h>
+
+// Growable bit-level output buffer used to assemble packed H.264 headers.
+// Bits are accumulated into 32-bit words; completed words are byte-swapped
+// into stream order by bitstream_put_ui()/bitstream_end().
+struct bitstream {
+    unsigned int *buffer;      // heap-allocated dword array holding the packed bits
+    int bit_offset;            // total number of bits written so far
+    int max_size_in_dword;     // current capacity of 'buffer', in 32-bit words
+};
+
+#define BITSTREAM_ALLOCATE_STEPPING 4096
+
+/*
+ * Reinterpret the four bytes of 'val' in memory order as a big-endian
+ * 32-bit value (i.e. a byte swap on little-endian hosts).
+ */
+static unsigned int va_swap32(unsigned int val)
+{
+    unsigned char *pval = (unsigned char *)&val;
+
+    /* Cast each byte to unsigned int before shifting: 'pval[0] << 24'
+       shifts a *promoted signed int* into the sign bit whenever
+       pval[0] >= 0x80, which is undefined behavior. */
+    return (((unsigned int)pval[0] << 24) |
+            ((unsigned int)pval[1] << 16) |
+            ((unsigned int)pval[2] << 8)  |
+            ((unsigned int)pval[3] << 0));
+}
+
+/*
+ * Initialize 'bs' with a zeroed buffer of BITSTREAM_ALLOCATE_STEPPING
+ * dwords.  Aborts on allocation failure -- the same OOM policy as the
+ * realloc path in bitstream_put_ui(); the original left the failure
+ * unchecked and every subsequent write would dereference NULL.
+ */
+static void bitstream_start(bitstream *bs)
+{
+    bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
+    /* calloc(count, element_size) with the buffer's real element type;
+       zero fill is required because put_ui shifts existing contents. */
+    bs->buffer = (unsigned int*)calloc(bs->max_size_in_dword, sizeof(unsigned int));
+    if (bs->buffer == NULL)
+        abort();
+    bs->bit_offset = 0;
+}
+
+/*
+ * Finish the stream: if the last dword is only partially filled, shift its
+ * valid bits to the top and store it in stream byte order.  (Fully filled
+ * dwords were already swapped by bitstream_put_ui.)
+ */
+static void bitstream_end(bitstream *bs)
+{
+    int dword_index = bs->bit_offset >> 5;
+    int used_bits = bs->bit_offset & 0x1f;
+
+    if (used_bits != 0) {
+        int unused_bits = 32 - used_bits;
+        bs->buffer[dword_index] = va_swap32(bs->buffer[dword_index] << unused_bits);
+    }
+}
+
+/*
+ * Append the low 'size_in_bits' bits of 'val' (1..32) to the stream,
+ * growing the dword buffer by BITSTREAM_ALLOCATE_STEPPING as needed.
+ * Completed dwords are byte-swapped into stream order; the trailing
+ * partial dword is flushed later by bitstream_end().
+ */
+static void bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
+{
+    int pos = (bs->bit_offset >> 5);
+    int bit_offset = (bs->bit_offset & 0x1f);
+    int bit_left = 32 - bit_offset;
+
+    if (!size_in_bits)
+        return;
+
+    bs->bit_offset += size_in_bits;
+
+    if (bit_left > size_in_bits) {
+        bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
+    } else {
+        size_in_bits -= bit_left;
+        /* Shifting a 32-bit value by 32 is undefined behavior.  When the
+           current dword is empty (bit_left == 32, i.e. a 32-bit write on
+           a dword boundary) take the bits of 'val' directly instead of
+           shifting the empty accumulator. */
+        if (bit_left == 32)
+            bs->buffer[pos] = (val >> size_in_bits);
+        else
+            bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
+        bs->buffer[pos] = va_swap32(bs->buffer[pos]);
+
+        if (pos + 1 == bs->max_size_in_dword) {
+            bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
+            bs->buffer = (unsigned int*)realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
+            if (bs->buffer == NULL)
+                abort();
+        }
+
+        /* Leftover low bits land in the next dword; any stale high bits
+           are shifted out by later writes or by bitstream_end(). */
+        bs->buffer[pos + 1] = val;
+    }
+}
+
+/*
+ * Write 'val' as an unsigned Exp-Golomb code, ue(v): N leading zero bits
+ * followed by the (N+1)-bit binary representation of val + 1
+ * (H.264 spec 9.1).
+ */
+static void bitstream_put_ue(bitstream *bs, unsigned int val)
+{
+    int size_in_bits = 0;
+    /* Keep the working copy unsigned: the original 'int tmp_val = ++val;'
+       made the conversion implementation-defined for val >= INT_MAX and
+       the subsequent >> a signed shift. */
+    unsigned int tmp_val = ++val;
+
+    /* count the bits needed to represent val + 1 */
+    while (tmp_val) {
+        tmp_val >>= 1;
+        size_in_bits++;
+    }
+
+    bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
+    bitstream_put_ui(bs, val, size_in_bits);
+}
+
+/*
+ * Write 'val' as a signed Exp-Golomb code, se(v): the values
+ * 0, 1, -1, 2, -2, ... map to ue codes 0, 1, 2, 3, 4, ...
+ * (H.264 spec 9.1.1).
+ */
+static void bitstream_put_se(bitstream *bs, int val)
+{
+    unsigned int new_val;
+
+    /* Do the mapping in unsigned arithmetic: the original '-2 * val' and
+       '2 * val' overflow signed int (UB) for val == INT_MIN or
+       val > INT_MAX / 2.  Unsigned wraparound gives the intended result. */
+    if (val <= 0)
+        new_val = (0u - (unsigned int)val) * 2u;
+    else
+        new_val = (unsigned int)val * 2u - 1u;
+
+    bitstream_put_ue(bs, new_val);
+}
+
+/*
+ * Pad the stream with copies of 'bit' (which must be 0 or 1) up to the
+ * next byte boundary.  Does nothing if already byte-aligned.
+ */
+static void bitstream_byte_aligning(bitstream *bs, int bit)
+{
+    int bits_into_byte = bs->bit_offset & 0x7;
+
+    if (bits_into_byte == 0)
+        return;
+
+    assert(bit == 0 || bit == 1);
+
+    int pad_bits = 8 - bits_into_byte;
+    int pad_val = bit ? ((1 << pad_bits) - 1) : 0;
+
+    bitstream_put_ui(bs, pad_val, pad_bits);
+}
+
+/* H.264 rbsp_trailing_bits() (spec 7.3.2.11): a single stop bit followed
+   by zero bits up to the next byte boundary. */
+static void rbsp_trailing_bits(bitstream *bs)
+{
+    bitstream_put_ui(bs, 1, 1);       // rbsp_stop_one_bit
+    bitstream_byte_aligning(bs, 0);   // rbsp_alignment_zero_bit(s)
+}
+
+/* Emit the 4-byte Annex B start code (00 00 00 01) that precedes a NAL unit. */
+static void nal_start_code_prefix(bitstream *bs)
+{
+    bitstream_put_ui(bs, 0x00000001, 32);
+}
+
+/* Write the one-byte NAL unit header (H.264 spec 7.3.1):
+   forbidden_zero_bit, nal_ref_idc (2 bits), nal_unit_type (5 bits). */
+static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
+{
+    bitstream_put_ui(bs, 0, 1);                /* forbidden_zero_bit: 0 */
+    bitstream_put_ui(bs, nal_ref_idc, 2);
+    bitstream_put_ui(bs, nal_unit_type, 5);
+}
+
+#define NAL_REF_IDC_NONE 0
+#define NAL_REF_IDC_LOW 1
+#define NAL_REF_IDC_MEDIUM 2
+#define NAL_REF_IDC_HIGH 3
+
+#define NAL_NON_IDR 1
+#define NAL_IDR 5
+#define NAL_SPS 7
+#define NAL_PPS 8
+#define NAL_SEI 6
+
+#define SLICE_TYPE_P 0
+#define SLICE_TYPE_B 1
+#define SLICE_TYPE_I 2
+
+#define ENTROPY_MODE_CAVLC 0
+#define ENTROPY_MODE_CABAC 1
+
+#define PROFILE_IDC_BASELINE 66
+#define PROFILE_IDC_MAIN 77
+#define PROFILE_IDC_HIGH 100
+
+/*
+ * Serialize an H.264 sequence parameter set RBSP (spec 7.3.2.1) from the
+ * libva sequence parameter buffer.  Only the subset of syntax this encoder
+ * emits is supported: pic_order_cnt_type must be 0 and the stream must be
+ * frame_mbs_only (anything else hits assert(0)).  When frame_bit_rate is
+ * non-negative, VUI timing and NAL HRD parameters are written to signal
+ * CBR operation.  The emission order below is the exact syntax order of
+ * the spec and must not be rearranged.
+ */
+static void sps_rbsp(bitstream *bs, VAProfile profile, int frame_bit_rate, VAEncSequenceParameterBufferH264 *seq_param)
+{
+    int profile_idc = 0;
+    int constraint_set_flag = 0;
+
+    if (profile == VAProfileH264High) {
+        profile_idc = PROFILE_IDC_HIGH;
+        constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
+    }
+    else if (profile == VAProfileH264Main) {
+        profile_idc = PROFILE_IDC_MAIN;
+        constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
+    } else {
+        /* default: constrained baseline for any other profile value */
+        profile_idc = PROFILE_IDC_BASELINE;
+        constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
+    }
+
+    bitstream_put_ui(bs, profile_idc, 8);               /* profile_idc */
+    bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1);                         /* constraint_set0_flag */
+    bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1);                         /* constraint_set1_flag */
+    bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1);                         /* constraint_set2_flag */
+    bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1);                         /* constraint_set3_flag */
+    bitstream_put_ui(bs, 0, 4);                         /* reserved_zero_4bits */
+    bitstream_put_ui(bs, seq_param->level_idc, 8);      /* level_idc */
+    bitstream_put_ue(bs, seq_param->seq_parameter_set_id);      /* seq_parameter_set_id */
+
+    if ( profile_idc == PROFILE_IDC_HIGH) {
+        /* High profile adds chroma/bit-depth fields; fixed to 4:2:0, 8-bit */
+        bitstream_put_ue(bs, 1);        /* chroma_format_idc = 1, 4:2:0 */
+        bitstream_put_ue(bs, 0);        /* bit_depth_luma_minus8 */
+        bitstream_put_ue(bs, 0);        /* bit_depth_chroma_minus8 */
+        bitstream_put_ui(bs, 0, 1);     /* qpprime_y_zero_transform_bypass_flag */
+        bitstream_put_ui(bs, 0, 1);     /* seq_scaling_matrix_present_flag */
+    }
+
+    bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
+    bitstream_put_ue(bs, seq_param->seq_fields.bits.pic_order_cnt_type);        /* pic_order_cnt_type */
+
+    if (seq_param->seq_fields.bits.pic_order_cnt_type == 0)
+        bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);     /* log2_max_pic_order_cnt_lsb_minus4 */
+    else {
+        /* pic_order_cnt_type 1/2 syntax not implemented by this encoder */
+        assert(0);
+    }
+
+    bitstream_put_ue(bs, seq_param->max_num_ref_frames);        /* num_ref_frames */
+    bitstream_put_ui(bs, 0, 1);                                 /* gaps_in_frame_num_value_allowed_flag */
+
+    bitstream_put_ue(bs, seq_param->picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
+    bitstream_put_ue(bs, seq_param->picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
+    bitstream_put_ui(bs, seq_param->seq_fields.bits.frame_mbs_only_flag, 1);    /* frame_mbs_only_flag */
+
+    if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
+        /* interlaced (field/MBAFF) syntax not implemented */
+        assert(0);
+    }
+
+    bitstream_put_ui(bs, seq_param->seq_fields.bits.direct_8x8_inference_flag, 1);      /* direct_8x8_inference_flag */
+    bitstream_put_ui(bs, seq_param->frame_cropping_flag, 1);            /* frame_cropping_flag */
+
+    if (seq_param->frame_cropping_flag) {
+        bitstream_put_ue(bs, seq_param->frame_crop_left_offset);        /* frame_crop_left_offset */
+        bitstream_put_ue(bs, seq_param->frame_crop_right_offset);       /* frame_crop_right_offset */
+        bitstream_put_ue(bs, seq_param->frame_crop_top_offset);         /* frame_crop_top_offset */
+        bitstream_put_ue(bs, seq_param->frame_crop_bottom_offset);      /* frame_crop_bottom_offset */
+    }
+
+    if ( frame_bit_rate < 0 ) {
+        bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
+    } else {
+        bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
+        bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
+        bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
+        bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
+        bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
+        bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
+        {
+            /* num_units_in_tick = 15, time_scale = 900 => frame rate
+               time_scale / (2 * num_units_in_tick) = 30 fps, fixed.
+               NOTE(review): hard-coded; presumably all callers encode at
+               30 fps -- confirm against the encoder's frame-rate config. */
+            bitstream_put_ui(bs, 15, 32);       /* num_units_in_tick */
+            bitstream_put_ui(bs, 900, 32);      /* time_scale */
+            bitstream_put_ui(bs, 1, 1);         /* fixed_frame_rate_flag */
+        }
+        bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
+        {
+            // hrd_parameters
+            bitstream_put_ue(bs, 0);    /* cpb_cnt_minus1 */
+            bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
+            bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
+
+            /* NOTE(review): the values below are scaled by
+               2^(6+bit_rate_scale) / 2^(4+cpb_size_scale) per spec E.2.2;
+               the caller passes bits_per_second directly -- verify units. */
+            bitstream_put_ue(bs, frame_bit_rate - 1); /* bit_rate_value_minus1[0] */
+            bitstream_put_ue(bs, frame_bit_rate*8 - 1); /* cpb_size_value_minus1[0] */
+            bitstream_put_ui(bs, 1, 1);  /* cbr_flag[0] */
+
+            bitstream_put_ui(bs, 23, 5);   /* initial_cpb_removal_delay_length_minus1 */
+            bitstream_put_ui(bs, 23, 5);   /* cpb_removal_delay_length_minus1 */
+            bitstream_put_ui(bs, 23, 5);   /* dpb_output_delay_length_minus1 */
+            bitstream_put_ui(bs, 23, 5);   /* time_offset_length  */
+        }
+        bitstream_put_ui(bs, 0, 1);   /* vcl_hrd_parameters_present_flag */
+        bitstream_put_ui(bs, 0, 1);   /* low_delay_hrd_flag */
+
+        bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
+        bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
+    }
+
+    rbsp_trailing_bits(bs);     /* rbsp_trailing_bits */
+}
+
+/*
+ * Serialize an H.264 picture parameter set RBSP (spec 7.3.2.2) from the
+ * libva picture parameter buffer.  Slice groups (FMO) are fixed at one
+ * and no scaling matrices are written.  The emission order below is the
+ * exact syntax order of the spec and must not be rearranged.
+ */
+static void pps_rbsp(bitstream *bs, VAEncPictureParameterBufferH264 *pic_param)
+{
+
+    bitstream_put_ue(bs, pic_param->pic_parameter_set_id);      /* pic_parameter_set_id */
+    bitstream_put_ue(bs, pic_param->seq_parameter_set_id);      /* seq_parameter_set_id */
+
+    bitstream_put_ui(bs, pic_param->pic_fields.bits.entropy_coding_mode_flag, 1);  /* entropy_coding_mode_flag */
+
+    bitstream_put_ui(bs, 0, 1);                         /* pic_order_present_flag: 0 */
+
+    bitstream_put_ue(bs, 0);                            /* num_slice_groups_minus1 */
+
+    bitstream_put_ue(bs, pic_param->num_ref_idx_l0_active_minus1);      /* num_ref_idx_l0_active_minus1 */
+    bitstream_put_ue(bs, pic_param->num_ref_idx_l1_active_minus1);      /* num_ref_idx_l1_active_minus1 1 */
+
+    bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_pred_flag, 1);     /* weighted_pred_flag: 0 */
+    bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_bipred_idc, 2);	/* weighted_bipred_idc: 0 */
+
+    bitstream_put_se(bs, pic_param->pic_init_qp - 26);  /* pic_init_qp_minus26 */
+    bitstream_put_se(bs, 0);                            /* pic_init_qs_minus26 */
+    bitstream_put_se(bs, 0);                            /* chroma_qp_index_offset */
+
+    bitstream_put_ui(bs, pic_param->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
+    bitstream_put_ui(bs, 0, 1);                         /* constrained_intra_pred_flag */
+    bitstream_put_ui(bs, 0, 1);                         /* redundant_pic_cnt_present_flag */
+
+    /* more_rbsp_data: High-profile extension fields */
+    bitstream_put_ui(bs, pic_param->pic_fields.bits.transform_8x8_mode_flag, 1);    /*transform_8x8_mode_flag */
+    bitstream_put_ui(bs, 0, 1);                         /* pic_scaling_matrix_present_flag */
+    bitstream_put_se(bs, pic_param->second_chroma_qp_index_offset );	/*second_chroma_qp_index_offset */
+
+    rbsp_trailing_bits(bs);
+}
+
+/*
+ * Build a complete packed SPS NAL unit: Annex B start code + NAL header
+ * (ref_idc HIGH, type SPS) + SPS RBSP.  On return *header_buffer points
+ * to a heap buffer owned by the caller (release with free()).  Returns
+ * the length of the NAL in *bits*.
+ *
+ * Declared 'static' like every other function in this header: without it,
+ * each translation unit that includes bitstream.h would emit an
+ * external-linkage definition and the link would fail with multiple
+ * definitions.
+ */
+static int build_packed_seq_buffer(unsigned char **header_buffer, VAProfile profile, VAEncSequenceParameterBufferH264 *seq_param)
+{
+    bitstream bs;
+
+    bitstream_start(&bs);
+    nal_start_code_prefix(&bs);
+    nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
+    /* bits_per_second doubles as the HRD frame_bit_rate input */
+    sps_rbsp(&bs, profile, seq_param->bits_per_second, seq_param);
+    bitstream_end(&bs);
+
+    /* ownership of bs.buffer transfers to the caller */
+    *header_buffer = (unsigned char *)bs.buffer;
+    return bs.bit_offset;
+}
+
+/*
+ * Build a complete packed PPS NAL unit: Annex B start code + NAL header
+ * (ref_idc HIGH, type PPS) + PPS RBSP.  On return *header_buffer points
+ * to a heap buffer owned by the caller (release with free()).  Returns
+ * the length of the NAL in *bits*.
+ *
+ * Declared 'static' like every other function in this header to avoid
+ * multiple-definition link errors when the header is included from more
+ * than one translation unit.
+ */
+static int build_packed_pic_buffer(unsigned char **header_buffer, VAEncPictureParameterBufferH264 *pic_param)
+{
+    bitstream bs;
+
+    bitstream_start(&bs);
+    nal_start_code_prefix(&bs);
+    nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
+    pps_rbsp(&bs, pic_param);
+    bitstream_end(&bs);
+
+    /* ownership of bs.buffer transfers to the caller */
+    *header_buffer = (unsigned char *)bs.buffer;
+    return bs.bit_offset;
+}
+
+/*
+ * Build a packed SEI NAL unit carrying two payloads: a buffering_period
+ * SEI (payload type 0) and a pic_timing SEI (payload type 1), as used for
+ * CBR/HRD signalling.  Each payload is assembled in its own temporary
+ * bitstream, byte-aligned with a stop bit if needed, then copied
+ * byte-by-byte into the outer NAL bitstream behind its type/size header.
+ * On return *sei_buffer points to a heap buffer owned by the caller
+ * (release with free()); the two temporaries are freed here.  Returns the
+ * length of the NAL in *bits*.
+ *
+ * NOTE(review): no emulation-prevention (00 00 03) insertion is done
+ * here -- presumably the payload bytes never form a start-code pattern;
+ * confirm for large delay values.
+ *
+ * Declared 'static' like every other function in this header to avoid
+ * multiple-definition link errors when included from several TUs.
+ */
+static int build_packed_sei_buffer_timing(unsigned int init_cpb_removal_delay,
+				unsigned int init_cpb_removal_delay_offset,
+				unsigned int cpb_removal_length,
+				unsigned int cpb_removal_delay,
+				unsigned int dpb_output_length,
+				unsigned int dpb_output_delay,
+				unsigned char **sei_buffer)
+{
+    unsigned char *byte_buf;
+    int bp_byte_size, i, pic_byte_size;
+
+    bitstream nal_bs;
+    bitstream sei_bp_bs, sei_pic_bs;
+
+    /* buffering_period payload */
+    bitstream_start(&sei_bp_bs);
+    bitstream_put_ue(&sei_bp_bs, 0);           /*seq_parameter_set_id*/
+    bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay, cpb_removal_length);
+    bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay_offset, cpb_removal_length);
+    if ( sei_bp_bs.bit_offset & 0x7) {
+        /* payload must be byte-aligned: append the stop bit here */
+        bitstream_put_ui(&sei_bp_bs, 1, 1);
+    }
+    bitstream_end(&sei_bp_bs);
+    bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
+
+    /* pic_timing payload */
+    bitstream_start(&sei_pic_bs);
+    bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
+    bitstream_put_ui(&sei_pic_bs, dpb_output_delay, dpb_output_length);
+    if ( sei_pic_bs.bit_offset & 0x7) {
+        bitstream_put_ui(&sei_pic_bs, 1, 1);
+    }
+    bitstream_end(&sei_pic_bs);
+    pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
+
+    /* outer SEI NAL unit */
+    bitstream_start(&nal_bs);
+    nal_start_code_prefix(&nal_bs);
+    nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
+
+    /* Write the SEI buffer period data */
+    bitstream_put_ui(&nal_bs, 0, 8);               /* payload type: buffering_period */
+    bitstream_put_ui(&nal_bs, bp_byte_size, 8);    /* payload size in bytes */
+
+    byte_buf = (unsigned char *)sei_bp_bs.buffer;
+    for(i = 0; i < bp_byte_size; i++) {
+        bitstream_put_ui(&nal_bs, byte_buf[i], 8);
+    }
+    free(byte_buf);
+    /* write the SEI timing data */
+    bitstream_put_ui(&nal_bs, 0x01, 8);            /* payload type: pic_timing */
+    bitstream_put_ui(&nal_bs, pic_byte_size, 8);   /* payload size in bytes */
+
+    byte_buf = (unsigned char *)sei_pic_bs.buffer;
+    for(i = 0; i < pic_byte_size; i++) {
+        bitstream_put_ui(&nal_bs, byte_buf[i], 8);
+    }
+    free(byte_buf);
+
+    rbsp_trailing_bits(&nal_bs);
+    bitstream_end(&nal_bs);
+
+    /* ownership of nal_bs.buffer transfers to the caller */
+    *sei_buffer = (unsigned char *)nal_bs.buffer;
+
+    return nal_bs.bit_offset;
+}
+
+#endif