/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "AVCEncoder"
#include <utils/Log.h>

#include "AVCEncoder.h"

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

namespace android {

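// Maps the OMX AVC profile constant supplied by the framework to the profile
// enum used by the PV software encoder core. Only baseline profile is
// accepted here; anything else is rejected with BAD_VALUE.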
static status_t ConvertOmxAvcProfileToAvcSpecProfile(
        int32_t omxProfile, AVCProfile* pvProfile) {
    ALOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
    switch (omxProfile) {
        case OMX_VIDEO_AVCProfileBaseline:
            *pvProfile = AVC_BASELINE;
            return OK;
        default:
            LOGE("Unsupported omx profile: %d", omxProfile);
    }
    return BAD_VALUE;
}

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        int32_t omxLevel, AVCLevel *pvLevel) {
    ALOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
    AVCLevel level = AVC_LEVEL5_1;
    switch (omxLevel) {
        case OMX_VIDEO_AVCLevel1:
            level = AVC_LEVEL1_B;
            break;
        case OMX_VIDEO_AVCLevel1b:
            level = AVC_LEVEL1;
            break;
        case OMX_VIDEO_AVCLevel11:
            level = AVC_LEVEL1_1;
            break;
        case OMX_VIDEO_AVCLevel12:
            level = AVC_LEVEL1_2;
            break;
        case OMX_VIDEO_AVCLevel13:
            level = AVC_LEVEL1_3;
            break;
        case OMX_VIDEO_AVCLevel2:
            level = AVC_LEVEL2;
            break;
        case OMX_VIDEO_AVCLevel21:
            level = AVC_LEVEL2_1;
            break;
        case OMX_VIDEO_AVCLevel22:
            level = AVC_LEVEL2_2;
            break;
        case OMX_VIDEO_AVCLevel3:
            level = AVC_LEVEL3;
            break;
        case OMX_VIDEO_AVCLevel31:
            level = AVC_LEVEL3_1;
            break;
        case OMX_VIDEO_AVCLevel32:
            level = AVC_LEVEL3_2;
            break;
        case OMX_VIDEO_AVCLevel4:
            level = AVC_LEVEL4;
            break;
        case OMX_VIDEO_AVCLevel41:
            level = AVC_LEVEL4_1;
            break;
        case OMX_VIDEO_AVCLevel42:
            level = AVC_LEVEL4_2;
            break;
        case OMX_VIDEO_AVCLevel5:
            level = AVC_LEVEL5;
            break;
        case OMX_VIDEO_AVCLevel51:
            level = AVC_LEVEL5_1;
            break;
        default:
            LOGE("Unknown omx level: %d", omxLevel);
            return BAD_VALUE;
    }
    *pvLevel = level;
    return OK;
}

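// Converts one interleaved-chroma (semi-planar) YUV420 frame into the fully
// planar layout expected by the encoder core. The chroma loop reads four
// interleaved bytes (two chroma pairs) per 32-bit word and writes two bytes
// to each output plane. The "flip" below implies the incoming chroma bytes
// are ordered Cr/Cb (as in Android's YCrCb 420 SP camera format), so they
// are swapped while being de-interleaved into the Cb and Cr planes.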
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy = (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V copying */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // Flip U and V
            *outcb++ = tempV;
            *outcr++ = tempU;
        }
    }
}

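// C-style callbacks registered with the PV encoder core through tagAVCHandle.
// The core passes back the userData pointer supplied at initialization, which
// is the AVCEncoder instance, so each wrapper simply forwards to the matching
// member function, or to malloc/free for plain heap requests (note that the
// PV API passes heap pointers around as int32_t, hence the casts).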
static int32_t MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    return reinterpret_cast<int32_t>(malloc(size));
}

static void FreeWrapper(void *userData, int32_t ptr) {
    free(reinterpret_cast<void *>(ptr));
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    AVCEncoder *encoder = static_cast<AVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    AVCEncoder *encoder = static_cast<AVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    AVCEncoder *encoder = static_cast<AVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

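// Rough usage sketch (not part of this file): AVCEncoder is itself a
// MediaSource, so a client wraps a raw YUV source and pulls encoded NAL
// units from read(). The metadata keys checked in initCheck() below
// (width, height, frame rate, bit rate, color format, I-frame interval)
// are assumed to be present in 'meta'.
//
//   sp<MediaSource> encoder = new AVCEncoder(yuvSource, encMeta);
//   encoder->start();
//   MediaBuffer *accessUnit;
//   while (encoder->read(&accessUnit, NULL) == OK) {
//       // ... hand the NAL data to a writer/muxer ...
//       accessUnit->release();
//   }
//   encoder->stop();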
AVCEncoder::AVCEncoder(
        const sp<MediaSource>& source,
        const sp<MetaData>& meta)
    : mSource(source),
      mMeta(meta),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mEncParams(NULL),  // keep the destructor safe if initCheck() fails early
      mInputBuffer(NULL),
      mInputFrameData(NULL),
      mGroup(NULL) {

    ALOGI("Construct software AVCEncoder");

    mHandle = new tagAVCHandle;
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    mInitCheck = initCheck(meta);
}

AVCEncoder::~AVCEncoder() {
    ALOGV("Destruct software AVCEncoder");
    if (mStarted) {
        stop();
    }

    // The slice group map is malloc()'ed in initCheck() and is not owned by
    // the encoder core, so release it here before dropping the param block.
    if (mEncParams != NULL) {
        free(mEncParams->slice_group);
    }
    delete mEncParams;
    delete mHandle;
}

status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
    ALOGV("initCheck");
    CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
    CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
    CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
    CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));

    // XXX: Add more color format support
    CHECK(meta->findInt32(kKeyColorFormat, &mVideoColorFormat));
    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
        if (mVideoColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) {
            LOGE("Color format %d is not supported", mVideoColorFormat);
            return BAD_VALUE;
        }
        // Allocate spare buffer only when color conversion is needed.
        // Assume the color format is OMX_COLOR_FormatYUV420SemiPlanar.
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3) >> 1);
        CHECK(mInputFrameData);
    }

    // XXX: Remove this restriction
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        LOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return BAD_VALUE;
    }

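    // The parameter block below configures the PV core for low-latency,
    // baseline-style encoding: a single reference frame, one slice group,
    // rate control enabled, and most optional tools (sub-pel search, RD
    // optimization, B-prediction, data partitioning) turned off.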
    mEncParams = new tagAVCEncParam;
    memset(mEncParams, 0, sizeof(tagAVCEncParam));
    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->frame_rate = 1000 * mVideoFrameRate; // frames per 1000 seconds
    mEncParams->rate_control = AVC_ON;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;
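
    // Build the macroblock-to-slice-group map expected by the core. The
    // frame dimensions are rounded up to multiples of 16 to get the
    // macroblock count, and macroblocks are dealt out round-robin across
    // slice groups; with num_slice_group == 1 every entry is simply 0.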
    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    uint32_t *sliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        sliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = sliceGroup;

    mEncParams->use_overrun_buffer = AVC_OFF;

    // Set IDR frame refresh interval
    int32_t iFramesIntervalSec;
    CHECK(meta->findInt32(kKeyIFramesInterval, &iFramesIntervalSec));
    if (iFramesIntervalSec < 0) {
        mEncParams->idr_period = -1;
    } else if (iFramesIntervalSec == 0) {
        mEncParams->idr_period = 1; // All I frames
    } else {
        mEncParams->idr_period =
            (iFramesIntervalSec * mVideoFrameRate);
    }
    ALOGV("idr_period: %d, I-frames interval: %d seconds, and frame rate: %d",
        mEncParams->idr_period, iFramesIntervalSec, mVideoFrameRate);

    // Set profile and level
    // If profile and level setting is not correct, failure
    // is reported when the encoder is initialized.
    mEncParams->profile = AVC_BASELINE;
    mEncParams->level = AVC_LEVEL3_2;
    int32_t profile, level;
    if (meta->findInt32(kKeyVideoProfile, &profile)) {
        if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
                profile, &mEncParams->profile)) {
            return BAD_VALUE;
        }
    }
    if (meta->findInt32(kKeyVideoLevel, &level)) {
        if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
                level, &mEncParams->level)) {
            return BAD_VALUE;
        }
    }

    mFormat = new MetaData;
    mFormat->setInt32(kKeyWidth, mVideoWidth);
    mFormat->setInt32(kKeyHeight, mVideoHeight);
    mFormat->setInt32(kKeyBitRate, mVideoBitRate);
    mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
    mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
    mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
    return OK;
}

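// Initializes the PV encoder with the parameters prepared in initCheck(),
// sizes the single output MediaBuffer to the encoder's reported maximum
// output size (falling back to a hard-coded value when the query fails),
// and resets the per-session state so that the first buffer returned by
// read() carries the combined SPS and PPS.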
status_t AVCEncoder::start(MetaData *params) {
    ALOGV("start");
    if (mInitCheck != OK) {
        return mInitCheck;
    }

    if (mStarted) {
        LOGW("Call start() when encoder already started");
        return OK;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        LOGE("Failed to initialize the encoder: %d", err);
        return UNKNOWN_ERROR;
    }

    mGroup = new MediaBufferGroup();
    int32_t maxSize;
    if (AVCENC_SUCCESS !=
            PVAVCEncGetMaxOutputBufferSize(mHandle, &maxSize)) {
        maxSize = 31584; // Magic #
    }
    mGroup->add_buffer(new MediaBuffer(maxSize));

    mSource->start(params);
    mNumInputFrames = -2; // 1st two buffers contain SPS and PPS
    mStarted = true;
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = 0;

    return OK;
}

status_t AVCEncoder::stop() {
    ALOGV("stop");
    if (!mStarted) {
        LOGW("Call stop() when encoder has not started");
        return OK;
    }

    if (mInputBuffer) {
        mInputBuffer->release();
        mInputBuffer = NULL;
    }

    if (mGroup) {
        delete mGroup;
        mGroup = NULL;
    }

    if (mInputFrameData) {
        free(mInputFrameData);  // allocated with malloc() in initCheck()
        mInputFrameData = NULL;
    }

    PVAVCCleanUpEncoder(mHandle);
    mSource->stop();
    releaseOutputBuffers();
    mStarted = false;

    return OK;
}

void AVCEncoder::releaseOutputBuffers() {
    ALOGV("releaseOutputBuffers");
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

sp<MetaData> AVCEncoder::getFormat() {
    ALOGV("getFormat");
    return mFormat;
}

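// Returns one encoded NAL unit per call. The very first buffer combines the
// SPS and PPS (each prefixed with a 0x00000001 start code) and is flagged as
// codec-config data. After that, a new raw frame is pulled from the source
// whenever the previous picture has been fully emitted, converted to planar
// YUV if necessary, and encoded; a zero-length buffer is returned for frames
// the encoder skips (e.g. a duplicate timestamp) so the caller stays in sync.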
status_t AVCEncoder::read(
        MediaBuffer **out, const ReadOptions *options) {

    CHECK(!options);
    *out = NULL;

    MediaBuffer *outputBuffer;
    CHECK_EQ(OK, mGroup->acquire_buffer(&outputBuffer));
    uint8_t *outPtr = (uint8_t *) outputBuffer->data();
    uint32_t dataLength = outputBuffer->size();

    if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
        // 4 bytes are reserved for holding the start code 0x00000001
        // of the sequence parameter set at the beginning.
        outPtr += 4;
        dataLength -= 4;
    }

    int32_t type;
    AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

    // Combine SPS and PPS and place them in the very first output buffer
    // SPS and PPS are separated by start code 0x00000001
    // Assume that we have exactly one SPS and exactly one PPS.
    while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
        encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
        if (encoderStatus == AVCENC_WRONG_STATE) {
            mSpsPpsHeaderReceived = true;
            CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
        } else {
            switch (type) {
                case AVC_NALTYPE_SPS:
                    ++mNumInputFrames;
                    memcpy((uint8_t *)outputBuffer->data(), "\x00\x00\x00\x01", 4);
                    outputBuffer->set_range(0, dataLength + 4);
                    outPtr += (dataLength + 4);  // 4 bytes for next start code
                    dataLength = outputBuffer->size() -
                            (outputBuffer->range_length() + 4);
                    break;
                case AVC_NALTYPE_PPS:
                    ++mNumInputFrames;
                    memcpy(((uint8_t *) outputBuffer->data()) +
                            outputBuffer->range_length(),
                            "\x00\x00\x00\x01", 4);
                    outputBuffer->set_range(0,
                            dataLength + outputBuffer->range_length() + 4);
                    outputBuffer->meta_data()->setInt32(kKeyIsCodecConfig, 1);
                    outputBuffer->meta_data()->setInt64(kKeyTime, 0);
                    *out = outputBuffer;
                    return OK;
                default:
                    LOGE("Nal type (%d) other than SPS/PPS is unexpected", type);
                    return UNKNOWN_ERROR;
            }
        }
    }

    // Get next input video frame
    if (mReadyForNextFrame) {
        if (mInputBuffer) {
            mInputBuffer->release();
            mInputBuffer = NULL;
        }
        status_t err = mSource->read(&mInputBuffer, options);
        if (err != OK) {
            if (err != ERROR_END_OF_STREAM) {
                LOGE("Failed to read input video frame: %d", err);
            }
            outputBuffer->release();
            return err;
        }

        if (mInputBuffer->size() - ((mVideoWidth * mVideoHeight * 3) >> 1) != 0) {
            outputBuffer->release();
            mInputBuffer->release();
            mInputBuffer = NULL;
            return UNKNOWN_ERROR;
        }

        int64_t timeUs;
        CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
        outputBuffer->meta_data()->setInt64(kKeyTime, timeUs);

        // When the timestamp of the current sample is the same as
        // that of the previous sample, the encoding of the sample
        // is bypassed, and the output length is set to 0.
        if (mNumInputFrames >= 1 && mPrevTimestampUs == timeUs) {
            // Frame arrives too late
            mInputBuffer->release();
            mInputBuffer = NULL;
            outputBuffer->set_range(0, 0);
            *out = outputBuffer;
            return OK;
        }

        // Don't accept out-of-order samples
        CHECK(mPrevTimestampUs < timeUs);
        mPrevTimestampUs = timeUs;

        AVCFrameIO videoInput;
        memset(&videoInput, 0, sizeof(videoInput));
        videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
        videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
        videoInput.coding_timestamp = (timeUs + 500) / 1000;  // in ms
        uint8_t *inputData = (uint8_t *) mInputBuffer->data();

        if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
            CHECK(mInputFrameData);
            CHECK(mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar);
            ConvertYUV420SemiPlanarToYUV420Planar(
                inputData, mInputFrameData, mVideoWidth, mVideoHeight);
            inputData = mInputFrameData;
        }
        CHECK(inputData != NULL);
        videoInput.YCbCr[0] = inputData;
        videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
        videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                ((videoInput.height * videoInput.pitch) >> 2);
        videoInput.disp_order = mNumInputFrames;

        encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
        if (encoderStatus == AVCENC_SUCCESS ||
                encoderStatus == AVCENC_NEW_IDR) {
            mReadyForNextFrame = false;
            ++mNumInputFrames;
            if (encoderStatus == AVCENC_NEW_IDR) {
                mIsIDRFrame = 1;
            }
        } else {
            if (encoderStatus < AVCENC_SUCCESS) {
                outputBuffer->release();
                return UNKNOWN_ERROR;
            } else {
                outputBuffer->set_range(0, 0);
                *out = outputBuffer;
                return OK;
            }
        }
    }

    // Encode an input video frame
    CHECK(encoderStatus == AVCENC_SUCCESS ||
            encoderStatus == AVCENC_NEW_IDR);
    dataLength = outputBuffer->size();  // Reset the output buffer length
    encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
    if (encoderStatus == AVCENC_SUCCESS) {
        outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame, mIsIDRFrame);
        CHECK_EQ(NULL, PVAVCEncGetOverrunBuffer(mHandle));
    } else if (encoderStatus == AVCENC_PICTURE_READY) {
        CHECK_EQ(NULL, PVAVCEncGetOverrunBuffer(mHandle));
        if (mIsIDRFrame) {
            outputBuffer->meta_data()->setInt32(kKeyIsSyncFrame, mIsIDRFrame);
            mIsIDRFrame = 0;
            ALOGV("Output an IDR frame");
        }
        mReadyForNextFrame = true;
        AVCFrameIO recon;
        if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
            PVAVCEncReleaseRecon(mHandle, &recon);
        }
    } else {
        dataLength = 0;
        mReadyForNextFrame = true;
    }
    if (encoderStatus < AVCENC_SUCCESS) {
        outputBuffer->release();
        return UNKNOWN_ERROR;
    }

    outputBuffer->set_range(0, dataLength);
    *out = outputBuffer;
    return OK;
}

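// Called back by the encoder core (via DpbAllocWrapper) to allocate the
// decoded picture buffer. Each buffer holds one reconstructed YUV420 frame:
// sizeInMbs macroblocks * 256 luma pixels * 3/2 bytes per pixel, which is
// what (sizeInMbs << 7) * 3 computes. bindOutputBuffer() then hands the core
// a pointer into one of these buffers; the constant return value of 1
// indicates success to the core.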
int32_t AVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void AVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t AVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    int64_t timeUs;
    CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
    mOutputBuffers[index]->meta_data()->setInt64(kKeyTime, timeUs);

    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void AVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
}

}  // namespace android