blob: 49b47cc8cfea7552f5bc8c8eda5fd3be504e0a79 [file] [log] [blame]
Ajit Khare73cfaf42013-01-07 23:28:47 -08001/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Shalaj Jain65506622013-01-29 18:27:08 -080017//#define LOG_NDEBUG 0
Ajit Khare73cfaf42013-01-07 23:28:47 -080018#define LOG_TAG "DashPlayerRenderer"
19#include <utils/Log.h>
20
21#include "DashPlayerRenderer.h"
22
23#include <media/stagefright/foundation/ABuffer.h>
24#include <media/stagefright/foundation/ADebug.h>
25#include <media/stagefright/foundation/AMessage.h>
Surajit Poddere6471132013-08-20 17:01:03 +053026#include <cutils/properties.h>
Ajit Khare73cfaf42013-01-07 23:28:47 -080027
28namespace android {
29
// static
// Minimum interval between periodic "position" notifications posted to the
// upper layer (100 ms); enforced in notifyPosition().
const int64_t DashPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
32
33DashPlayer::Renderer::Renderer(
34 const sp<MediaPlayerBase::AudioSink> &sink,
35 const sp<AMessage> &notify)
36 : mAudioSink(sink),
37 mNotify(notify),
38 mNumFramesWritten(0),
39 mDrainAudioQueuePending(false),
40 mDrainVideoQueuePending(false),
41 mAudioQueueGeneration(0),
42 mVideoQueueGeneration(0),
43 mAnchorTimeMediaUs(-1),
44 mAnchorTimeRealUs(-1),
45 mFlushingAudio(false),
46 mFlushingVideo(false),
47 mHasAudio(false),
48 mHasVideo(false),
49 mSyncQueues(false),
50 mPaused(false),
51 mWasPaused(false),
52 mLastPositionUpdateUs(-1ll),
53 mVideoLateByUs(0ll),
Surajit Podderc72631d2013-08-20 14:26:43 +053054 mStats(NULL),
55 mSeekTimeUs(0){
Surajit Poddere6471132013-08-20 17:01:03 +053056
57 mAVSyncDelayWindowUs = 40000;
58
59 char avSyncDelayMsec[PROPERTY_VALUE_MAX] = {0};
60 property_get("persist.dash.avsync.window.msec", avSyncDelayMsec, NULL);
61
62 if(*avSyncDelayMsec) {
63 int64_t avSyncDelayWindowUs = atoi(avSyncDelayMsec) * 1000;
64
65 if(avSyncDelayWindowUs > 0) {
66 mAVSyncDelayWindowUs = avSyncDelayWindowUs;
67 }
68 }
69
70 ALOGV("AVsync window in Us %lld", mAVSyncDelayWindowUs);
Ajit Khare73cfaf42013-01-07 23:28:47 -080071}
72
// Dumps the accumulated playback / sync-loss statistics (when a stats object
// was registered) before releasing the reference to it.
DashPlayer::Renderer::~Renderer() {
    if(mStats != NULL) {
        mStats->logStatistics();
        mStats->logSyncLoss();
        mStats = NULL;
    }
}
80
81void DashPlayer::Renderer::queueBuffer(
82 bool audio,
83 const sp<ABuffer> &buffer,
84 const sp<AMessage> &notifyConsumed) {
85 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
86 msg->setInt32("audio", static_cast<int32_t>(audio));
87 msg->setBuffer("buffer", buffer);
88 msg->setMessage("notifyConsumed", notifyConsumed);
89 msg->post();
90}
91
// Queues an end-of-stream marker for the given stream. finalResult carries
// the reason and must not be OK (EOS entries are recognized downstream by a
// NULL buffer plus this result code).
void DashPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    // Stop start-aligning the queues: an EOS on one stream means the other
    // may never receive a matching first buffer.
    // NOTE(review): mSyncQueues is read and syncQueuesDone() is invoked here
    // on the caller's thread, while both are otherwise handled on the looper
    // thread — confirm callers are serialized with the looper.
    if(mSyncQueues)
        syncQueuesDone();

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}
103
104void DashPlayer::Renderer::flush(bool audio) {
105 {
106 Mutex::Autolock autoLock(mFlushLock);
107 if (audio) {
108 CHECK(!mFlushingAudio);
109 mFlushingAudio = true;
110 } else {
111 CHECK(!mFlushingVideo);
112 mFlushingVideo = true;
113 }
114 }
115
116 sp<AMessage> msg = new AMessage(kWhatFlush, id());
117 msg->setInt32("audio", static_cast<int32_t>(audio));
118 msg->post();
119}
120
121void DashPlayer::Renderer::signalTimeDiscontinuity() {
122 CHECK(mAudioQueue.empty());
123 CHECK(mVideoQueue.empty());
124 mAnchorTimeMediaUs = -1;
125 mAnchorTimeRealUs = -1;
126 mWasPaused = false;
Surajit Podderc72631d2013-08-20 14:26:43 +0530127 mSeekTimeUs = 0;
Ajit Khare73cfaf42013-01-07 23:28:47 -0800128 mSyncQueues = mHasAudio && mHasVideo;
129 ALOGI("signalTimeDiscontinuity mHasAudio %d mHasVideo %d mSyncQueues %d",mHasAudio,mHasVideo,mSyncQueues);
130}
131
132void DashPlayer::Renderer::pause() {
133 (new AMessage(kWhatPause, id()))->post();
134}
135
136void DashPlayer::Renderer::resume() {
137 (new AMessage(kWhatResume, id()))->post();
138}
139
// Looper-thread dispatch for all renderer events. Each kWhat* message maps
// to its on*() handler; drain messages additionally carry a generation
// number so requests posted before a flush/pause are discarded.
void DashPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            // Ignore drains scheduled before the last flush/pause bumped
            // the generation counter.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                // Audio data remains queued: compute how long the sink can
                // keep playing from what has already been written...
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                postDrainAudioQueue(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            // Same staleness check as for audio.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Immediately schedule display of the next queued frame.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            // Unknown message: programming error.
            TRESPASS();
            break;
    }
}
230
231void DashPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
232 if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
233 return;
234 }
235
236 if (mAudioQueue.empty()) {
237 return;
238 }
239
240 mDrainAudioQueuePending = true;
241 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
242 msg->setInt32("generation", mAudioQueueGeneration);
243 msg->post(delayUs);
244}
245
246void DashPlayer::Renderer::signalAudioSinkChanged() {
247 (new AMessage(kWhatAudioSinkChanged, id()))->post();
248}
249
// Writes as much queued audio as the sink can currently accept.
// Returns true if audio data remains queued (the caller then reschedules
// another drain), false on EOS or when the queue is exhausted.
bool DashPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        // Sink can't report progress (e.g. not started); try again later.
        return false;
    }

    // Frames of free space in the sink = capacity minus what has been
    // written but not yet played.
    // NOTE(review): if (mNumFramesWritten - numFramesPlayed) ever exceeded
    // frameCount() this would go negative and the size_t conversion below
    // would wrap to a huge value — presumably the sink guarantees that
    // cannot happen; confirm.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
              mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS: a NULL buffer marks end of stream; report it upward and
            // stop draining.

            notifyEOS(true /* audio */, entry->mFinalResult);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return false;
        }

        if (entry->mOffset == 0) {
            // First write from this buffer: re-anchor the media clock to
            // this buffer's timestamp.
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            // Estimate when this buffer will actually be heard: half the
            // sink latency plus the playout time of pending frames.
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2 /* XXX */
                 + numFramesPendingPlayout
                     * mAudioSink->msecsPerFrame()) * 1000ll;

            // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        // Write as much of the buffer as fits in the sink.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Buffer fully consumed: return it to the decoder.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        size_t copiedFrames = copy / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;
    }

    // Push a (rate-limited) position update to the upper layer.
    notifyPosition();

    return !mAudioQueue.empty();
}
335
// Schedules display of the frame at the head of the video queue, delayed so
// its media timestamp lines up with the real-time anchor (established by
// audio when present, otherwise by video itself).
void DashPlayer::Renderer::postDrainVideoQueue() {
    // Same guards as postDrainAudioQueue(): no double-scheduling, no draining
    // during start-alignment or while paused.
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            // No anchor yet: show this frame immediately. In video-only
            // playback, video itself establishes the anchor.
            delayUs = 0;

            if (!mHasAudio) {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            // Video-only playback resuming from pause: re-anchor so time
            // spent paused isn't counted as lateness.
            if ( (!mHasAudio && mHasVideo) && (mWasPaused == true))
            {
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
                mWasPaused = false;
            }

            // Map the frame's media time onto the real-time clock.
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}
385
// Releases the frame at the head of the video queue for display, dropping it
// ("render" = 0) if it is later than the A/V sync window allows.
void DashPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS: force a final position update (bypassing the rate limit),
        // report EOS upward, and reset the lateness estimate.

        notifyPosition(true);

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        mVideoLateByUs = 0ll;

        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    // Lateness = how far past the frame's target wall-clock time we are.
    int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
    int64_t nowUs = ALooper::GetNowUs();
    mVideoLateByUs = nowUs - realTimeUs;

    // Frames later than the configurable sync window are dropped.
    bool tooLate = (mVideoLateByUs > mAVSyncDelayWindowUs);

    if (tooLate) {
        ALOGV("video late by %lld us (%.2f secs)",
              mVideoLateByUs, mVideoLateByUs / 1E6);
        if(mStats != NULL) {
            mStats->recordLate(realTimeUs,nowUs,mVideoLateByUs,mAnchorTimeRealUs);
        }
    } else {
        ALOGV("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
        if(mStats != NULL) {
            mStats->recordOnTime(realTimeUs,nowUs,mVideoLateByUs);
            mStats->incrementTotalRenderingFrames();
            mStats->logFps();
        }
    }

    // Hand the buffer back to its producer, tagged with the render decision.
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}
439
440void DashPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
441 sp<AMessage> notify = mNotify->dup();
442 notify->setInt32("what", kWhatEOS);
443 notify->setInt32("audio", static_cast<int32_t>(audio));
444 notify->setInt32("finalResult", finalResult);
445 notify->post();
446}
447
// Looper-thread handler for queueBuffer(): enqueues the buffer and, while in
// start-alignment mode (mSyncQueues), trims leading audio so the first audio
// and video timestamps line up before draining begins.
void DashPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    // Buffers arriving while this stream is being flushed are returned to
    // the decoder immediately.
    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    // Start-alignment needs the head of both queues to compare.
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts More than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    // Heads are close enough (or video leads): alignment complete, start
    // draining both queues.
    syncQueuesDone();
}
517
518void DashPlayer::Renderer::syncQueuesDone() {
519 if (!mSyncQueues) {
520 return;
521 }
522
523 mSyncQueues = false;
524
525 if (!mAudioQueue.empty()) {
526 postDrainAudioQueue();
527 }
528
529 if (!mVideoQueue.empty()) {
530 postDrainVideoQueue();
531 }
532}
533
534void DashPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
535 int32_t audio;
536 CHECK(msg->findInt32("audio", &audio));
537
538 if (dropBufferWhileFlushing(audio, msg)) {
539 return;
540 }
541
542 int32_t finalResult;
543 CHECK(msg->findInt32("finalResult", &finalResult));
544
545 QueueEntry entry;
546 entry.mOffset = 0;
547 entry.mFinalResult = finalResult;
548
549 if (audio) {
550 mAudioQueue.push_back(entry);
551 postDrainAudioQueue();
552 } else {
553 mVideoQueue.push_back(entry);
554 postDrainVideoQueue();
555 }
556}
557
// Looper-thread handler for flush(): empties the requested queue (returning
// pending buffers to the decoder), clears the flushing flag, invalidates any
// in-flight drain messages via the generation counter, and reports
// completion upward.
void DashPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        // Invalidate any already-posted drain messages.
        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
        // Restart first-frame stats tracking after a video flush.
        if(mStats != NULL) {
            mStats->setVeryFirstFrame(true);
        }
    }

    notifyFlushComplete(audio);
}
595
596void DashPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
597 while (!queue->empty()) {
598 QueueEntry *entry = &*queue->begin();
599
600 if (entry->mBuffer != NULL) {
601 entry->mNotifyConsumed->post();
602 }
603
604 queue->erase(queue->begin());
605 entry = NULL;
606 }
607}
608
609void DashPlayer::Renderer::notifyFlushComplete(bool audio) {
610 sp<AMessage> notify = mNotify->dup();
611 notify->setInt32("what", kWhatFlushComplete);
612 notify->setInt32("audio", static_cast<int32_t>(audio));
613 notify->post();
614}
615
616bool DashPlayer::Renderer::dropBufferWhileFlushing(
617 bool audio, const sp<AMessage> &msg) {
618 bool flushing = false;
619
620 {
621 Mutex::Autolock autoLock(mFlushLock);
622 if (audio) {
623 flushing = mFlushingAudio;
624 } else {
625 flushing = mFlushingVideo;
626 }
627 }
628
629 if (!flushing) {
630 return false;
631 }
632
633 sp<AMessage> notifyConsumed;
634 if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
635 notifyConsumed->post();
636 }
637
638 return true;
639}
640
641void DashPlayer::Renderer::onAudioSinkChanged() {
642 CHECK(!mDrainAudioQueuePending);
643 mNumFramesWritten = 0;
644 uint32_t written;
645 if (mAudioSink->getFramesWritten(&written) == OK) {
646 mNumFramesWritten = written;
647 }
648}
649
// Posts the current playback position to the upper layer. Updates are
// rate-limited to one per kMinPositionUpdateDelayUs unless isEOS forces an
// immediate final update. A pending seek position (mSeekTimeUs != 0)
// overrides the anchor-derived position until the next discontinuity.
void DashPlayer::Renderer::notifyPosition(bool isEOS) {
    // No anchor yet — position cannot be computed.
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();

    if ((!isEOS) && (mLastPositionUpdateUs >= 0
            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs)) {
        return;
    }
    mLastPositionUpdateUs = nowUs;

    // Position = elapsed real time since the anchor, mapped to media time —
    // unless a seek position is pending, which wins.
    int64_t positionUs = (mSeekTimeUs != 0) ? mSeekTimeUs : ((nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs);

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->setInt64("videoLateByUs", mVideoLateByUs);
    notify->post();
}
671
Surajit Podderc72631d2013-08-20 14:26:43 +0530672void DashPlayer::Renderer::notifySeekPosition(int64_t seekTime){
673 mSeekTimeUs = seekTime;
674 int64_t nowUs = ALooper::GetNowUs();
675 mLastPositionUpdateUs = nowUs;
676 sp<AMessage> notify = mNotify->dup();
677 notify->setInt32("what", kWhatPosition);
678 notify->setInt64("positionUs", seekTime);
679 notify->setInt64("videoLateByUs", mVideoLateByUs);
680 notify->post();
681
682}
683
684
// Looper-thread handler for pause(): cancels pending drains (via generation
// bump), pauses the audio sink, and logs the pause position to stats.
void DashPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    // Invalidate any drain messages already posted; they will be re-posted
    // on resume.
    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());

    mPaused = true;
    // mWasPaused lets video-only playback re-anchor its clock on resume; see
    // postDrainVideoQueue().
    mWasPaused = true;

    if(mStats != NULL) {
        int64_t positionUs;
        // -1000 marks "position unknown" when no anchor exists yet.
        if(mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
            positionUs = -1000;
        } else {
            int64_t nowUs = ALooper::GetNowUs();
            positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
        }

        mStats->logPause(positionUs);
    }
}
716
// Looper-thread handler for resume(): restarts the audio sink and reschedules
// draining for whichever queues still hold data. No-op if not paused.
void DashPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    // Restart the sink before clearing mPaused so subsequent writes in
    // onDrainAudioQueue() land on a running sink.
    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}
736
737void DashPlayer::Renderer::registerStats(sp<DashPlayerStats> stats) {
738 if(mStats != NULL) {
739 mStats = NULL;
740 }
741 mStats = stats;
742}
743
744status_t DashPlayer::Renderer::setMediaPresence(bool audio, bool bValue)
745{
746 if (audio)
747 {
748 ALOGV("mHasAudio set to %d from %d",bValue,mHasAudio);
749 mHasAudio = bValue;
750 }
751 else
752 {
753 ALOGV("mHasVideo set to %d from %d",bValue,mHasVideo);
754 mHasVideo = bValue;
755 }
756 return OK;
757}
758
759} // namespace android
760