git-svn-id: http://webrtc.googlecode.com/svn/trunk@4 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/modules/utility/source/coder.cc b/modules/utility/source/coder.cc
new file mode 100644
index 0000000..b858da1
--- /dev/null
+++ b/modules/utility/source/coder.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "coder.h"
+#include "common_types.h"
+#include "module_common_types.h"
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+ #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+ #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+AudioCoder::AudioCoder(WebRtc_UWord32 instanceID)
+ : _instanceID(instanceID),
+ _acm(AudioCodingModule::Create(instanceID)),
+ _receiveCodec(),
+ _encodeTimestamp(0),
+ _encodedData(NULL),
+ _encodedLengthInBytes(0),
+ _decodeTimestamp(0)
+{
+ _acm->InitializeSender();
+ _acm->InitializeReceiver();
+ _acm->RegisterTransportCallback(this);
+}
+
+AudioCoder::~AudioCoder()
+{
+ AudioCodingModule::Destroy(_acm);
+}
+
+WebRtc_Word32 AudioCoder::SetEncodeCodec(const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat)
+{
+ if(_acm->RegisterSendCodec((CodecInst&)codecInst) == -1)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 AudioCoder::SetDecodeCodec(const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat)
+{
+ if(_acm->RegisterReceiveCodec((CodecInst&)codecInst) == -1)
+ {
+ return -1;
+ }
+ memcpy(&_receiveCodec,&codecInst,sizeof(CodecInst));
+ return 0;
+}
+
+WebRtc_Word32 AudioCoder::Decode(AudioFrame& decodedAudio,
+ WebRtc_UWord32 sampFreqHz,
+ const WebRtc_Word8* incomingPayload,
+ WebRtc_Word32 payloadLength)
+{
+ if (payloadLength > 0)
+ {
+ const WebRtc_UWord8 payloadType = _receiveCodec.pltype;
+ _decodeTimestamp += _receiveCodec.pacsize;
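+        // pacsize is the packet size in samples, so the fake decode timestamp
+        // advances in sample units, just as an RTP timestamp would.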
+ if(_acm->IncomingPayload(incomingPayload,
+ payloadLength,
+ payloadType,
+ _decodeTimestamp) == -1)
+ {
+ return -1;
+ }
+ }
+ return _acm->PlayoutData10Ms((WebRtc_UWord16)sampFreqHz,
+ (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::PlayoutData(AudioFrame& decodedAudio,
+ WebRtc_UWord16& sampFreqHz)
+{
+ return _acm->PlayoutData10Ms(sampFreqHz, (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::Encode(const AudioFrame& audio,
+ WebRtc_Word8* encodedData,
+ WebRtc_UWord32& encodedLengthInBytes)
+{
+ // Fake a timestamp in case audio doesn't contain a correct timestamp.
+    // Make a local copy of the audio frame since audio is const.
+ AudioFrame audioFrame = audio;
+ audioFrame._timeStamp = _encodeTimestamp;
+ _encodeTimestamp += audioFrame._payloadDataLengthInSamples;
+
+ // For any codec with a frame size that is longer than 10 ms the encoded
+    // length in bytes should be zero until a full frame has been encoded.
+ _encodedLengthInBytes = 0;
+ if(_acm->Add10MsData((AudioFrame&)audioFrame) == -1)
+ {
+ return -1;
+ }
+ _encodedData = encodedData;
+ if(_acm->Process() == -1)
+ {
+ return -1;
+ }
+ encodedLengthInBytes = _encodedLengthInBytes;
+ return 0;
+}
+
+WebRtc_Word32 AudioCoder::SendData(
+ FrameType /* frameType */,
+ WebRtc_UWord8 /* payloadType */,
+ WebRtc_UWord32 /* timeStamp */,
+ const WebRtc_UWord8* payloadData,
+ WebRtc_UWord16 payloadSize,
+ const RTPFragmentationHeader* /* fragmentation*/)
+{
+ memcpy(_encodedData,payloadData,sizeof(WebRtc_UWord8) * payloadSize);
+ _encodedLengthInBytes = payloadSize;
+ return 0;
+}
+} // namespace webrtc
diff --git a/modules/utility/source/coder.h b/modules/utility/source/coder.h
new file mode 100644
index 0000000..e96f455
--- /dev/null
+++ b/modules/utility/source/coder.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class AudioFrame;
+
+class AudioCoder : public AudioPacketizationCallback
+{
+public:
+ AudioCoder(WebRtc_UWord32 instanceID);
+ ~AudioCoder();
+
+ WebRtc_Word32 SetEncodeCodec(
+ const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+ WebRtc_Word32 SetDecodeCodec(
+ const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+ WebRtc_Word32 Decode(AudioFrame& decodedAudio, WebRtc_UWord32 sampFreqHz,
+ const WebRtc_Word8* incomingPayload,
+ WebRtc_Word32 payloadLength);
+
+ WebRtc_Word32 PlayoutData(AudioFrame& decodedAudio,
+ WebRtc_UWord16& sampFreqHz);
+
+ WebRtc_Word32 Encode(const AudioFrame& audio,
+ WebRtc_Word8* encodedData,
+ WebRtc_UWord32& encodedLengthInBytes);
+
+protected:
+ virtual WebRtc_Word32 SendData(FrameType frameType,
+ WebRtc_UWord8 payloadType,
+ WebRtc_UWord32 timeStamp,
+ const WebRtc_UWord8* payloadData,
+ WebRtc_UWord16 payloadSize,
+ const RTPFragmentationHeader* fragmentation);
+
+private:
+ WebRtc_UWord32 _instanceID;
+
+ AudioCodingModule* _acm;
+
+ CodecInst _receiveCodec;
+
+ WebRtc_UWord32 _encodeTimestamp;
+ WebRtc_Word8* _encodedData;
+ WebRtc_UWord32 _encodedLengthInBytes;
+
+ WebRtc_UWord32 _decodeTimestamp;
+};
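+// Typical usage (a sketch; the codec setup and the buffer size below are
+// assumptions, not part of this change):
+//   AudioCoder coder(0);
+//   coder.SetEncodeCodec(codecInst);
+//   WebRtc_Word8 encoded[1000];
+//   WebRtc_UWord32 encodedBytes = 0;
+//   coder.Encode(audioFrame, encoded, encodedBytes);
+//   // encodedBytes stays 0 until a full codec frame has been produced.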
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
diff --git a/modules/utility/source/file_player_impl.cc b/modules/utility/source/file_player_impl.cc
new file mode 100644
index 0000000..9e3f02f
--- /dev/null
+++ b/modules/utility/source/file_player_impl.cc
@@ -0,0 +1,725 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "file_player_impl.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+ #include "cpu_wrapper.h"
+ #include "frame_scaler.h"
+ #include "tick_util.h"
+ #include "video_coder.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+ #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+ #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+FilePlayer* FilePlayer::CreateFilePlayer(WebRtc_UWord32 instanceID,
+ FileFormats fileFormat)
+{
+ switch(fileFormat)
+ {
+ case kFileFormatWavFile:
+ case kFileFormatCompressedFile:
+ case kFileFormatPreencodedFile:
+ case kFileFormatPcm16kHzFile:
+ case kFileFormatPcm8kHzFile:
+ case kFileFormatPcm32kHzFile:
+ // audio formats
+ return new FilePlayerImpl(instanceID, fileFormat);
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+ case kFileFormatAviFile:
+ return new VideoFilePlayerImpl(instanceID, fileFormat);
+#endif
+ default:
+ return NULL;
+ }
+}
+
+void FilePlayer::DestroyFilePlayer(FilePlayer* player)
+{
+ delete player;
+}
+
+FilePlayerImpl::FilePlayerImpl(const WebRtc_UWord32 instanceID,
+ const FileFormats fileFormat)
+ : _instanceID(instanceID),
+ _fileFormat(fileFormat),
+ _fileModule(*MediaFile::CreateMediaFile(instanceID)),
+ _decodedLengthInMS(0),
+ _audioDecoder(instanceID),
+ _codec(),
+ _numberOf10MsPerFrame(0),
+ _numberOf10MsInDecoder(0),
+ _scaling(1.0)
+{
+ _codec.plfreq = 0;
+}
+
+FilePlayerImpl::~FilePlayerImpl()
+{
+ MediaFile::DestroyMediaFile(&_fileModule);
+}
+
+WebRtc_Word32 FilePlayerImpl::Frequency() const
+{
+ if(_codec.plfreq == 0)
+ {
+ return -1;
+ }
+    // Make sure that the sample rate is 8, 16 or 32 kHz. E.g. WAVE files
+    // may have other sampling rates.
+ if(_codec.plfreq == 11000)
+ {
+ return 16000;
+ }
+ else if(_codec.plfreq == 22000)
+ {
+ return 32000;
+ }
+ else if(_codec.plfreq == 44000)
+ {
+ return 32000;
+ }
+ else if(_codec.plfreq == 48000)
+ {
+ return 32000;
+ }
+ else
+ {
+ return _codec.plfreq;
+ }
+}
+
+WebRtc_Word32 FilePlayerImpl::AudioCodec(CodecInst& audioCodec) const
+{
+ audioCodec = _codec;
+ return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::Get10msAudioFromFile(
+ WebRtc_Word16* outBuffer,
+ WebRtc_UWord32& lengthInSamples,
+ WebRtc_UWord32 frequencyInHz)
+{
+ if(_codec.plfreq == 0)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+ "FilePlayerImpl::Get10msAudioFromFile() playing not started!\
+ codecFreq = %d, wantedFreq = %d",
+ _codec.plfreq, frequencyInHz);
+ return -1;
+ }
+
+ AudioFrame unresampledAudioFrame;
+ if(STR_CASE_CMP(_codec.plname, "L16") == 0)
+ {
+ unresampledAudioFrame._frequencyInHz = _codec.plfreq;
+
+ // L16 is un-encoded data. Just pull 10 ms.
+ WebRtc_UWord32 lengthInBytes =
+ sizeof(unresampledAudioFrame._payloadData);
+ if (_fileModule.PlayoutAudioData(
+ (WebRtc_Word8*)unresampledAudioFrame._payloadData,
+ lengthInBytes) == -1)
+ {
+ // End of file reached.
+ return -1;
+ }
+ if(lengthInBytes == 0)
+ {
+ lengthInSamples = 0;
+ return 0;
+ }
+ // One sample is two bytes.
+        unresampledAudioFrame._payloadDataLengthInSamples =
+            (WebRtc_UWord16)(lengthInBytes >> 1);
+
+ }else {
+ // Decode will generate 10 ms of audio data. PlayoutAudioData(..)
+ // expects a full frame. If the frame size is larger than 10 ms,
+        // PlayoutAudioData(..) should be called proportionally less often.
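+        // E.g. with a 30 ms codec frame _numberOf10MsPerFrame is 3, so the
+        // file is only read on every third call; the calls in between drain
+        // 10 ms of already buffered audio from the decoder.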
+ WebRtc_Word16 encodedBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+ WebRtc_UWord32 encodedLengthInBytes = 0;
+ if(++_numberOf10MsInDecoder >= _numberOf10MsPerFrame)
+ {
+ _numberOf10MsInDecoder = 0;
+ WebRtc_UWord32 bytesFromFile = sizeof(encodedBuffer);
+ if (_fileModule.PlayoutAudioData((WebRtc_Word8*)encodedBuffer,
+ bytesFromFile) == -1)
+ {
+ // End of file reached.
+ return -1;
+ }
+ encodedLengthInBytes = bytesFromFile;
+ }
+ if(_audioDecoder.Decode(unresampledAudioFrame,frequencyInHz,
+ (WebRtc_Word8*)encodedBuffer,
+ encodedLengthInBytes) == -1)
+ {
+ return -1;
+ }
+ }
+
+ int outLen = 0;
+ if(_resampler.ResetIfNeeded(unresampledAudioFrame._frequencyInHz,
+ frequencyInHz, kResamplerSynchronous))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+ "FilePlayerImpl::Get10msAudioFromFile() unexpected codec");
+
+ // New sampling frequency. Update state.
+ outLen = frequencyInHz / 100;
+ memset(outBuffer, 0, outLen * sizeof(WebRtc_Word16));
+ return 0;
+ }
+ _resampler.Push(unresampledAudioFrame._payloadData,
+ unresampledAudioFrame._payloadDataLengthInSamples,
+ outBuffer,
+ MAX_AUDIO_BUFFER_IN_SAMPLES,
+ outLen);
+
+ lengthInSamples = outLen;
+
+ if(_scaling != 1.0)
+ {
+        for (int i = 0; i < outLen; i++)
+ {
+ outBuffer[i] = (WebRtc_Word16)(outBuffer[i] * _scaling);
+ }
+ }
+ _decodedLengthInMS += 10;
+ return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::RegisterModuleFileCallback(FileCallback* callback)
+{
+ return _fileModule.SetModuleFileCallback(callback);
+}
+
+WebRtc_Word32 FilePlayerImpl::SetAudioScaling(float scaleFactor)
+{
+ if((scaleFactor >= 0)&&(scaleFactor <= 2.0))
+ {
+ _scaling = scaleFactor;
+ return 0;
+ }
+    WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+        "FilePlayerImpl::SetAudioScaling() scale factor not allowed");
+ return -1;
+}
+
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(const WebRtc_Word8* fileName,
+ bool loop,
+ WebRtc_UWord32 startPosition,
+ float volumeScaling,
+ WebRtc_UWord32 notification,
+ WebRtc_UWord32 stopPosition,
+ const CodecInst* codecInst)
+{
+ if (_fileFormat == kFileFormatPcm16kHzFile ||
+ _fileFormat == kFileFormatPcm8kHzFile||
+ _fileFormat == kFileFormatPcm32kHzFile )
+ {
+ CodecInst codecInstL16;
+ strncpy(codecInstL16.plname,"L16",32);
+ codecInstL16.pltype = 93;
+ codecInstL16.channels = 1;
+
+ if (_fileFormat == kFileFormatPcm8kHzFile)
+ {
+ codecInstL16.rate = 128000;
+ codecInstL16.plfreq = 8000;
+ codecInstL16.pacsize = 80;
+
+ } else if(_fileFormat == kFileFormatPcm16kHzFile)
+ {
+ codecInstL16.rate = 256000;
+ codecInstL16.plfreq = 16000;
+ codecInstL16.pacsize = 160;
+
+ }else if(_fileFormat == kFileFormatPcm32kHzFile)
+ {
+ codecInstL16.rate = 512000;
+ codecInstL16.plfreq = 32000;
+ codecInstL16.pacsize = 160;
+ } else
+ {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+                         "FilePlayerImpl::StartPlayingFile() sample frequency\
+                         specified is not supported for PCM format.");
+ return -1;
+ }
+
+ if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+ _fileFormat, &codecInstL16,
+ startPosition,
+ stopPosition) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+ return -1;
+ }
+ SetAudioScaling(volumeScaling);
+ }else if(_fileFormat == kFileFormatPreencodedFile)
+ {
+ if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+ _fileFormat, codecInst) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingPreEncodedFile() failed to\
+ initialize pre-encoded file %s playout.",
+ fileName);
+ return -1;
+ }
+ } else
+ {
+ CodecInst* no_inst = NULL;
+ if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+ _fileFormat, no_inst,
+ startPosition,
+ stopPosition) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+ return -1;
+ }
+ SetAudioScaling(volumeScaling);
+ }
+ if (SetUpAudioDecoder() == -1)
+ {
+ StopPlayingFile();
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
+ WebRtc_UWord32 startPosition,
+ float volumeScaling,
+ WebRtc_UWord32 notification,
+ WebRtc_UWord32 stopPosition,
+ const CodecInst* codecInst)
+{
+ if (_fileFormat == kFileFormatPcm16kHzFile ||
+ _fileFormat == kFileFormatPcm32kHzFile ||
+ _fileFormat == kFileFormatPcm8kHzFile)
+ {
+ CodecInst codecInstL16;
+ strncpy(codecInstL16.plname,"L16",32);
+ codecInstL16.pltype = 93;
+ codecInstL16.channels = 1;
+
+ if (_fileFormat == kFileFormatPcm8kHzFile)
+ {
+ codecInstL16.rate = 128000;
+ codecInstL16.plfreq = 8000;
+ codecInstL16.pacsize = 80;
+
+ }else if (_fileFormat == kFileFormatPcm16kHzFile)
+ {
+ codecInstL16.rate = 256000;
+ codecInstL16.plfreq = 16000;
+ codecInstL16.pacsize = 160;
+
+ }else if (_fileFormat == kFileFormatPcm32kHzFile)
+ {
+ codecInstL16.rate = 512000;
+ codecInstL16.plfreq = 32000;
+ codecInstL16.pacsize = 160;
+ }else
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() sample frequency specified\
+                is not supported for PCM format.");
+ return -1;
+ }
+ if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+ _fileFormat, &codecInstL16,
+ startPosition,
+ stopPosition) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+ return -1;
+ }
+
+ }else if(_fileFormat == kFileFormatPreencodedFile)
+ {
+ if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+ _fileFormat, codecInst) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+ return -1;
+ }
+ } else {
+ CodecInst* no_inst = NULL;
+ if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+ _fileFormat, no_inst,
+ startPosition,
+ stopPosition) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to initialize\
+ stream playout.");
+ return -1;
+ }
+ }
+ SetAudioScaling(volumeScaling);
+
+ if (SetUpAudioDecoder() == -1)
+ {
+ StopPlayingFile();
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::StopPlayingFile()
+{
+ memset(&_codec, 0, sizeof(CodecInst));
+ _numberOf10MsPerFrame = 0;
+ _numberOf10MsInDecoder = 0;
+ return _fileModule.StopPlaying();
+}
+
+bool FilePlayerImpl::IsPlayingFile() const
+{
+ return _fileModule.IsPlaying();
+}
+
+WebRtc_Word32 FilePlayerImpl::GetPlayoutPosition(WebRtc_UWord32& durationMs)
+{
+ return _fileModule.PlayoutPositionMs(durationMs);
+}
+
+WebRtc_Word32 FilePlayerImpl::SetUpAudioDecoder()
+{
+ if ((_fileModule.codec_info(_codec) == -1))
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() failed to retrieve Codec info\
+ of file data.");
+ return -1;
+ }
+ if( STR_CASE_CMP(_codec.plname, "L16") != 0 &&
+ _audioDecoder.SetDecodeCodec(_codec,AMRFileStorage) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FilePlayerImpl::StartPlayingFile() codec %s not supported",
+ _codec.plname);
+ return -1;
+ }
+ _numberOf10MsPerFrame = _codec.pacsize / (_codec.plfreq / 100);
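+    // pacsize is in samples and plfreq / 100 is samples per 10 ms, e.g. a
+    // 30 ms frame at 16 kHz gives 480 / 160 = 3 chunks of 10 ms.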
+ _numberOf10MsInDecoder = 0;
+ return 0;
+}
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+VideoFilePlayerImpl::VideoFilePlayerImpl(WebRtc_UWord32 instanceID,
+ FileFormats fileFormat)
+ : FilePlayerImpl(instanceID,fileFormat),
+ _videoDecoder(*new VideoCoder(instanceID)),
+ _decodedVideoFrames(0),
+ _encodedData(*new EncodedVideoData()),
+ _frameScaler(*new FrameScaler()),
+ _critSec(*CriticalSectionWrapper::CreateCriticalSection()),
+ _accumulatedRenderTimeMs(0),
+ _numberOfFramesRead(0),
+ _videoOnly(false)
+{
+ memset(&video_codec_info_, 0, sizeof(video_codec_info_));
+}
+
+VideoFilePlayerImpl::~VideoFilePlayerImpl()
+{
+ delete &_critSec;
+ delete &_frameScaler;
+ delete &_videoDecoder;
+ delete &_encodedData;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::StartPlayingVideoFile(
+ const WebRtc_Word8* fileName,
+ bool loop,
+ bool videoOnly)
+{
+ CriticalSectionScoped lock( _critSec);
+
+ if(_fileModule.StartPlayingVideoFile(fileName, loop, videoOnly,
+ _fileFormat) != 0)
+ {
+ return -1;
+ }
+
+ _decodedVideoFrames = 0;
+ _accumulatedRenderTimeMs = 0;
+ _frameLengthMS = 0;
+ _numberOfFramesRead = 0;
+ _videoOnly = videoOnly;
+
+    // Set up video_codec_info_ according to the file.
+ if(SetUpVideoDecoder() != 0)
+ {
+ StopPlayingFile();
+ return -1;
+ }
+ if(!videoOnly)
+ {
+        // Set up _codec according to the file.
+ if(SetUpAudioDecoder() != 0)
+ {
+ StopPlayingFile();
+ return -1;
+ }
+ }
+ return 0;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile()
+{
+ CriticalSectionScoped lock( _critSec);
+
+ _decodedVideoFrames = 0;
+ _videoDecoder.Reset();
+
+ return FilePlayerImpl::StopPlayingFile();
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
+ WebRtc_UWord32 outWidth,
+ WebRtc_UWord32 outHeight)
+{
+ CriticalSectionScoped lock( _critSec);
+
+ WebRtc_Word32 retVal = GetVideoFromFile(videoFrame);
+ if(retVal != 0)
+ {
+ return retVal;
+ }
+ if( videoFrame.Length() > 0)
+ {
+ retVal = _frameScaler.ResizeFrameIfNeeded(videoFrame, outWidth,
+ outHeight);
+ }
+ return retVal;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame)
+{
+ CriticalSectionScoped lock( _critSec);
+ // No new video data read from file.
+ if(_encodedData.payloadSize == 0)
+ {
+ videoFrame.SetLength(0);
+ return -1;
+ }
+ WebRtc_Word32 retVal = 0;
+ if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
+ {
+ videoFrame.CopyFrame(_encodedData.payloadSize,_encodedData.payloadData);
+ videoFrame.SetLength(_encodedData.payloadSize);
+ videoFrame.SetWidth(video_codec_info_.width);
+ videoFrame.SetHeight(video_codec_info_.height);
+ }else
+ {
+ // Set the timestamp manually since there is no timestamp in the file.
+        // Update the timestamp according to the 90 kHz stream.
+ _encodedData.timeStamp += (90000 / video_codec_info_.maxFramerate);
+ retVal = _videoDecoder.Decode(videoFrame, _encodedData);
+ }
+
+ WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp();
+ videoFrame.SetRenderTime(renderTimeMs);
+
+ // Indicate that the current frame in the encoded buffer is old/has
+ // already been read.
+ _encodedData.payloadSize = 0;
+ if( retVal == 0)
+ {
+ _decodedVideoFrames++;
+ }
+ return retVal;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::video_codec_info(
+ VideoCodec& videoCodec) const
+{
+ if(video_codec_info_.plName[0] == 0)
+ {
+ return -1;
+ }
+ memcpy(&videoCodec, &video_codec_info_, sizeof(VideoCodec));
+ return 0;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::TimeUntilNextVideoFrame()
+{
+ if(_fileFormat != kFileFormatAviFile)
+ {
+ return -1;
+ }
+ if(!_fileModule.IsPlaying())
+ {
+ return -1;
+ }
+ if(_encodedData.payloadSize <= 0)
+ {
+ // Read next frame from file.
+ CriticalSectionScoped lock( _critSec);
+
+ if(_fileFormat == kFileFormatAviFile)
+ {
+ // Get next video frame
+ WebRtc_UWord32 encodedBufferLengthInBytes = _encodedData.bufferSize;
+ if(_fileModule.PlayoutAVIVideoData(
+ reinterpret_cast< WebRtc_Word8*>(_encodedData.payloadData),
+ encodedBufferLengthInBytes) != 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVideo,
+ _instanceID,
+ "FilePlayerImpl::TimeUntilNextVideoFrame() error reading\
+ video data");
+ return -1;
+ }
+ _encodedData.payloadSize = encodedBufferLengthInBytes;
+ _encodedData.codec = video_codec_info_.codecType;
+ _numberOfFramesRead++;
+
+ if(_accumulatedRenderTimeMs == 0)
+ {
+ _startTime = TickTime::Now();
+ // This if-statement should only trigger once.
+ _accumulatedRenderTimeMs = 1;
+ } else {
+            // A full second's worth of frames has been read.
+ if(_numberOfFramesRead % video_codec_info_.maxFramerate == 0)
+ {
+                // Frame rate is in frames per second. Frame length is
+ // calculated as an integer division which means it may
+ // be rounded down. Compensate for this every second.
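+                // E.g. at 30 fps _frameLengthMS is 1000 / 30 = 33 ms, so 30
+                // frames only account for 990 ms; the remaining
+                // 1000 % 33 = 10 ms are added back here once per second.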
+                WebRtc_UWord32 rest = 1000 % _frameLengthMS;
+ _accumulatedRenderTimeMs += rest;
+ }
+ _accumulatedRenderTimeMs += _frameLengthMS;
+ }
+ }
+ }
+
+ WebRtc_Word64 timeToNextFrame;
+ if(_videoOnly)
+ {
+ timeToNextFrame = _accumulatedRenderTimeMs -
+ (TickTime::Now() - _startTime).Milliseconds();
+
+ } else {
+ // Synchronize with the audio stream instead of system clock.
+ timeToNextFrame = _accumulatedRenderTimeMs - _decodedLengthInMS;
+ }
+ if(timeToNextFrame < 0)
+ {
+ return 0;
+
+ } else if(timeToNextFrame > 0x0fffffff)
+ {
+        // Wraparound or audio stream has gone too far ahead of the video stream.
+ return -1;
+ }
+ return static_cast<WebRtc_Word32>(timeToNextFrame);
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::SetUpVideoDecoder()
+{
+ if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVideo,
+ _instanceID,
+ "FilePlayerImpl::SetVideoDecoder() failed to retrieve Codec info of\
+ file data.");
+ return -1;
+ }
+
+ WebRtc_Word32 useNumberOfCores = 1;
+ if(_videoDecoder.SetDecodeCodec(video_codec_info_, useNumberOfCores) != 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVideo,
+ _instanceID,
+ "FilePlayerImpl::SetUpVideoDecoder() codec %s not supported",
+ video_codec_info_.plName);
+ return -1;
+ }
+
+ if(strncmp(video_codec_info_.plName, "MP4V-ES", 8) == 0)
+ {
+ if(_videoDecoder.SetCodecConfigParameters(
+ video_codec_info_.plType,
+ video_codec_info_.codecSpecific.MPEG4.configParameters,
+ video_codec_info_.codecSpecific.MPEG4.configParametersSize) !=
+ 0)
+ {
+ return -1;
+ }
+ }
+
+ _frameLengthMS = 1000/video_codec_info_.maxFramerate;
+
+ // Size of unencoded data (I420) should be the largest possible frame size
+ // in a file.
+ const WebRtc_UWord32 KReadBufferSize = 3 * video_codec_info_.width *
+ video_codec_info_.height / 2;
+ _encodedData.VerifyAndAllocate(KReadBufferSize);
+ _encodedData.encodedHeight = video_codec_info_.height;
+ _encodedData.encodedWidth = video_codec_info_.width;
+ _encodedData.payloadType = video_codec_info_.plType;
+ _encodedData.timeStamp = 0;
+ return 0;
+}
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/modules/utility/source/file_player_impl.h b/modules/utility/source/file_player_impl.h
new file mode 100644
index 0000000..bd82861
--- /dev/null
+++ b/modules/utility/source/file_player_impl.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "file_player.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "resampler.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoCoder;
+class FrameScaler;
+
+class FilePlayerImpl : public FilePlayer
+{
+public:
+ FilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ ~FilePlayerImpl();
+
+ // FilePlayer functions.
+ virtual WebRtc_Word32 Get10msAudioFromFile(
+ WebRtc_Word16* decodedDataBuffer,
+ WebRtc_UWord32& decodedDataLengthInSamples,
+ const WebRtc_UWord32 frequencyInHz);
+ virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+ virtual WebRtc_Word32 StartPlayingFile(
+ const WebRtc_Word8* fileName,
+ bool loop,
+ WebRtc_UWord32 startPosition,
+ float volumeScaling,
+ WebRtc_UWord32 notification,
+ WebRtc_UWord32 stopPosition = 0,
+ const CodecInst* codecInst = NULL);
+ virtual WebRtc_Word32 StartPlayingFile(
+ InStream& sourceStream,
+ WebRtc_UWord32 startPosition,
+ float volumeScaling,
+ WebRtc_UWord32 notification,
+ WebRtc_UWord32 stopPosition = 0,
+ const CodecInst* codecInst = NULL);
+ virtual WebRtc_Word32 StopPlayingFile();
+ virtual bool IsPlayingFile() const;
+ virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs);
+ virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const;
+ virtual WebRtc_Word32 Frequency() const;
+ virtual WebRtc_Word32 SetAudioScaling(float scaleFactor);
+
+protected:
+ WebRtc_Word32 SetUpAudioDecoder();
+
+ WebRtc_UWord32 _instanceID;
+ const FileFormats _fileFormat;
+ MediaFile& _fileModule;
+
+ WebRtc_UWord32 _decodedLengthInMS;
+
+private:
+ WebRtc_Word16 _decodedAudioBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+ AudioCoder _audioDecoder;
+
+ CodecInst _codec;
+ WebRtc_Word32 _numberOf10MsPerFrame;
+ WebRtc_Word32 _numberOf10MsInDecoder;
+
+ Resampler _resampler;
+ float _scaling;
+};
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class VideoFilePlayerImpl: public FilePlayerImpl
+{
+public:
+ VideoFilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ ~VideoFilePlayerImpl();
+
+ // FilePlayer functions.
+ virtual WebRtc_Word32 TimeUntilNextVideoFrame();
+ virtual WebRtc_Word32 StartPlayingVideoFile(const WebRtc_Word8* fileName,
+ bool loop,
+ bool videoOnly);
+ virtual WebRtc_Word32 StopPlayingFile();
+ virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const;
+ virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame);
+ virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame,
+ const WebRtc_UWord32 outWidth,
+ const WebRtc_UWord32 outHeight);
+
+private:
+ WebRtc_Word32 SetUpVideoDecoder();
+
+ VideoCoder& _videoDecoder;
+ VideoCodec video_codec_info_;
+ WebRtc_Word32 _decodedVideoFrames;
+
+ EncodedVideoData& _encodedData;
+
+ FrameScaler& _frameScaler;
+ CriticalSectionWrapper& _critSec;
+ TickTime _startTime;
+ WebRtc_Word64 _accumulatedRenderTimeMs;
+ WebRtc_UWord32 _frameLengthMS;
+
+ WebRtc_Word32 _numberOfFramesRead;
+ bool _videoOnly;
+};
+#endif //WEBRTC_MODULE_UTILITY_VIDEO
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
diff --git a/modules/utility/source/file_recorder_impl.cc b/modules/utility/source/file_recorder_impl.cc
new file mode 100644
index 0000000..3aaae44
--- /dev/null
+++ b/modules/utility/source/file_recorder_impl.cc
@@ -0,0 +1,766 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#include "file_recorder_impl.h"
+#include "media_file.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+ #include "cpu_wrapper.h"
+ #include "critical_section_wrapper.h"
+ #include "frame_scaler.h"
+ #include "video_coder.h"
+ #include "video_frames_queue.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+ #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+ #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+FileRecorder* FileRecorder::CreateFileRecorder(WebRtc_UWord32 instanceID,
+ FileFormats fileFormat)
+{
+ switch(fileFormat)
+ {
+ case kFileFormatWavFile:
+ case kFileFormatCompressedFile:
+ case kFileFormatPreencodedFile:
+ case kFileFormatPcm16kHzFile:
+ case kFileFormatPcm8kHzFile:
+ case kFileFormatPcm32kHzFile:
+ return new FileRecorderImpl(instanceID, fileFormat);
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+ case kFileFormatAviFile:
+ return new AviRecorder(instanceID, fileFormat);
+#endif
+ default:
+ return NULL;
+ }
+}
+
+void FileRecorder::DestroyFileRecorder(FileRecorder* recorder)
+{
+ delete recorder;
+}
+
+FileRecorderImpl::FileRecorderImpl(WebRtc_UWord32 instanceID,
+ FileFormats fileFormat)
+ : _fileFormat(fileFormat),
+ _instanceID(instanceID),
+ _audioEncoder(instanceID),
+ _amrFormat(AMRFileStorage),
+ _moduleFile(MediaFile::CreateMediaFile(_instanceID))
+{
+}
+
+FileRecorderImpl::~FileRecorderImpl()
+{
+ MediaFile::DestroyMediaFile(_moduleFile);
+}
+
+FileFormats FileRecorderImpl::RecordingFileFormat() const
+{
+ return _fileFormat;
+}
+
+WebRtc_Word32 FileRecorderImpl::RegisterModuleFileCallback(
+ FileCallback* callback)
+{
+ if(_moduleFile == NULL)
+ {
+ return -1;
+ }
+ return _moduleFile->SetModuleFileCallback(callback);
+}
+
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+ const WebRtc_Word8* fileName,
+ const CodecInst& codecInst,
+ WebRtc_UWord32 notificationTimeMs,
+ ACMAMRPackingFormat amrFormat)
+{
+ if(_moduleFile == NULL)
+ {
+ return -1;
+ }
+ codec_info_ = codecInst;
+ _amrFormat = amrFormat;
+
+ WebRtc_Word32 retVal = 0;
+ if(_fileFormat != kFileFormatAviFile)
+ {
+        // AVI files should be started using StartRecordingVideoFile(..); all
+ // other formats should use this API.
+ retVal =_moduleFile->StartRecordingAudioFile(fileName, _fileFormat,
+ codecInst,
+ notificationTimeMs);
+ }
+
+ if( retVal == 0)
+ {
+ retVal = SetUpAudioEncoder();
+ }
+ if( retVal != 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FileRecorder::StartRecording() failed to initialize file %s for\
+ recording.",
+ fileName);
+
+ if(IsRecording())
+ {
+ StopRecording();
+ }
+ }
+ return retVal;
+}
+
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+ OutStream& destStream,
+ const CodecInst& codecInst,
+ WebRtc_UWord32 notificationTimeMs,
+ ACMAMRPackingFormat amrFormat)
+{
+ codec_info_ = codecInst;
+ _amrFormat = amrFormat;
+
+ WebRtc_Word32 retVal = _moduleFile->StartRecordingAudioStream(
+ destStream,
+ _fileFormat,
+ codecInst,
+ notificationTimeMs);
+
+ if( retVal == 0)
+ {
+ retVal = SetUpAudioEncoder();
+ }
+ if( retVal != 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FileRecorder::StartRecording() failed to initialize outStream for\
+ recording.");
+
+ if(IsRecording())
+ {
+ StopRecording();
+ }
+ }
+ return retVal;
+}
+
+WebRtc_Word32 FileRecorderImpl::StopRecording()
+{
+ memset(&codec_info_, 0, sizeof(CodecInst));
+ return _moduleFile->StopRecording();
+}
+
+bool FileRecorderImpl::IsRecording() const
+{
+ return _moduleFile->IsRecording();
+}
+
+WebRtc_Word32 FileRecorderImpl::RecordAudioToFile(
+ const AudioFrame& incomingAudioFrame,
+ const TickTime* playoutTS)
+{
+ if (codec_info_.plfreq == 0)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FileRecorder::RecordAudioToFile() recording audio is not turned\
+ on");
+ return -1;
+ }
+ AudioFrame tempAudioFrame;
+ tempAudioFrame._payloadDataLengthInSamples = 0;
+ if( incomingAudioFrame._audioChannel == 2 &&
+ !_moduleFile->IsStereo())
+ {
+ // Recording mono but incoming audio is (interleaved) stereo.
+ tempAudioFrame._audioChannel = 1;
+ tempAudioFrame._frequencyInHz = incomingAudioFrame._frequencyInHz;
+ for (WebRtc_UWord16 i = 0;
+ i < (incomingAudioFrame._payloadDataLengthInSamples >> 1); i++)
+ {
+            // The sample value is the average of the left and right buffers,
+            // rounded to the closest integer. Note that samples can be either
+            // 1 or 2 bytes.
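+            // E.g. left = 100 and right = 103 gives (100 + 103 + 1) >> 1 = 102.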
+ tempAudioFrame._payloadData[i] =
+ ((incomingAudioFrame._payloadData[2 * i] +
+ incomingAudioFrame._payloadData[(2 * i) + 1] + 1) >> 1);
+ }
+ tempAudioFrame._payloadDataLengthInSamples =
+ incomingAudioFrame._payloadDataLengthInSamples / 2;
+ }
+
+ const AudioFrame* ptrAudioFrame = &incomingAudioFrame;
+ if(tempAudioFrame._payloadDataLengthInSamples != 0)
+ {
+    // If tempAudioFrame is not empty it contains the audio to be recorded.
+ ptrAudioFrame = &tempAudioFrame;
+ }
+
+ // Encode the audio data before writing to file. Don't encode if the codec
+ // is PCM.
+ // NOTE: stereo recording is only supported for WAV files.
+    // TODO (hellner): WAV expects PCM in little endian byte order. Not
+    // "encoding" with the PCM coder may be a problem for big endian systems.
+ WebRtc_UWord32 encodedLenInBytes = 0;
+ if (_fileFormat == kFileFormatPreencodedFile ||
+ STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+ {
+ if (_audioEncoder.Encode(*ptrAudioFrame, _audioBuffer,
+ encodedLenInBytes) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceWarning,
+ kTraceVoice,
+ _instanceID,
+ "FileRecorder::RecordAudioToFile() codec %s not supported or\
+ failed to encode stream",
+ codec_info_.plname);
+ return -1;
+ }
+ } else {
+ int outLen = 0;
+ if(ptrAudioFrame->_audioChannel == 2)
+ {
+ // ptrAudioFrame contains interleaved stereo audio.
+ _audioResampler.ResetIfNeeded(ptrAudioFrame->_frequencyInHz,
+ codec_info_.plfreq,
+ kResamplerSynchronousStereo);
+ _audioResampler.Push(ptrAudioFrame->_payloadData,
+ ptrAudioFrame->_payloadDataLengthInSamples,
+ (WebRtc_Word16*)_audioBuffer,
+ MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+ } else {
+ _audioResampler.ResetIfNeeded(ptrAudioFrame->_frequencyInHz,
+ codec_info_.plfreq,
+ kResamplerSynchronous);
+ _audioResampler.Push(ptrAudioFrame->_payloadData,
+ ptrAudioFrame->_payloadDataLengthInSamples,
+ (WebRtc_Word16*)_audioBuffer,
+ MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+ }
+ encodedLenInBytes = outLen*2;
+ }
+
+    // The codec may not use a frame size of 10 ms. Whenever enough 10 ms
+    // chunks of data have been pushed to the encoder, an encoded frame
+ // will be available. Wait until then.
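+    // E.g. a codec with 30 ms frames produces a non-zero encodedLenInBytes
+    // only on every third call.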
+ if (encodedLenInBytes)
+ {
+ WebRtc_UWord16 msOfData =
+ ptrAudioFrame->_payloadDataLengthInSamples /
+ WebRtc_UWord16(ptrAudioFrame->_frequencyInHz / 1000);
+ if (WriteEncodedAudioData(_audioBuffer,
+ (WebRtc_UWord16)encodedLenInBytes,
+ msOfData, playoutTS) == -1)
+ {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::SetUpAudioEncoder()
+{
+ if (_fileFormat == kFileFormatPreencodedFile ||
+ STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+ {
+ if(_audioEncoder.SetEncodeCodec(codec_info_,_amrFormat) == -1)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVoice,
+ _instanceID,
+ "FileRecorder::StartRecording() codec %s not supported",
+ codec_info_.plname);
+ return -1;
+ }
+ }
+ return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::codec_info(CodecInst& codecInst) const
+{
+ if(codec_info_.plfreq == 0)
+ {
+ return -1;
+ }
+ codecInst = codec_info_;
+ return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::WriteEncodedAudioData(
+ const WebRtc_Word8* audioBuffer,
+ WebRtc_UWord16 bufferLength,
+ WebRtc_UWord16 /*millisecondsOfData*/,
+ const TickTime* /*playoutTS*/)
+{
+ return _moduleFile->IncomingAudioData(audioBuffer, bufferLength);
+}
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AudioFrameFileInfo
+{
+ public:
+ AudioFrameFileInfo(const WebRtc_Word8* audioData,
+ const WebRtc_UWord16 audioSize,
+ const WebRtc_UWord16 audioMS,
+ const TickTime& playoutTS)
+ : _audioSize(audioSize), _audioMS(audioMS) ,_playoutTS(playoutTS)
+ {
+ if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
+ {
+ assert(false);
+ _audioSize = 0;
+ return;
+ }
+ memcpy(_audioData, audioData, audioSize);
+ };
+ // TODO (hellner): either turn into a struct or provide get/set functions.
+ WebRtc_Word8 _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+ WebRtc_UWord16 _audioSize;
+ WebRtc_UWord16 _audioMS;
+ TickTime _playoutTS;
+};
+
+AviRecorder::AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat)
+ : FileRecorderImpl(instanceID, fileFormat),
+ _thread( 0),
+ _videoOnly(false),
+ _timeEvent(*EventWrapper::Create()),
+ _critSec(*CriticalSectionWrapper::CreateCriticalSection()),
+ _writtenVideoFramesCounter(0),
+ _writtenAudioMS(0),
+ _writtenVideoMS(0)
+{
+ _videoEncoder = new VideoCoder(instanceID);
+ _frameScaler = new FrameScaler();
+ _videoFramesQueue = new VideoFramesQueue();
+ _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+ "AviRecorder()");
+}
+
+AviRecorder::~AviRecorder( )
+{
+ StopRecording( );
+
+ delete _videoEncoder;
+ delete _frameScaler;
+ delete _videoFramesQueue;
+ delete _thread;
+ delete &_timeEvent;
+ delete &_critSec;
+}
+
+WebRtc_Word32 AviRecorder::StartRecordingVideoFile(
+ const WebRtc_Word8* fileName,
+ const CodecInst& audioCodecInst,
+ const VideoCodec& videoCodecInst,
+ ACMAMRPackingFormat amrFormat,
+ bool videoOnly)
+{
+ _firstAudioFrameReceived = false;
+ _videoCodecInst = videoCodecInst;
+ _videoOnly = videoOnly;
+
+ if(_moduleFile->StartRecordingVideoFile(fileName, _fileFormat,
+ audioCodecInst, videoCodecInst,
+ videoOnly) != 0)
+ {
+ return -1;
+ }
+
+ if(!videoOnly)
+ {
+ if(FileRecorderImpl::StartRecordingAudioFile(fileName,audioCodecInst, 0,
+ amrFormat) !=0)
+ {
+ StopRecording();
+ return -1;
+ }
+ }
+ if( SetUpVideoEncoder() != 0)
+ {
+ StopRecording();
+ return -1;
+ }
+ if(_videoOnly)
+ {
+        // Writing to the AVI file is non-blocking.
+        // Start a non-blocking timer if recording video only. If recording both
+        // video and audio, let the pushing of audio frames act as the timer.
+ _timeEvent.StartTimer(true, 1000 / _videoCodecInst.maxFramerate);
+ }
+ StartThread();
+ return 0;
+}
+
+WebRtc_Word32 AviRecorder::StopRecording()
+{
+ _timeEvent.StopTimer();
+
+ StopThread();
+ _videoEncoder->Reset();
+ return FileRecorderImpl::StopRecording();
+}
+
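+// An I420 frame is a full resolution luma plane plus two quarter resolution
+// chroma planes: width * height + 2 * (width * height / 4), i.e.
+// 3 * width * height / 2 bytes. E.g. CIF (352x288) gives 152064 bytes.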
+WebRtc_Word32 AviRecorder::CalcI420FrameSize( ) const
+{
+ return 3 * _videoCodecInst.width * _videoCodecInst.height / 2;
+}
+
+WebRtc_Word32 AviRecorder::SetUpVideoEncoder()
+{
+ // Size of unencoded data (I420) should be the largest possible frame size
+ // in a file.
+ _videoMaxPayloadSize = CalcI420FrameSize();
+ _videoEncodedData.VerifyAndAllocate(_videoMaxPayloadSize);
+
+ _videoCodecInst.plType = _videoEncoder->DefaultPayloadType(
+ _videoCodecInst.plName);
+
+ WebRtc_Word32 useNumberOfCores = 1;
+    // Set the max payload size to 16000. This means that the codec will try
+    // to create slices that will fit in packets of at most 16000 bytes.
+    // However, the Encode() call will still generate one full frame.
+ if(_videoEncoder->SetEncodeCodec(_videoCodecInst, useNumberOfCores,
+ 16000))
+ {
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word32 AviRecorder::RecordVideoToFile(const VideoFrame& videoFrame)
+{
+ CriticalSectionScoped lock(_critSec);
+
+ if(!IsRecording() || ( videoFrame.Length() == 0))
+ {
+ return -1;
+ }
+ // The frame is written to file in AviRecorder::Process().
+ WebRtc_Word32 retVal = _videoFramesQueue->AddFrame(videoFrame);
+ if(retVal != 0)
+ {
+ StopRecording();
+ }
+ return retVal;
+}
+
+bool AviRecorder::StartThread()
+{
+ unsigned int id;
+ if( _thread == 0)
+ {
+ return false;
+ }
+
+ return _thread->Start(id);
+}
+
+bool AviRecorder::StopThread()
+{
+ _critSec.Enter();
+
+ if(_thread)
+ {
+ _thread->SetNotAlive();
+
+ ThreadWrapper* thread = _thread;
+ _thread = NULL;
+
+ _timeEvent.Set();
+
+ _critSec.Leave();
+
+ if(thread->Stop())
+ {
+ delete thread;
+ } else {
+ return false;
+ }
+ } else {
+ _critSec.Leave();
+ }
+ return true;
+}
+
+bool AviRecorder::Run( ThreadObj threadObj)
+{
+ return static_cast<AviRecorder*>( threadObj)->Process();
+}
+
+WebRtc_Word32 AviRecorder::ProcessAudio()
+{
+ if (_writtenVideoFramesCounter == 0)
+ {
+ // Get the most recent frame that is due for writing to file. Since
+ // frames are unencoded it's safe to throw away frames if necessary
+ // for synchronizing audio and video.
+ VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+ if(frameToProcess)
+ {
+            // Synchronize audio to the current frame to process by throwing
+            // away audio samples with an older timestamp than the video frame.
+ WebRtc_UWord32 numberOfAudioElements =
+ _audioFramesToWrite.GetSize();
+ for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+ {
+ AudioFrameFileInfo* frameInfo =
+ (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+ if(frameInfo)
+ {
+ if(TickTime::TicksToMilliseconds(
+ frameInfo->_playoutTS.Ticks()) <
+ frameToProcess->RenderTimeMs())
+ {
+ delete frameInfo;
+ _audioFramesToWrite.PopFront();
+ } else
+ {
+ break;
+ }
+ }
+ }
+ }
+ }
+ // Write all audio up to current timestamp.
+ WebRtc_Word32 error = 0;
+ WebRtc_UWord32 numberOfAudioElements = _audioFramesToWrite.GetSize();
+ for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+ {
+ AudioFrameFileInfo* frameInfo =
+ (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+ if(frameInfo)
+ {
+ if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
+ {
+ _moduleFile->IncomingAudioData(frameInfo->_audioData,
+ frameInfo->_audioSize);
+ _writtenAudioMS += frameInfo->_audioMS;
+ delete frameInfo;
+ _audioFramesToWrite.PopFront();
+ } else {
+ break;
+ }
+ } else {
+ _audioFramesToWrite.PopFront();
+ }
+ }
+ return error;
+}
+
+bool AviRecorder::Process()
+{
+ switch(_timeEvent.Wait(500))
+ {
+ case kEventSignaled:
+ if(_thread == NULL)
+ {
+ return false;
+ }
+ break;
+ case kEventError:
+ return false;
+ case kEventTimeout:
+ // No events triggered. No work to do.
+ return true;
+ }
+ CriticalSectionScoped lock( _critSec);
+
+ // Get the most recent frame to write to file (if any). Synchronize it with
+    // the audio stream (if any). Synchronize the video based on its render
+    // timestamp (i.e. VideoFrame::RenderTimeMS()).
+ VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+ if( frameToProcess == NULL)
+ {
+ return true;
+ }
+ WebRtc_Word32 error = 0;
+ if(!_videoOnly)
+ {
+ if(!_firstAudioFrameReceived)
+ {
+ // Video and audio can only be synchronized if both have been
+ // received.
+ return true;
+ }
+ error = ProcessAudio();
+
+ while (_writtenAudioMS > _writtenVideoMS)
+ {
+ error = EncodeAndWriteVideoToFile( *frameToProcess);
+ if( error != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+ "AviRecorder::Process() error writing to file.");
+ break;
+ } else {
+ WebRtc_UWord32 frameLengthMS = 1000 /
+ _videoCodecInst.maxFramerate;
+ _writtenVideoFramesCounter++;
+ _writtenVideoMS += frameLengthMS;
+                    // A full second's worth of frames has been written.
+ if(_writtenVideoFramesCounter%_videoCodecInst.maxFramerate == 0)
+ {
+                    // Frame rate is in frames per second. Frame length is
+ // calculated as an integer division which means it may
+ // be rounded down. Compensate for this every second.
+ WebRtc_UWord32 rest = 1000 % frameLengthMS;
+ _writtenVideoMS += rest;
+ }
+ }
+ }
+ } else {
+        // Frame rate is in frames per second. Frame length is calculated as an
+        // integer division which means it may be rounded down. This introduces
+        // drift. Once a full frame's worth of drift has happened, skip writing
+ // one frame. Note that frame rate is in frames per second so the
+ // drift is completely compensated for.
+ WebRtc_UWord32 frameLengthMS = 1000/_videoCodecInst.maxFramerate;
+ WebRtc_UWord32 restMS = 1000 % frameLengthMS;
+ WebRtc_UWord32 frameSkip = (_videoCodecInst.maxFramerate *
+ frameLengthMS) / restMS;
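+        // E.g. at 30 fps: frameLengthMS = 33, restMS = 10 and
+        // frameSkip = (30 * 33) / 10 = 99, i.e. every 99th frame is skipped.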
+
+ _writtenVideoFramesCounter++;
+ if(_writtenVideoFramesCounter % frameSkip == 0)
+ {
+ _writtenVideoMS += frameLengthMS;
+ return true;
+ }
+
+ error = EncodeAndWriteVideoToFile( *frameToProcess);
+ if(error != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+ "AviRecorder::Process() error writing to file.");
+ } else {
+ _writtenVideoMS += frameLengthMS;
+ }
+ }
+ return error == 0;
+}
+
+WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame)
+{
+ if(!IsRecording() || (videoFrame.Length() == 0))
+ {
+ return -1;
+ }
+
+ if(_frameScaler->ResizeFrameIfNeeded(videoFrame, _videoCodecInst.width,
+ _videoCodecInst.height) != 0)
+ {
+ return -1;
+ }
+
+ _videoEncodedData.payloadSize = 0;
+
+ if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0)
+ {
+ _videoEncodedData.VerifyAndAllocate(videoFrame.Length());
+
+ // I420 is raw data. No encoding needed (each sample is represented by
+ // 1 byte so there is no difference depending on endianness).
+ memcpy(_videoEncodedData.payloadData, videoFrame.Buffer(),
+ videoFrame.Length());
+
+ _videoEncodedData.payloadSize = videoFrame.Length();
+ _videoEncodedData.frameType = kVideoFrameKey;
+ }else {
+ if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0)
+ {
+ return -1;
+ }
+ }
+
+ if(_videoEncodedData.payloadSize > 0)
+ {
+ if(_moduleFile->IncomingAVIVideoData(
+ (WebRtc_Word8*)(_videoEncodedData.payloadData),
+ _videoEncodedData.payloadSize))
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+ "Error writing AVI file");
+ return -1;
+ }
+ } else {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVideo,
+            _instanceID,
+            "FileRecorder::RecordVideoToFile() frame dropped by encoder,\
+             bitrate likely too low.");
+ }
+ return 0;
+}
+
+// Store audio frame in the _audioFramesToWrite buffer. The writing to file
+// happens in AviRecorder::Process().
+WebRtc_Word32 AviRecorder::WriteEncodedAudioData(
+ const WebRtc_Word8* audioBuffer,
+ WebRtc_UWord16 bufferLength,
+ WebRtc_UWord16 millisecondsOfData,
+ const TickTime* playoutTS)
+{
+ if (!IsRecording())
+ {
+ return -1;
+ }
+ if (bufferLength > MAX_AUDIO_BUFFER_IN_BYTES)
+ {
+ return -1;
+ }
+ if (_videoOnly)
+ {
+ return -1;
+ }
+ if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength)
+ {
+ StopRecording();
+ return -1;
+ }
+ _firstAudioFrameReceived = true;
+
+ if(playoutTS)
+ {
+ _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ *playoutTS));
+ } else {
+ _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ TickTime::Now()));
+ }
+ _timeEvent.Set();
+ return 0;
+}
+
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/modules/utility/source/file_recorder_impl.h b/modules/utility/source/file_recorder_impl.h
new file mode 100644
index 0000000..6b32b30
--- /dev/null
+++ b/modules/utility/source/file_recorder_impl.h
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains a class that can write audio and/or video to file in
+// multiple file formats. The unencoded input data is written to file in the
+// encoded format specified.
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "event_wrapper.h"
+#include "file_recorder.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "module_common_types.h"
+#include "resampler.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+ #include "frame_scaler.h"
+ #include "video_coder.h"
+ #include "video_frames_queue.h"
+#endif
+
+namespace webrtc {
+// The largest decoded frame size in samples (60ms with 32kHz sample rate).
+enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
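+// I.e. 60 * 32 = 1920 samples, or 3840 bytes with 16 bit samples.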
+enum { kMaxAudioBufferQueueLength = 100 };
+
+class FileRecorderImpl : public FileRecorder
+{
+public:
+ FileRecorderImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ virtual ~FileRecorderImpl();
+
+ // FileRecorder functions.
+ virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+ virtual FileFormats RecordingFileFormat() const;
+ virtual WebRtc_Word32 StartRecordingAudioFile(
+ const WebRtc_Word8* fileName,
+ const CodecInst& codecInst,
+ WebRtc_UWord32 notificationTimeMs,
+ ACMAMRPackingFormat amrFormat = AMRFileStorage);
+ virtual WebRtc_Word32 StartRecordingAudioFile(
+ OutStream& destStream,
+ const CodecInst& codecInst,
+ WebRtc_UWord32 notificationTimeMs,
+ ACMAMRPackingFormat amrFormat = AMRFileStorage);
+ virtual WebRtc_Word32 StopRecording();
+ virtual bool IsRecording() const;
+ virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const;
+ virtual WebRtc_Word32 RecordAudioToFile(
+ const AudioFrame& frame,
+ const TickTime* playoutTS = NULL);
+ virtual WebRtc_Word32 StartRecordingVideoFile(
+ const WebRtc_Word8* fileName,
+ const CodecInst& audioCodecInst,
+ const VideoCodec& videoCodecInst,
+ ACMAMRPackingFormat amrFormat = AMRFileStorage,
+ bool videoOnly = false)
+ {
+ return -1;
+ }
+ virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame)
+ {
+ return -1;
+ }
+
+protected:
+ virtual WebRtc_Word32 WriteEncodedAudioData(
+ const WebRtc_Word8* audioBuffer,
+ WebRtc_UWord16 bufferLength,
+ WebRtc_UWord16 millisecondsOfData,
+ const TickTime* playoutTS);
+
+ WebRtc_Word32 SetUpAudioEncoder();
+
+ WebRtc_UWord32 _instanceID;
+ FileFormats _fileFormat;
+ MediaFile* _moduleFile;
+
+private:
+ OutStream* _stream;
+ CodecInst codec_info_;
+ ACMAMRPackingFormat _amrFormat;
+
+ WebRtc_Word8 _audioBuffer[MAX_AUDIO_BUFFER_IN_BYTES];
+ AudioCoder _audioEncoder;
+ Resampler _audioResampler;
+};
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AviRecorder : public FileRecorderImpl
+{
+public:
+ AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ virtual ~AviRecorder();
+
+ // FileRecorder functions.
+ virtual WebRtc_Word32 StartRecordingVideoFile(
+ const WebRtc_Word8* fileName,
+ const CodecInst& audioCodecInst,
+ const VideoCodec& videoCodecInst,
+ ACMAMRPackingFormat amrFormat = AMRFileStorage,
+ bool videoOnly = false);
+ virtual WebRtc_Word32 StopRecording();
+ virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame);
+
+protected:
+ virtual WebRtc_Word32 WriteEncodedAudioData(
+ const WebRtc_Word8* audioBuffer,
+ WebRtc_UWord16 bufferLength,
+ WebRtc_UWord16 millisecondsOfData,
+ const TickTime* playoutTS);
+private:
+ static bool Run(ThreadObj threadObj);
+ bool Process();
+
+ bool StartThread();
+ bool StopThread();
+
+ WebRtc_Word32 EncodeAndWriteVideoToFile(VideoFrame& videoFrame);
+ WebRtc_Word32 ProcessAudio();
+
+ WebRtc_Word32 CalcI420FrameSize() const;
+ WebRtc_Word32 SetUpVideoEncoder();
+
+ VideoCodec _videoCodecInst;
+ bool _videoOnly;
+
+ ListWrapper _audioFramesToWrite;
+ bool _firstAudioFrameReceived;
+
+ VideoFramesQueue* _videoFramesQueue;
+
+ FrameScaler* _frameScaler;
+ VideoCoder* _videoEncoder;
+ WebRtc_Word32 _videoMaxPayloadSize;
+ EncodedVideoData _videoEncodedData;
+
+ ThreadWrapper* _thread;
+ EventWrapper& _timeEvent;
+ CriticalSectionWrapper& _critSec;
+ WebRtc_Word64 _writtenVideoFramesCounter;
+ WebRtc_Word64 _writtenAudioMS;
+ WebRtc_Word64 _writtenVideoMS;
+};
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
diff --git a/modules/utility/source/frame_scaler.cc b/modules/utility/source/frame_scaler.cc
new file mode 100644
index 0000000..e3ec0b0
--- /dev/null
+++ b/modules/utility/source/frame_scaler.cc
@@ -0,0 +1,219 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+#include "frame_scaler.h"
+
+#include "trace.h"
+#include "vplib.h"
+
+#ifndef NO_INTERPOLATOR
+ #include "InterpolatorInterface.h"
+#endif
+
+namespace webrtc {
+FrameScaler::FrameScaler()
+ : _ptrVideoInterpolator(0),
+ _outWidth(0),
+ _outHeight(0),
+ _inWidth(0),
+ _inHeight(0)
+{
+}
+
+FrameScaler::~FrameScaler( )
+{
+#ifndef NO_INTERPOLATOR
+ if( _ptrVideoInterpolator != 0)
+ {
+ deleteInterpolator(_ptrVideoInterpolator);
+ }
+ #endif
+}
+
+WebRtc_Word32 FrameScaler::ResizeFrameIfNeeded(VideoFrame& videoFrame,
+ WebRtc_UWord32 outWidth,
+ WebRtc_UWord32 outHeight)
+{
+ if( videoFrame.Length( ) == 0)
+ {
+ return -1;
+ }
+
+ if((videoFrame.Width() != outWidth) || ( videoFrame.Height() != outHeight))
+ {
+ // Scale down by factor 2-4.
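+        // This path only triggers when both dimensions divide evenly by the
+        // same integer, e.g. 704x576 -> 352x288 (factor 2) or
+        // 704x576 -> 176x144 (factor 4).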
+ if(videoFrame.Width() % outWidth == 0 &&
+ videoFrame.Height() % outHeight == 0 &&
+ (videoFrame.Width() / outWidth) == (videoFrame.Height() / outHeight))
+ {
+ const WebRtc_Word32 multiple = videoFrame.Width() / outWidth;
+ WebRtc_UWord32 scaledWidth;
+ WebRtc_UWord32 scaledHeight;
+ switch(multiple)
+ {
+ case 2:
+ ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer());
+
+ videoFrame.SetLength(outWidth * outHeight * 3 / 2);
+ videoFrame.SetWidth( outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ case 3:
+ ScaleI420Down1_3(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer(), videoFrame.Size(),
+ scaledWidth, scaledHeight);
+ videoFrame.SetLength((outWidth * outHeight * 3) / 2);
+ videoFrame.SetWidth(outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ case 4:
+ ScaleI420FrameQuarter(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer());
+
+ ScaleI420FrameQuarter(videoFrame.Width() >> 1,
+ videoFrame.Height() >> 1,
+ videoFrame.Buffer());
+
+ videoFrame.SetLength((outWidth * outHeight * 3)/ 2);
+ videoFrame.SetWidth(outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ default:
+ break;
+ }
+ }
+ // Scale up by factor 2-4.
+ if(outWidth % videoFrame.Width() == 0 &&
+ outHeight % videoFrame.Height() == 0 &&
+ (outWidth / videoFrame.Width()) == (outHeight / videoFrame.Height()))
+ {
+ const WebRtc_Word32 multiple = outWidth / videoFrame.Width();
+ WebRtc_UWord32 scaledWidth = 0;
+ WebRtc_UWord32 scaledHeight = 0;
+ switch(multiple)
+ {
+ case 2:
+ videoFrame.VerifyAndAllocate((outHeight * outWidth * 3) / 2);
+ ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer(), videoFrame.Size(),
+ scaledWidth, scaledHeight);
+ videoFrame.SetLength((outWidth * outHeight * 3) / 2);
+ videoFrame.SetWidth(outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ case 3:
+ videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
+ ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer(), videoFrame.Size(),
+ scaledWidth, scaledHeight);
+
+ ScaleI420Up3_2(scaledWidth, scaledHeight, videoFrame.Buffer(),
+ videoFrame.Size(), scaledWidth, scaledHeight);
+ videoFrame.SetLength((outWidth * outHeight * 3) / 2);
+ videoFrame.SetWidth(outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ case 4:
+ videoFrame.VerifyAndAllocate((outWidth * outHeight * 3) / 2);
+ ScaleI420Up2(videoFrame.Width(), videoFrame.Height(),
+ videoFrame.Buffer(), videoFrame.Size(),
+ scaledWidth, scaledHeight);
+ ScaleI420Up2(scaledWidth, scaledHeight, videoFrame.Buffer(),
+ videoFrame.Size(), scaledWidth, scaledHeight);
+ videoFrame.SetLength((outWidth * outHeight * 3) / 2);
+ videoFrame.SetWidth(outWidth);
+ videoFrame.SetHeight(outHeight);
+ return 0;
+ default:
+ break;
+ }
+ }
+ // Use interpolator
+#ifdef NO_INTERPOLATOR
+ assert(!"Interpolation not available");
+#else
+ // Create new interpolator if the scaling changed.
+ if((_outWidth != outWidth) || (_outHeight != outHeight) ||
+ (_inWidth != videoFrame.Width()) ||
+ (_inHeight != videoFrame.Height()))
+ {
+ if(_ptrVideoInterpolator != 0)
+ {
+ deleteInterpolator(_ptrVideoInterpolator);
+ _ptrVideoInterpolator = 0;
+ }
+
+ _outWidth = outWidth;
+ _outHeight = outHeight;
+ _inWidth = videoFrame.Width();
+ _inHeight = videoFrame.Height();
+ }
+
+
+ if (!_ptrVideoInterpolator)
+ {
+ InterpolatorType interpolator = BiCubicBSpline;
+
+        if((_inWidth > (_outWidth * 2)) ||
+           (_inWidth < (_outWidth / 2)) ||
+           (_inHeight > (_outHeight * 2)) ||
+           (_inHeight < (_outHeight / 2)))
+        {
+ interpolator = BiCubicSine;
+ }
+
+ VideoFrameFormat inputFormat;
+ VideoFrameFormat outputFormat;
+
+ inputFormat.videoType = YUV420P;
+ inputFormat.xChannels = static_cast<short>(_inWidth);
+ inputFormat.yChannels = static_cast<short>(_inHeight);
+
+ outputFormat.videoType = YUV420P;
+ outputFormat.xChannels = static_cast<short>(_outWidth);
+ outputFormat.yChannels = static_cast<short>(_outHeight);
+
+ _interpolatorBuffer.VerifyAndAllocate(_outWidth * _outHeight *
+ 3 / 2);
+
+ _ptrVideoInterpolator = createInterpolator(
+ interpolator,
+ &inputFormat,
+ &outputFormat);
+ if (_ptrVideoInterpolator == NULL)
+ {
+ WEBRTC_TRACE(
+ kTraceError,
+ kTraceVideo,
+ -1,
+ "FrameScaler::ResizeFrame(): Could not create\
+ interpolator");
+ return -1;
+ }
+ }
+
+ interpolateFrame(_ptrVideoInterpolator, videoFrame.Buffer(),
+ _interpolatorBuffer.Buffer());
+
+ videoFrame.VerifyAndAllocate(_interpolatorBuffer.Size());
+ videoFrame.SetLength(_outWidth * _outHeight * 3 / 2);
+ videoFrame.CopyFrame(videoFrame.Length(), _interpolatorBuffer.Buffer());
+ videoFrame.SetWidth(_outWidth);
+ videoFrame.SetHeight(_outHeight);
+#endif // NO_INTERPOLATOR
+ }
+ return 0;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
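
ResizeFrameIfNeeded() only has fast paths for integer scale factors of 2-4 in each dimension; with NO_INTERPOLATOR defined (see frame_scaler.h below) any other ratio asserts in debug builds and leaves the frame untouched. A minimal usage sketch, assuming the VideoFrame accessors used above and an illustrative CIF-to-QCIF downscale:

#include "frame_scaler.h"
#include "module_common_types.h"  // VideoFrame (assumed location).

// Sketch only: scale a 352x288 (CIF) I420 frame down to 176x144 (QCIF),
// a factor-2 reduction handled by the ScaleI420FrameQuarter() path above.
static int ScaleCifToQcif()
{
    webrtc::VideoFrame frame;
    const WebRtc_UWord32 cifBytes = 352 * 288 * 3 / 2;  // I420: w * h * 3/2.
    frame.VerifyAndAllocate(cifBytes);
    frame.SetLength(cifBytes);
    frame.SetWidth(352);
    frame.SetHeight(288);
    // ... fill frame.Buffer() with I420 pixel data ...

    webrtc::FrameScaler scaler;
    if (scaler.ResizeFrameIfNeeded(frame, 176, 144) != 0)
    {
        return -1;  // Only returned for an empty frame.
    }
    // frame is now 176x144 and frame.Length() is 176 * 144 * 3 / 2.
    return 0;
}
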
diff --git a/modules/utility/source/frame_scaler.h b/modules/utility/source/frame_scaler.h
new file mode 100644
index 0000000..2f295c7
--- /dev/null
+++ b/modules/utility/source/frame_scaler.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file declares a class that can be used for scaling video frames.
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+// TODO (perkj): add interpolator. The current implementation only supports
+// scaling (up or down) where the width and height are scaled by a constant
+// factor of 2-4. Also remove NO_INTERPOLATOR.
+
+// Disable usage of the old interpolator implementation.
+#define NO_INTERPOLATOR 1
+
+
+class VideoFrame;
+class FrameScaler
+{
+public:
+ FrameScaler();
+ ~FrameScaler();
+
+    // Resize videoFrame so that its width is outWidth and its height is
+    // outHeight.
+ WebRtc_Word32 ResizeFrameIfNeeded(VideoFrame& videoFrame,
+ WebRtc_UWord32 outWidth,
+ WebRtc_UWord32 outHeight);
+private:
+ typedef WebRtc_Word8* VideoInterpolator;
+    VideoInterpolator* _ptrVideoInterpolator;
+
+ VideoFrame _interpolatorBuffer;
+ WebRtc_UWord32 _outWidth;
+ WebRtc_UWord32 _outHeight;
+ WebRtc_UWord32 _inWidth;
+ WebRtc_UWord32 _inHeight;
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
diff --git a/modules/utility/source/process_thread_impl.cc b/modules/utility/source/process_thread_impl.cc
new file mode 100644
index 0000000..57fe1cd
--- /dev/null
+++ b/modules/utility/source/process_thread_impl.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "process_thread_impl.h"
+#include "module.h"
+#include "trace.h"
+
+namespace webrtc {
+ProcessThread::~ProcessThread()
+{
+}
+
+ProcessThread* ProcessThread::CreateProcessThread()
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "CreateProcessThread()");
+ return new ProcessThreadImpl();
+}
+
+void ProcessThread::DestroyProcessThread(ProcessThread* module)
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "DestroyProcessThread()");
+ delete module;
+}
+
+ProcessThreadImpl::ProcessThreadImpl()
+ : _timeEvent(*EventWrapper::Create()),
+ _critSectModules(*CriticalSectionWrapper::CreateCriticalSection()),
+ _thread(NULL)
+{
+ WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+ProcessThreadImpl::~ProcessThreadImpl()
+{
+ delete &_critSectModules;
+ delete &_timeEvent;
+ WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 ProcessThreadImpl::Start()
+{
+ CriticalSectionScoped lock(_critSectModules);
+ if(_thread)
+ {
+ return -1;
+ }
+ _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+ "ProcessThread");
+ unsigned int id;
+ WebRtc_Word32 retVal = _thread->Start(id);
+ if(retVal >= 0)
+ {
+ return 0;
+ }
+ delete _thread;
+ _thread = NULL;
+ return -1;
+}
+
+WebRtc_Word32 ProcessThreadImpl::Stop()
+{
+ _critSectModules.Enter();
+ if(_thread)
+ {
+ _thread->SetNotAlive();
+
+ ThreadWrapper* thread = _thread;
+ _thread = NULL;
+
+ _timeEvent.Set();
+ _critSectModules.Leave();
+
+ if(thread->Stop())
+ {
+ delete thread;
+ } else {
+ return -1;
+ }
+ } else {
+ _critSectModules.Leave();
+ }
+ return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::RegisterModule(const Module* module)
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1,
+ "RegisterModule(module:0x%x)", module);
+ CriticalSectionScoped lock(_critSectModules);
+
+ // Only allow module to be registered once.
+ ListItem* item = _modules.First();
+ for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ {
+ if(module == item->GetItem())
+ {
+ return -1;
+ }
+ item = _modules.Next(item);
+ }
+
+ _modules.PushFront(module);
+ WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+ "number of registered modules has increased to %d",
+ _modules.GetSize());
+    // Wake the thread calling ProcessThreadImpl::Process() to update the
+    // waiting time. The waiting time for the newly registered module may be
+    // shorter than that of all other registered modules.
+ _timeEvent.Set();
+ return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::DeRegisterModule(const Module* module)
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1,
+ "DeRegisterModule(module:0x%x)", module);
+ CriticalSectionScoped lock(_critSectModules);
+
+ ListItem* item = _modules.First();
+ for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ {
+ if(module == item->GetItem())
+ {
+ int res = _modules.Erase(item);
+ WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+ "number of registered modules has decreased to %d",
+ _modules.GetSize());
+ return res;
+ }
+ item = _modules.Next(item);
+ }
+ return -1;
+}
+
+bool ProcessThreadImpl::Run(void* obj)
+{
+ return static_cast<ProcessThreadImpl*>(obj)->Process();
+}
+
+bool ProcessThreadImpl::Process()
+{
+    // Wait until the module that should be called next is due, but don't
+    // block the thread for longer than 100 ms.
+ WebRtc_Word32 minTimeToNext = 100;
+ {
+ CriticalSectionScoped lock(_critSectModules);
+ ListItem* item = _modules.First();
+ for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ {
+ WebRtc_Word32 timeToNext =
+ static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ if(minTimeToNext > timeToNext)
+ {
+ minTimeToNext = timeToNext;
+ }
+ item = _modules.Next(item);
+ }
+ }
+
+ if(minTimeToNext > 0)
+ {
+ if(kEventError == _timeEvent.Wait(minTimeToNext))
+ {
+ return true;
+ }
+ if(!_thread)
+ {
+ return false;
+ }
+ }
+ {
+ CriticalSectionScoped lock(_critSectModules);
+ ListItem* item = _modules.First();
+ for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ {
+ WebRtc_Word32 timeToNext =
+ static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ if(timeToNext < 1)
+ {
+ static_cast<Module*>(item->GetItem())->Process();
+ }
+ item = _modules.Next(item);
+ }
+ }
+ return true;
+}
+} // namespace webrtc
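
Process() waits until the earliest registered module is due (never longer than 100 ms), then calls Process() on every module whose TimeUntilNextProcess() has reached zero. The sketch below shows a module driven by this loop. It assumes the Module interface in module.h consists of the two virtuals used above (any additional pure virtuals it declares are omitted) and that TickTime::MillisecondTimestamp() from tick_util.h returns a 64-bit millisecond count, as video_frames_queue.cc below also assumes; the 50 ms period and the class name are illustrative.

#include "module.h"
#include "tick_util.h"

// Hypothetical module that wants its Process() called every 50 ms.
class TickerModule : public webrtc::Module
{
public:
    TickerModule() : _lastProcessTime(0) {}

    // ProcessThreadImpl::Process() uses the smallest value returned by any
    // registered module as its wait time.
    virtual WebRtc_Word32 TimeUntilNextProcess()
    {
        const WebRtc_Word64 elapsed =
            webrtc::TickTime::MillisecondTimestamp() - _lastProcessTime;
        return elapsed >= 50 ? 0 : static_cast<WebRtc_Word32>(50 - elapsed);
    }

    // Called once the value above has reached zero.
    virtual WebRtc_Word32 Process()
    {
        _lastProcessTime = webrtc::TickTime::MillisecondTimestamp();
        // ... periodic work ...
        return 0;
    }

private:
    WebRtc_Word64 _lastProcessTime;
};

// Usage sketch:
//   webrtc::ProcessThread* thread =
//       webrtc::ProcessThread::CreateProcessThread();
//   thread->Start();
//   thread->RegisterModule(&ticker);
//   ...
//   thread->DeRegisterModule(&ticker);
//   thread->Stop();
//   webrtc::ProcessThread::DestroyProcessThread(thread);
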
diff --git a/modules/utility/source/process_thread_impl.h b/modules/utility/source/process_thread_impl.h
new file mode 100644
index 0000000..a712ffc
--- /dev/null
+++ b/modules/utility/source/process_thread_impl.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "process_thread.h"
+#include "thread_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class ProcessThreadImpl : public ProcessThread
+{
+public:
+ ProcessThreadImpl();
+ virtual ~ProcessThreadImpl();
+
+ virtual WebRtc_Word32 Start();
+ virtual WebRtc_Word32 Stop();
+
+ virtual WebRtc_Word32 RegisterModule(const Module* module);
+ virtual WebRtc_Word32 DeRegisterModule(const Module* module);
+
+protected:
+ static bool Run(void* obj);
+
+ bool Process();
+
+private:
+ EventWrapper& _timeEvent;
+ CriticalSectionWrapper& _critSectModules;
+ ListWrapper _modules;
+ ThreadWrapper* _thread;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
diff --git a/modules/utility/source/rtp_dump_impl.cc b/modules/utility/source/rtp_dump_impl.cc
new file mode 100644
index 0000000..e5fe38a
--- /dev/null
+++ b/modules/utility/source/rtp_dump_impl.cc
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_dump_impl.h"
+
+#include <cassert>
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+#include <Windows.h>
+#include <mmsystem.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <string.h>
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#if (defined(_DEBUG) && defined(_WIN32))
+#define DEBUG_PRINT(expr) OutputDebugString(expr)
+#define DEBUG_PRINTP(expr, p) \
+{ \
+    char msg[128]; \
+    sprintf(msg, expr, p); \
+    OutputDebugString(msg); \
+}
+#else
+#define DEBUG_PRINT(expr) ((void)0)
+#define DEBUG_PRINTP(expr,p) ((void)0)
+#endif // defined(_DEBUG) && defined(_WIN32)
+
+namespace webrtc {
+const WebRtc_Word8* RTPFILE_VERSION = "1.0";
+const WebRtc_UWord32 MAX_UWORD32 = 0xffffffff;
+
+// This structure is specified in the rtpdump documentation.
+// This struct corresponds to RD_packet_t in
+// http://www.cs.columbia.edu/irt/software/rtptools/
+typedef struct
+{
+ // Length of packet, including this header (may be smaller than plen if not
+ // whole packet recorded).
+ WebRtc_UWord16 length;
+ // Actual header+payload length for RTP, 0 for RTCP.
+ WebRtc_UWord16 plen;
+ // Milliseconds since the start of recording.
+ WebRtc_UWord32 offset;
+} rtpDumpPktHdr_t;
+
+RtpDump* RtpDump::CreateRtpDump()
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "CreateRtpDump()");
+ return new RtpDumpImpl();
+}
+
+void RtpDump::DestroyRtpDump(RtpDump* object)
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "DestroyRtpDump()");
+ delete object;
+}
+
+RtpDumpImpl::RtpDumpImpl()
+ : _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _file(*FileWrapper::Create()),
+ _startTime(0)
+{
+ WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+RtpDump::~RtpDump()
+{
+}
+
+RtpDumpImpl::~RtpDumpImpl()
+{
+ _file.Flush();
+ _file.CloseFile();
+ delete &_file;
+ delete &_critSect;
+ WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 RtpDumpImpl::Start(const WebRtc_Word8* fileNameUTF8)
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "Start()");
+
+ if (fileNameUTF8 == NULL)
+ {
+ return -1;
+ }
+
+ CriticalSectionScoped lock(_critSect);
+ _file.Flush();
+ _file.CloseFile();
+ if (_file.OpenFile(fileNameUTF8, false, false, false) == -1)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+ "failed to open the specified file");
+ return -1;
+ }
+
+ // Store start of RTP dump (to be used for offset calculation later).
+ _startTime = GetTimeInMS();
+
+ // All rtp dump files start with #!rtpplay.
+ WebRtc_Word8 magic[16];
+ sprintf(magic, "#!rtpplay%s \n", RTPFILE_VERSION);
+ _file.WriteText(magic);
+
+ // The header according to the rtpdump documentation is sizeof(RD_hdr_t)
+ // which is 8 + 4 + 2 = 14 bytes for 32-bit architecture (and 22 bytes on
+    // 64-bit architecture). However, Wireshark uses 16 bytes for the header
+    // regardless of whether the binary is 32-bit or 64-bit. Follow the same
+    // approach as Wireshark since it makes more sense.
+ // http://wiki.wireshark.org/rtpdump explains that an additional 2 bytes
+ // of padding should be added to the header.
+ WebRtc_Word8 dummyHdr[16];
+ memset(dummyHdr, 0, 16);
+ _file.Write(dummyHdr, sizeof(dummyHdr));
+ return 0;
+}
+
+WebRtc_Word32 RtpDumpImpl::Stop()
+{
+ WEBRTC_TRACE(kTraceModuleCall, kTraceUtility, -1, "Stop()");
+ CriticalSectionScoped lock(_critSect);
+ _file.Flush();
+ _file.CloseFile();
+ return 0;
+}
+
+bool RtpDumpImpl::IsActive() const
+{
+ CriticalSectionScoped lock(_critSect);
+ return _file.Open();
+}
+
+WebRtc_Word32 RtpDumpImpl::DumpPacket(const WebRtc_UWord8* packet,
+ WebRtc_UWord16 packetLength)
+{
+ CriticalSectionScoped lock(_critSect);
+ if (!IsActive())
+ {
+ return 0;
+ }
+
+ if (packet == NULL)
+ {
+ return -1;
+ }
+
+ if (packetLength < 1)
+ {
+ return -1;
+ }
+
+ // If the packet doesn't contain a valid RTCP header the packet will be
+ // considered RTP (without further verification).
+ bool isRTCP = RTCP(packet);
+
+ rtpDumpPktHdr_t hdr;
+ WebRtc_UWord32 offset;
+
+ // Offset is relative to when recording was started.
+ offset = GetTimeInMS();
+ if (offset < _startTime)
+ {
+ // Compensate for wraparound.
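+        // E.g. a _startTime of 0xFFFFFF00 and an offset of 0x00000010 yield
+        // 0x00000110 (272) ms of elapsed time.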
+ offset += MAX_UWORD32 - _startTime + 1;
+ } else {
+ offset -= _startTime;
+ }
+ hdr.offset = RtpDumpHtonl(offset);
+
+ hdr.length = RtpDumpHtons((WebRtc_UWord16)(packetLength + sizeof(hdr)));
+ if (isRTCP)
+ {
+ hdr.plen = 0;
+ }
+ else
+ {
+ hdr.plen = RtpDumpHtons((WebRtc_UWord16)packetLength);
+ }
+ _file.Write(&hdr, sizeof(hdr));
+ _file.Write(packet, packetLength);
+ return 0;
+}
+
+bool RtpDumpImpl::RTCP(const WebRtc_UWord8* packet) const
+{
+ const WebRtc_UWord8 payloadType = packet[1];
+ bool is_rtcp = false;
+
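+    // Payload types 200-207 are the standard RTCP packet types (SR, RR,
+    // SDES, BYE, APP, RTPFB, PSFB and XR); 192 is the legacy RFC 2032 full
+    // intra-frame request (FIR).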
+ switch(payloadType)
+ {
+ case 192:
+ is_rtcp = true;
+ break;
+ case 193: case 195:
+ break;
+ case 200: case 201: case 202: case 203:
+ case 204: case 205: case 206: case 207:
+ is_rtcp = true;
+ break;
+ }
+ return is_rtcp;
+}
+
+// TODO (hellner): why is TickUtil not used here?
+inline WebRtc_UWord32 RtpDumpImpl::GetTimeInMS() const
+{
+#if defined(_WIN32)
+ return timeGetTime();
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+ struct timeval tv;
+ struct timezone tz;
+ unsigned long val;
+
+ gettimeofday(&tv, &tz);
+ val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
+ return val;
+#else
+ #error Either _WIN32 or LINUX or WEBRTC_MAC has to be defined!
+ assert(false);
+ return 0;
+#endif
+}
+
+inline WebRtc_UWord32 RtpDumpImpl::RtpDumpHtonl(WebRtc_UWord32 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+ return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
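+    // e.g. 0x12345678 on a little-endian host becomes 0x78563412.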
+    return (x >> 24) | (((x >> 16) & 0xFF) << 8) | (((x >> 8) & 0xFF) << 16) |
+           ((x & 0xFF) << 24);
+#else
+#error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+ assert(false);
+ return 0;
+#endif
+}
+
+inline WebRtc_UWord16 RtpDumpImpl::RtpDumpHtons(WebRtc_UWord16 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+ return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
+ return (x >> 8) + ((x & 0xFF) << 8);
+#else
+ #error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+ assert(false);
+ return 0;
+#endif
+}
+} // namespace webrtc
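
A dump file written by this class therefore starts with the text line "#!rtpplay1.0 \n", followed by a 16-byte zeroed file header, followed by one 8-byte packet header (length, plen, offset, all in network byte order) plus the raw packet bytes for every call to DumpPacket(). A minimal reader sketch under that assumption; the function name and buffer size are illustrative, and plain stdio is used rather than any WebRTC API:

#include <stdio.h>

// Sketch of a reader for the layout written by RtpDumpImpl above (assumes
// sizeof(rtpDumpPktHdr_t) == 8, i.e. no struct padding).
static bool ReadRtpDumpFile(const char* path)
{
    FILE* file = fopen(path, "rb");
    if (file == NULL)
    {
        return false;
    }
    char firstLine[64];            // "#!rtpplay1.0 \n"
    unsigned char fileHeader[16];  // Zeroed dummy header written by Start().
    if (fgets(firstLine, sizeof(firstLine), file) == NULL ||
        fread(fileHeader, 1, sizeof(fileHeader), file) != sizeof(fileHeader))
    {
        fclose(file);
        return false;
    }
    unsigned char packet[0xFFFF];
    for (;;)
    {
        unsigned char hdr[8];  // length(2) | plen(2) | offset(4), big endian.
        if (fread(hdr, 1, sizeof(hdr), file) != sizeof(hdr))
        {
            break;  // End of file.
        }
        // length includes the 8 header bytes; plen is 0 for RTCP packets;
        // offset is the number of ms since Start() was called.
        const unsigned int length = (hdr[0] << 8) | hdr[1];
        const unsigned int plen = (hdr[2] << 8) | hdr[3];
        const unsigned int offset =
            (static_cast<unsigned int>(hdr[4]) << 24) | (hdr[5] << 16) |
            (hdr[6] << 8) | hdr[7];
        if (length < sizeof(hdr))
        {
            break;  // Corrupt header.
        }
        const size_t payloadBytes = length - sizeof(hdr);
        if (fread(packet, 1, payloadBytes, file) != payloadBytes)
        {
            break;  // Truncated file.
        }
        // packet[0..payloadBytes) now holds one RTP packet, or one RTCP
        // (compound) packet if plen == 0.
        (void)plen;
        (void)offset;
    }
    fclose(file);
    return true;
}
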
diff --git a/modules/utility/source/rtp_dump_impl.h b/modules/utility/source/rtp_dump_impl.h
new file mode 100644
index 0000000..a84e598
--- /dev/null
+++ b/modules/utility/source/rtp_dump_impl.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+
+#include "rtp_dump.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class FileWrapper;
+class RtpDumpImpl : public RtpDump
+{
+public:
+ RtpDumpImpl();
+ virtual ~RtpDumpImpl();
+
+ virtual WebRtc_Word32 Start(const WebRtc_Word8* fileNameUTF8);
+ virtual WebRtc_Word32 Stop();
+ virtual bool IsActive() const;
+ virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
+ WebRtc_UWord16 packetLength);
+private:
+ // Return the system time in ms.
+ inline WebRtc_UWord32 GetTimeInMS() const;
+ // Return x in network byte order (big endian).
+ inline WebRtc_UWord32 RtpDumpHtonl(WebRtc_UWord32 x) const;
+ // Return x in network byte order (big endian).
+ inline WebRtc_UWord16 RtpDumpHtons(WebRtc_UWord16 x) const;
+
+ // Return true if the packet starts with a valid RTCP header.
+ // Note: See ModuleRTPUtility::RTPHeaderParser::RTCP() for details on how
+ // to determine if the packet is an RTCP packet.
+ bool RTCP(const WebRtc_UWord8* packet) const;
+
+private:
+ CriticalSectionWrapper& _critSect;
+ FileWrapper& _file;
+ WebRtc_UWord32 _startTime;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
diff --git a/modules/utility/source/utility.gyp b/modules/utility/source/utility.gyp
new file mode 100644
index 0000000..cd48c2d
--- /dev/null
+++ b/modules/utility/source/utility.gyp
@@ -0,0 +1,65 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'includes': [
+ '../../../common_settings.gypi', # Common settings
+ ],
+ 'targets': [
+ {
+ 'target_name': 'webrtc_utility',
+ 'type': '<(library)',
+ 'dependencies': [
+ '../../audio_coding/main/source/audio_coding_module.gyp:audio_coding_module',
+ '../../video_coding/main/source/video_coding.gyp:webrtc_video_coding',
+ '../../../common_audio/resampler/main/source/resampler.gyp:resampler',
+ '../../../system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ 'defines': [
+ 'WEBRTC_MODULE_UTILITY_VIDEO', # for compiling support for video recording
+ ],
+ 'include_dirs': [
+ '../interface',
+ '../../interface',
+ '../../../common_video/vplib/main/interface',
+ '../../media_file/interface',
+ '../../video_coding/main/interface'
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '../interface',
+ '../../interface',
+ '../../audio_coding/main/interface',
+ ],
+ },
+ 'sources': [
+ '../interface/file_player.h',
+ '../interface/file_recorder.h',
+ '../interface/process_thread.h',
+ '../interface/rtp_dump.h',
+ 'coder.cc',
+ 'coder.h',
+ 'file_player_impl.cc',
+ 'file_player_impl.h',
+ 'file_recorder_impl.cc',
+ 'file_recorder_impl.h',
+ 'process_thread_impl.cc',
+ 'process_thread_impl.h',
+ 'rtp_dump_impl.cc',
+ 'rtp_dump_impl.h',
+ # Video only
+        # TODO: Use a variable to select between building for video and voice,
+        # or for voice only.
+ 'frame_scaler.cc',
+ 'video_coder.cc',
+ 'video_frames_queue.cc',
+ ],
+ },
+ ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/modules/utility/source/video_coder.cc b/modules/utility/source/video_coder.cc
new file mode 100644
index 0000000..2ac7160
--- /dev/null
+++ b/modules/utility/source/video_coder.cc
@@ -0,0 +1,166 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "video_coder.h"
+
+namespace webrtc {
+VideoCoder::VideoCoder(WebRtc_UWord32 instanceID)
+ : _instanceID( instanceID),
+ _vcm(VideoCodingModule::Create(instanceID)),
+ _decodedVideo(0)
+{
+ _vcm->InitializeSender();
+ _vcm->InitializeReceiver();
+
+ _vcm->RegisterTransportCallback(this);
+ _vcm->RegisterReceiveCallback(this);
+}
+
+VideoCoder::~VideoCoder()
+{
+ VideoCodingModule::Destroy(_vcm);
+}
+
+WebRtc_Word32 VideoCoder::Reset()
+{
+ _vcm->ResetDecoder();
+ _vcm->ResetEncoder();
+
+ _vcm->InitializeSender();
+ _vcm->InitializeReceiver();
+
+ _vcm->RegisterTransportCallback(this);
+ _vcm->RegisterReceiveCallback(this);
+ return 0;
+}
+
+WebRtc_Word32 VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
+ WebRtc_UWord32 numberOfCores,
+ WebRtc_UWord32 maxPayloadSize)
+{
+ if(_vcm->RegisterSendCodec(&videoCodecInst, numberOfCores,
+ maxPayloadSize) != VCM_OK)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
+ WebRtc_Word32 numberOfCores)
+{
+ if (videoCodecInst.plType == 0)
+ {
+ WebRtc_Word8 plType = DefaultPayloadType(videoCodecInst.plName);
+ if (plType == -1)
+ {
+ return -1;
+ }
+ videoCodecInst.plType = plType;
+ }
+
+ if(_vcm->RegisterReceiveCodec(&videoCodecInst, numberOfCores) != VCM_OK)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::CodecConfigParameters(WebRtc_UWord8* buffer,
+ WebRtc_Word32 size)
+{
+ return _vcm->CodecConfigParameters(buffer, size);
+}
+
+WebRtc_Word32 VideoCoder::SetCodecConfigParameters(WebRtc_UWord8 payloadType,
+ const WebRtc_UWord8* buffer,
+ WebRtc_Word32 length)
+{
+ return _vcm->SetCodecConfigParameters(payloadType, buffer, length);
+}
+
+WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
+ const EncodedVideoData& encodedData)
+{
+ decodedVideo.SetLength(0);
+ if(encodedData.payloadSize <= 0)
+ {
+ return -1;
+ }
+
+ _decodedVideo = &decodedVideo;
+ if(_vcm->DecodeFromStorage(encodedData) != VCM_OK)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::Encode(const VideoFrame& videoFrame,
+ EncodedVideoData& videoEncodedData)
+{
+    // The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
+    // pointer to videoEncodedData so that SendData() can fill it in.
+ _videoEncodedData = &videoEncodedData;
+ videoEncodedData.payloadSize = 0;
+ if(_vcm->AddVideoFrame(videoFrame) != VCM_OK)
+ {
+ return -1;
+ }
+ return 0;
+}
+
+WebRtc_Word8 VideoCoder::DefaultPayloadType(const WebRtc_Word8* plName)
+{
+ VideoCodec tmpCodec;
+ WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
+ for (WebRtc_UWord8 i = 0; i < numberOfCodecs; i++)
+ {
+ _vcm->Codec(i, &tmpCodec);
+ if(strncmp(tmpCodec.plName, plName, kPayloadNameSize) == 0)
+ {
+ return tmpCodec.plType;
+ }
+ }
+ return -1;
+}
+
+WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame)
+{
+ return _decodedVideo->CopyFrame(videoFrame);
+}
+
+WebRtc_Word32 VideoCoder::SendData(
+ FrameType frameType,
+ WebRtc_UWord8 payloadType,
+ WebRtc_UWord32 timeStamp,
+ const WebRtc_UWord8* payloadData,
+ WebRtc_UWord32 payloadSize,
+ const RTPFragmentationHeader& fragmentationHeader)
+{
+    // Store the data in _videoEncodedData, which points to the
+    // videoEncodedData argument passed to Encode(..).
+ _videoEncodedData->VerifyAndAllocate(payloadSize);
+ _videoEncodedData->frameType = frameType;
+ _videoEncodedData->payloadType = payloadType;
+ _videoEncodedData->timeStamp = timeStamp;
+ _videoEncodedData->fragmentationHeader = fragmentationHeader;
+ memcpy(_videoEncodedData->payloadData, payloadData,
+ sizeof(WebRtc_UWord8) * payloadSize);
+ _videoEncodedData->payloadSize = payloadSize;
+ return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
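
Encode() and Decode() are synchronous from the caller's point of view: the VCM invokes the SendData() and FrameToRender() callbacks above before AddVideoFrame() and DecodeFromStorage() return, so the output arguments are already filled in when the call completes. A usage sketch for the encode path; the instance ID, core count and payload size are illustrative, and the VideoCodec is assumed to have been filled in by the caller (e.g. the way DefaultPayloadType() above enumerates the VCM's codecs):

#include "module_common_types.h"  // VideoFrame, EncodedVideoData (assumed).
#include "video_coder.h"

// Sketch: encode a single raw I420 frame and return the encoded size, or -1.
static int EncodeOneFrame(webrtc::VideoCodec& codec,
                          const webrtc::VideoFrame& rawFrame)
{
    webrtc::VideoCoder coder(0);  // Instance ID, illustrative.
    if (coder.SetEncodeCodec(codec, 1 /* numberOfCores */,
                             1460 /* maxPayloadSize */) != 0)
    {
        return -1;  // The VCM rejected the codec settings.
    }
    webrtc::EncodedVideoData encoded;
    if (coder.Encode(rawFrame, encoded) != 0)
    {
        return -1;
    }
    // SendData() has already copied the bitstream into encoded.payloadData
    // and filled in payloadSize, frameType, payloadType and timeStamp.
    return static_cast<int>(encoded.payloadSize);
}
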
diff --git a/modules/utility/source/video_coder.h b/modules/utility/source/video_coder.h
new file mode 100644
index 0000000..7a214ad
--- /dev/null
+++ b/modules/utility/source/video_coder.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "video_coding.h"
+
+namespace webrtc {
+class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
+{
+public:
+ VideoCoder(WebRtc_UWord32 instanceID);
+ ~VideoCoder();
+
+ WebRtc_Word32 Reset();
+
+ WebRtc_Word32 SetEncodeCodec(VideoCodec& videoCodecInst,
+ WebRtc_UWord32 numberOfCores,
+ WebRtc_UWord32 maxPayloadSize);
+
+
+    // Select the codec that should be used for decoding. If
+    // videoCodecInst.plType is 0 it will be set to the codec's default
+    // payload type.
+ WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
+ WebRtc_Word32 numberOfCores);
+
+ WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer,
+ WebRtc_Word32 size);
+
+ WebRtc_Word32 SetCodecConfigParameters(WebRtc_UWord8 payloadType,
+ const WebRtc_UWord8* buffer,
+ WebRtc_Word32 length);
+
+ WebRtc_Word32 Decode(VideoFrame& decodedVideo,
+ const EncodedVideoData& encodedData);
+
+ WebRtc_Word32 Encode(const VideoFrame& videoFrame,
+ EncodedVideoData& videoEncodedData);
+
+ WebRtc_Word8 DefaultPayloadType(const WebRtc_Word8* plName);
+
+private:
+ // VCMReceiveCallback function.
+    // Note: called by the VideoCodingModule when decoding has finished.
+ WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
+
+ // VCMPacketizationCallback function.
+    // Note: called by the VideoCodingModule when encoding has finished.
+ WebRtc_Word32 SendData(
+ const FrameType /*frameType*/,
+ const WebRtc_UWord8 /*payloadType*/,
+ const WebRtc_UWord32 /*timeStamp*/,
+ const WebRtc_UWord8* payloadData,
+ const WebRtc_UWord32 payloadSize,
+ const RTPFragmentationHeader& /* fragmentationHeader*/);
+
+ WebRtc_UWord32 _instanceID;
+ VideoCodingModule* _vcm;
+ VideoFrame* _decodedVideo;
+ EncodedVideoData* _videoEncodedData;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
diff --git a/modules/utility/source/video_frames_queue.cc b/modules/utility/source/video_frames_queue.cc
new file mode 100644
index 0000000..ab590c4
--- /dev/null
+++ b/modules/utility/source/video_frames_queue.cc
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_frames_queue.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include <cassert>
+
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "trace.h"
+
+namespace webrtc {
+VideoFramesQueue::VideoFramesQueue()
+ : _incomingFrames(),
+ _renderDelayMs(10)
+{
+}
+
+VideoFramesQueue::~VideoFramesQueue()
+{
+ while (!_incomingFrames.Empty())
+ {
+ ListItem* item = _incomingFrames.First();
+ if (item)
+ {
+ VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+ assert(ptrFrame != NULL);
+ ptrFrame->Free();
+ delete ptrFrame;
+ }
+ _incomingFrames.Erase(item);
+ }
+ while (!_emptyFrames.Empty())
+ {
+ ListItem* item = _emptyFrames.First();
+ if (item)
+ {
+ VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+ assert(ptrFrame != NULL);
+ ptrFrame->Free();
+ delete ptrFrame;
+ }
+ _emptyFrames.Erase(item);
+ }
+}
+
+WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
+{
+ VideoFrame* ptrFrameToAdd = NULL;
+ // Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
+ if (!_emptyFrames.Empty())
+ {
+ ListItem* item = _emptyFrames.First();
+ if (item)
+ {
+ ptrFrameToAdd = static_cast<VideoFrame*>(item->GetItem());
+ _emptyFrames.Erase(item);
+ }
+ }
+ if (!ptrFrameToAdd)
+ {
+ if (_emptyFrames.GetSize() + _incomingFrames.GetSize() >
+ KMaxNumberOfFrames)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+ "%s: too many frames, limit: %d", __FUNCTION__,
+ KMaxNumberOfFrames);
+ return -1;
+ }
+
+ WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
+ "%s: allocating buffer %d", __FUNCTION__,
+ _emptyFrames.GetSize() + _incomingFrames.GetSize());
+
+ ptrFrameToAdd = new VideoFrame();
+ if (!ptrFrameToAdd)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "%s: could not create new frame", __FUNCTION__);
+ return -1;
+ }
+ }
+ ptrFrameToAdd->CopyFrame(newFrame);
+ _incomingFrames.PushBack(ptrFrameToAdd);
+ return 0;
+}
+
+// Find the most recent frame whose VideoFrame::RenderTimeMs() is lower than
+// the current time in ms (TickTime::MillisecondTimestamp()) plus the render
+// delay. Note: _incomingFrames is sorted so that the oldest frame is first.
+// Recycle all frames that are older than the returned frame.
+VideoFrame* VideoFramesQueue::FrameToRecord()
+{
+ VideoFrame* ptrRenderFrame = NULL;
+ ListItem* item = _incomingFrames.First();
+ while(item)
+ {
+ VideoFrame* ptrOldestFrameInList =
+ static_cast<VideoFrame*>(item->GetItem());
+ if (ptrOldestFrameInList->RenderTimeMs() <=
+ TickTime::MillisecondTimestamp() + _renderDelayMs)
+ {
+ if (ptrRenderFrame)
+ {
+ // List is traversed beginning to end. If ptrRenderFrame is not
+ // NULL it must be the first, and thus oldest, VideoFrame in the
+ // queue. It can be recycled.
+ ReturnFrame(ptrRenderFrame);
+ _incomingFrames.PopFront();
+ }
+ item = _incomingFrames.Next(item);
+ ptrRenderFrame = ptrOldestFrameInList;
+        } else
+ {
+ // All VideoFrames following this one will be even newer. No match
+ // will be found.
+ break;
+ }
+ }
+ return ptrRenderFrame;
+}
+
+WebRtc_Word32 VideoFramesQueue::ReturnFrame(VideoFrame* ptrOldFrame)
+{
+ ptrOldFrame->SetTimeStamp(0);
+ ptrOldFrame->SetWidth(0);
+ ptrOldFrame->SetHeight(0);
+ ptrOldFrame->SetRenderTime(0);
+ ptrOldFrame->SetLength(0);
+ _emptyFrames.PushBack(ptrOldFrame);
+ return 0;
+}
+
+// Set the render delay estimate to renderDelay ms.
+WebRtc_Word32 VideoFramesQueue::SetRenderDelay(WebRtc_UWord32 renderDelay)
+{
+ _renderDelayMs = renderDelay;
+ return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
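
AddFrame() copies the caller's frame into an internally managed VideoFrame, and FrameToRecord() returns a pointer into that internal storage, so a returned frame must be consumed before the next FrameToRecord() call recycles it. A usage sketch, assuming VideoFrame::SetRenderTime() takes the render time in milliseconds (as ReturnFrame() above suggests); the 30 ms offset is illustrative:

#include "module_common_types.h"  // VideoFrame (assumed).
#include "tick_util.h"
#include "video_frames_queue.h"

// Sketch: queue a captured frame stamped with its render time and, later,
// pull whatever frame is currently due for recording.
static void QueueAndRecord(webrtc::VideoFramesQueue& queue,
                           webrtc::VideoFrame& capturedFrame)
{
    // Producer side: this frame should be rendered/recorded ~30 ms from now.
    capturedFrame.SetRenderTime(
        webrtc::TickTime::MillisecondTimestamp() + 30);
    queue.AddFrame(capturedFrame);  // The queue keeps its own copy.

    // Consumer side, typically polled periodically:
    webrtc::VideoFrame* dueFrame = queue.FrameToRecord();
    if (dueFrame != NULL)
    {
        // dueFrame points into the queue's own storage and is only valid
        // until the next FrameToRecord() call, which may recycle it.
    }
}
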
diff --git a/modules/utility/source/video_frames_queue.h b/modules/utility/source/video_frames_queue.h
new file mode 100644
index 0000000..6c9be1c
--- /dev/null
+++ b/modules/utility/source/video_frames_queue.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "list_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoFrame;
+
+class VideoFramesQueue
+{
+public:
+ VideoFramesQueue();
+ ~VideoFramesQueue();
+
+ // Put newFrame (last) in the queue.
+ WebRtc_Word32 AddFrame(const VideoFrame& newFrame);
+
+    // Return the most recent frame, i.e. the frame with the highest
+    // VideoFrame::RenderTimeMs() that is lower than
+    // TickTime::MillisecondTimestamp() plus the render delay.
+ VideoFrame* FrameToRecord();
+
+ // Set the render delay estimate to renderDelay ms.
+ WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay);
+
+protected:
+ // Make ptrOldFrame available for re-use. I.e. put it in the empty frames
+ // queue.
+ WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
+
+private:
+    // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
+    // 300 frames correspond to 10 seconds' worth of frames at 30 fps.
+ enum {KMaxNumberOfFrames = 300};
+
+    // List of VideoFrame pointers, sorted by insertion order so that the
+    // first VideoFrame in the list is the oldest one.
+ ListWrapper _incomingFrames;
+ // A list of frames that are free to be re-used.
+ ListWrapper _emptyFrames;
+
+ // Estimated render delay.
+ WebRtc_UWord32 _renderDelayMs;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_