/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "voe_external_media_impl.h"

#include "channel.h"
#include "critical_section_wrapper.h"
#include "output_mixer.h"
#include "trace.h"
#include "transmit_mixer.h"
#include "voice_engine_impl.h"
#include "voe_errors.h"

| 21 | namespace webrtc { |
| 22 | |
| 23 | VoEExternalMedia* VoEExternalMedia::GetInterface(VoiceEngine* voiceEngine) |
| 24 | { |
| 25 | #ifndef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API |
| 26 | return NULL; |
| 27 | #else |
| 28 | if (NULL == voiceEngine) |
| 29 | { |
| 30 | return NULL; |
| 31 | } |
| 32 | VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine); |
| 33 | s->AddRef(); |
| 34 | return s; |
| 35 | #endif |
| 36 | } |
| 37 | |
| 38 | #ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API |
| 39 | |
// Constructor: caches the shared VoiceEngine state and zeroes the stored
// playout delay (playout_delay_ms_ is updated by ExternalPlayoutGetData()
// and consumed by ExternalRecordingInsertData()).
VoEExternalMediaImpl::VoEExternalMediaImpl(voe::SharedData* shared)
    : playout_delay_ms_(0), shared_(shared)
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
                 "VoEExternalMediaImpl() - ctor");
}
| 46 | |
// Destructor: only emits a memory trace; shared_ is owned elsewhere and is
// not released here.
VoEExternalMediaImpl::~VoEExternalMediaImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
                 "~VoEExternalMediaImpl() - dtor");
}
| 52 | |
| 53 | int VoEExternalMediaImpl::RegisterExternalMediaProcessing( |
| 54 | int channel, |
| 55 | ProcessingTypes type, |
| 56 | VoEMediaProcess& processObject) |
| 57 | { |
| 58 | WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 59 | "RegisterExternalMediaProcessing(channel=%d, type=%d, " |
| 60 | "processObject=0x%x)", channel, type, &processObject); |
| 61 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 62 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 63 | if (!shared_->statistics().Initialized()) |
| 64 | { |
| 65 | shared_->SetLastError(VE_NOT_INITED, kTraceError); |
| 66 | return -1; |
| 67 | } |
| 68 | switch (type) |
| 69 | { |
| 70 | case kPlaybackPerChannel: |
| 71 | case kRecordingPerChannel: |
| 72 | { |
| 73 | voe::ScopedChannel sc(shared_->channel_manager(), channel); |
| 74 | voe::Channel* channelPtr = sc.ChannelPtr(); |
| 75 | if (channelPtr == NULL) |
| 76 | { |
| 77 | shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, |
| 78 | "RegisterExternalMediaProcessing() failed to locate " |
| 79 | "channel"); |
| 80 | return -1; |
| 81 | } |
| 82 | return channelPtr->RegisterExternalMediaProcessing(type, |
| 83 | processObject); |
| 84 | } |
| 85 | case kPlaybackAllChannelsMixed: |
| 86 | { |
| 87 | return shared_->output_mixer()->RegisterExternalMediaProcessing( |
| 88 | processObject); |
| 89 | } |
| 90 | case kRecordingAllChannelsMixed: |
| 91 | case kRecordingPreprocessing: |
| 92 | { |
| 93 | return shared_->transmit_mixer()->RegisterExternalMediaProcessing( |
| 94 | &processObject, type); |
| 95 | } |
| 96 | } |
| 97 | return -1; |
| 98 | } |
| 99 | |
| 100 | int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing( |
| 101 | int channel, |
| 102 | ProcessingTypes type) |
| 103 | { |
| 104 | WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 105 | "DeRegisterExternalMediaProcessing(channel=%d)", channel); |
| 106 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 107 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 108 | if (!shared_->statistics().Initialized()) |
| 109 | { |
| 110 | shared_->SetLastError(VE_NOT_INITED, kTraceError); |
| 111 | return -1; |
| 112 | } |
| 113 | switch (type) |
| 114 | { |
| 115 | case kPlaybackPerChannel: |
| 116 | case kRecordingPerChannel: |
| 117 | { |
| 118 | voe::ScopedChannel sc(shared_->channel_manager(), channel); |
| 119 | voe::Channel* channelPtr = sc.ChannelPtr(); |
| 120 | if (channelPtr == NULL) |
| 121 | { |
| 122 | shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError, |
| 123 | "RegisterExternalMediaProcessing() " |
| 124 | "failed to locate channel"); |
| 125 | return -1; |
| 126 | } |
| 127 | return channelPtr->DeRegisterExternalMediaProcessing(type); |
| 128 | } |
| 129 | case kPlaybackAllChannelsMixed: |
| 130 | { |
| 131 | return shared_->output_mixer()-> |
| 132 | DeRegisterExternalMediaProcessing(); |
| 133 | } |
| 134 | case kRecordingAllChannelsMixed: |
| 135 | case kRecordingPreprocessing: |
| 136 | { |
| 137 | return shared_->transmit_mixer()-> |
| 138 | DeRegisterExternalMediaProcessing(type); |
| 139 | } |
| 140 | } |
| 141 | return -1; |
| 142 | } |
| 143 | |
| 144 | int VoEExternalMediaImpl::SetExternalRecordingStatus(bool enable) |
| 145 | { |
| 146 | WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 147 | "SetExternalRecordingStatus(enable=%d)", enable); |
| 148 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 149 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 150 | #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT |
| 151 | if (shared_->audio_device()->Recording()) |
| 152 | { |
| 153 | shared_->SetLastError(VE_ALREADY_SENDING, kTraceError, |
| 154 | "SetExternalRecordingStatus() cannot set state while sending"); |
| 155 | return -1; |
| 156 | } |
| 157 | shared_->set_ext_recording(enable); |
| 158 | return 0; |
| 159 | #else |
| 160 | shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 161 | "SetExternalRecordingStatus() external recording is not supported"); |
| 162 | return -1; |
| 163 | #endif |
| 164 | } |
| 165 | |
| 166 | int VoEExternalMediaImpl::ExternalRecordingInsertData( |
| 167 | const WebRtc_Word16 speechData10ms[], |
| 168 | int lengthSamples, |
| 169 | int samplingFreqHz, |
| 170 | int current_delay_ms) |
| 171 | { |
| 172 | WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 173 | "ExternalRecordingInsertData(speechData10ms=0x%x," |
| 174 | " lengthSamples=%u, samplingFreqHz=%d, current_delay_ms=%d)", |
| 175 | &speechData10ms[0], lengthSamples, samplingFreqHz, |
| 176 | current_delay_ms); |
| 177 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 178 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 179 | |
| 180 | #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT |
| 181 | if (!shared_->statistics().Initialized()) |
| 182 | { |
| 183 | shared_->SetLastError(VE_NOT_INITED, kTraceError); |
| 184 | return -1; |
| 185 | } |
| 186 | if (!shared_->ext_recording()) |
| 187 | { |
| 188 | shared_->SetLastError(VE_INVALID_OPERATION, kTraceError, |
| 189 | "ExternalRecordingInsertData() external recording is not enabled"); |
| 190 | return -1; |
| 191 | } |
| 192 | if (shared_->NumOfSendingChannels() == 0) |
| 193 | { |
| 194 | shared_->SetLastError(VE_ALREADY_SENDING, kTraceError, |
| 195 | "SetExternalRecordingStatus() no channel is sending"); |
| 196 | return -1; |
| 197 | } |
| 198 | if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) && |
| 199 | (48000 != samplingFreqHz) && (44000 != samplingFreqHz)) |
| 200 | { |
| 201 | shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError, |
| 202 | "SetExternalRecordingStatus() invalid sample rate"); |
| 203 | return -1; |
| 204 | } |
| 205 | if ((0 == lengthSamples) || |
| 206 | ((lengthSamples % (samplingFreqHz / 100)) != 0)) |
| 207 | { |
| 208 | shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError, |
| 209 | "SetExternalRecordingStatus() invalid buffer size"); |
| 210 | return -1; |
| 211 | } |
| 212 | if (current_delay_ms < 0) |
| 213 | { |
| 214 | shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError, |
| 215 | "SetExternalRecordingStatus() invalid delay)"); |
| 216 | return -1; |
| 217 | } |
| 218 | |
| 219 | WebRtc_UWord16 blockSize = samplingFreqHz / 100; |
| 220 | WebRtc_UWord32 nBlocks = lengthSamples / blockSize; |
| 221 | WebRtc_Word16 totalDelayMS = 0; |
| 222 | WebRtc_UWord16 playoutDelayMS = 0; |
| 223 | |
| 224 | for (WebRtc_UWord32 i = 0; i < nBlocks; i++) |
| 225 | { |
| 226 | if (!shared_->ext_playout()) |
| 227 | { |
| 228 | // Use real playout delay if external playout is not enabled. |
| 229 | if (shared_->audio_device()->PlayoutDelay(&playoutDelayMS) != 0) { |
| 230 | shared_->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning, |
| 231 | "PlayoutDelay() unable to get the playout delay"); |
| 232 | } |
| 233 | totalDelayMS = current_delay_ms + playoutDelayMS; |
| 234 | } |
| 235 | else |
| 236 | { |
| 237 | // Use stored delay value given the last call |
| 238 | // to ExternalPlayoutGetData. |
| 239 | totalDelayMS = current_delay_ms + playout_delay_ms_; |
| 240 | // Compensate for block sizes larger than 10ms |
| 241 | totalDelayMS -= (WebRtc_Word16)(i*10); |
| 242 | if (totalDelayMS < 0) |
| 243 | totalDelayMS = 0; |
| 244 | } |
| 245 | shared_->transmit_mixer()->PrepareDemux( |
| 246 | (const WebRtc_Word8*)(&speechData10ms[i*blockSize]), |
| 247 | blockSize, |
| 248 | 1, |
| 249 | samplingFreqHz, |
| 250 | totalDelayMS, |
| 251 | 0, |
| 252 | 0); |
| 253 | |
| 254 | shared_->transmit_mixer()->DemuxAndMix(); |
| 255 | shared_->transmit_mixer()->EncodeAndSend(); |
| 256 | } |
| 257 | return 0; |
| 258 | #else |
| 259 | shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 260 | "ExternalRecordingInsertData() external recording is not supported"); |
| 261 | return -1; |
| 262 | #endif |
| 263 | } |
| 264 | |
| 265 | int VoEExternalMediaImpl::SetExternalPlayoutStatus(bool enable) |
| 266 | { |
| 267 | WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 268 | "SetExternalPlayoutStatus(enable=%d)", enable); |
| 269 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 270 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 271 | #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT |
| 272 | if (shared_->audio_device()->Playing()) |
| 273 | { |
| 274 | shared_->SetLastError(VE_ALREADY_SENDING, kTraceError, |
| 275 | "SetExternalPlayoutStatus() cannot set state while playing"); |
| 276 | return -1; |
| 277 | } |
| 278 | shared_->set_ext_playout(enable); |
| 279 | return 0; |
| 280 | #else |
| 281 | shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 282 | "SetExternalPlayoutStatus() external playout is not supported"); |
| 283 | return -1; |
| 284 | #endif |
| 285 | } |
| 286 | |
| 287 | int VoEExternalMediaImpl::ExternalPlayoutGetData( |
| 288 | WebRtc_Word16 speechData10ms[], |
| 289 | int samplingFreqHz, |
| 290 | int current_delay_ms, |
| 291 | int& lengthSamples) |
| 292 | { |
| 293 | WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1), |
| 294 | "ExternalPlayoutGetData(speechData10ms=0x%x, samplingFreqHz=%d" |
| 295 | ", current_delay_ms=%d)", &speechData10ms[0], samplingFreqHz, |
| 296 | current_delay_ms); |
| 297 | ANDROID_NOT_SUPPORTED(shared_->statistics()); |
| 298 | IPHONE_NOT_SUPPORTED(shared_->statistics()); |
| 299 | #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT |
| 300 | if (!shared_->statistics().Initialized()) |
| 301 | { |
| 302 | shared_->SetLastError(VE_NOT_INITED, kTraceError); |
| 303 | return -1; |
| 304 | } |
| 305 | if (!shared_->ext_playout()) |
| 306 | { |
| 307 | shared_->SetLastError(VE_INVALID_OPERATION, kTraceError, |
| 308 | "ExternalPlayoutGetData() external playout is not enabled"); |
| 309 | return -1; |
| 310 | } |
| 311 | if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) && |
| 312 | (48000 != samplingFreqHz) && (44000 != samplingFreqHz)) |
| 313 | { |
| 314 | shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError, |
| 315 | "ExternalPlayoutGetData() invalid sample rate"); |
| 316 | return -1; |
| 317 | } |
| 318 | if (current_delay_ms < 0) |
| 319 | { |
| 320 | shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError, |
| 321 | "ExternalPlayoutGetData() invalid delay)"); |
| 322 | return -1; |
| 323 | } |
| 324 | |
| 325 | AudioFrame audioFrame; |
| 326 | |
| 327 | // Retrieve mixed output at the specified rate |
| 328 | shared_->output_mixer()->MixActiveChannels(); |
| 329 | shared_->output_mixer()->DoOperationsOnCombinedSignal(); |
| 330 | shared_->output_mixer()->GetMixedAudio(samplingFreqHz, 1, &audioFrame); |
| 331 | |
| 332 | // Deliver audio (PCM) samples to the external sink |
| 333 | memcpy(speechData10ms, |
| 334 | audioFrame.data_, |
| 335 | sizeof(WebRtc_Word16)*(audioFrame.samples_per_channel_)); |
| 336 | lengthSamples = audioFrame.samples_per_channel_; |
| 337 | |
| 338 | // Store current playout delay (to be used by ExternalRecordingInsertData). |
| 339 | playout_delay_ms_ = current_delay_ms; |
| 340 | |
| 341 | return 0; |
| 342 | #else |
| 343 | shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, |
| 344 | "ExternalPlayoutGetData() external playout is not supported"); |
| 345 | return -1; |
| 346 | #endif |
| 347 | } |
| 348 | |
| 349 | #endif // WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API |
| 350 | |
| 351 | } // namespace webrtc |