/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/voe_audio_processing_impl.h"

#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/voice_engine_impl.h"

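// Convenience macros: fail fast with VE_NOT_INITED when the VoiceEngine core
// has not been initialized yet.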
// TODO(andrew): move to a common place.
#define WEBRTC_VOICE_INIT_CHECK()                        \
  do {                                                   \
    if (!_shared->statistics().Initialized()) {          \
      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
      return -1;                                         \
    }                                                    \
  } while (0)

#define WEBRTC_VOICE_INIT_CHECK_BOOL()                   \
  do {                                                   \
    if (!_shared->statistics().Initialized()) {          \
      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
      return false;                                      \
    }                                                    \
  } while (0)

namespace webrtc {

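// Mobile builds default to the low-complexity mobile echo control (AECM);
// all other platforms default to the full AEC.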
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
static const EcModes kDefaultEcMode = kEcAecm;
#else
static const EcModes kDefaultEcMode = kEcAec;
#endif

VoEAudioProcessing* VoEAudioProcessing::GetInterface(VoiceEngine* voiceEngine) {
#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
  return NULL;
#else
  if (NULL == voiceEngine) {
    return NULL;
  }
  VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voiceEngine);
  s->AddRef();
  return s;
#endif
}

#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
VoEAudioProcessingImpl::VoEAudioProcessingImpl(voe::SharedData* shared)
    : _isAecMode(kDefaultEcMode == kEcAec),
      _shared(shared) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoEAudioProcessingImpl::VoEAudioProcessingImpl() - ctor");
}

VoEAudioProcessingImpl::~VoEAudioProcessingImpl() {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoEAudioProcessingImpl::~VoEAudioProcessingImpl() - dtor");
}

int VoEAudioProcessingImpl::SetNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetNsStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      nsLevel = kDefaultNsMode;
      break;
    case kNsUnchanged:
      nsLevel = _shared->audio_processing()->noise_suppression()->level();
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  if (_shared->audio_processing()->noise_suppression()->
          set_level(nsLevel) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetNsStatus() failed to set Ns mode");
    return -1;
  }
  if (_shared->audio_processing()->noise_suppression()->Enable(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetNsStatus() failed to set Ns state");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetNsStatus() Ns is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetNsStatus(enabled=?, mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabled = _shared->audio_processing()->noise_suppression()->is_enabled();
  NoiseSuppression::Level nsLevel =
      _shared->audio_processing()->noise_suppression()->level();

  switch (nsLevel) {
    case NoiseSuppression::kLow:
      mode = kNsLowSuppression;
      break;
    case NoiseSuppression::kModerate:
      mode = kNsModerateSuppression;
      break;
    case NoiseSuppression::kHigh:
      mode = kNsHighSuppression;
      break;
    case NoiseSuppression::kVeryHigh:
      mode = kNsVeryHighSuppression;
      break;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetNsStatus() => enabled=%d, mode=%d", enabled, mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetNsStatus() Ns is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAgcStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

#if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
  if (mode == kAgcAdaptiveAnalog) {
    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
        "SetAgcStatus() invalid Agc mode for mobile device");
    return -1;
  }
#endif

  GainControl::Mode agcMode = kDefaultAgcMode;
  switch (mode) {
    case kAgcDefault:
      agcMode = kDefaultAgcMode;
      break;
    case kAgcUnchanged:
      agcMode = _shared->audio_processing()->gain_control()->mode();
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveAnalog:
      agcMode = GainControl::kAdaptiveAnalog;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
  }

  if (_shared->audio_processing()->gain_control()->set_mode(agcMode) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->Enable(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcStatus() failed to set Agc state");
    return -1;
  }

  if (agcMode != GainControl::kFixedDigital) {
    // Set Agc state in the ADM when adaptive Agc mode has been selected.
    // Note that we also enable the ADM Agc when Adaptive Digital mode is
    // used since we want to be able to provide the APM with updated mic
    // levels when the user modifies the mic level manually.
    if (_shared->audio_device()->SetAGC(enable) != 0) {
      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
          kTraceWarning, "SetAgcStatus() failed to set Agc mode");
    }
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcStatus(enabled=?, mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabled = _shared->audio_processing()->gain_control()->is_enabled();
  GainControl::Mode agcMode =
      _shared->audio_processing()->gain_control()->mode();

  switch (agcMode) {
    case GainControl::kFixedDigital:
      mode = kAgcFixedDigital;
      break;
    case GainControl::kAdaptiveAnalog:
      mode = kAgcAdaptiveAnalog;
      break;
    case GainControl::kAdaptiveDigital:
      mode = kAgcAdaptiveDigital;
      break;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcStatus() => enabled=%d, mode=%d", enabled, mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAgcConfig()");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if (_shared->audio_processing()->gain_control()->set_target_level_dbfs(
      config.targetLeveldBOv) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set target peak |level|"
        " (or envelope) of the Agc");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->set_compression_gain_db(
      config.digitalCompressionGaindB) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set the range in |gain| "
        "the digital compression stage may apply");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->enable_limiter(
      config.limiterEnable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAgcConfig() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcConfig(config=?)");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  config.targetLeveldBOv =
      _shared->audio_processing()->gain_control()->target_level_dbfs();
  config.digitalCompressionGaindB =
      _shared->audio_processing()->gain_control()->compression_gain_db();
  config.limiterEnable =
      _shared->audio_processing()->gain_control()->is_limiter_enabled();

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcConfig() => targetLeveldBOv=%u, "
               "digitalCompressionGaindB=%u, limiterEnable=%d",
               config.targetLeveldBOv,
               config.digitalCompressionGaindB,
               config.limiterEnable);

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAgcConfig() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
                                          bool enable,
                                          NsModes mode) {
  LOG_API3(channel, enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxNsStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxNsStatus(enable, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxNsStatus() NS is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
                                          bool& enabled,
                                          NsModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxNsStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxNsStatus(enabled, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxNsStatus() NS is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetRxAgcStatus(int channel,
                                           bool enable,
                                           AgcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetRxAgcStatus(channel=%d, enable=%d, mode=%d)",
               channel, (int)enable, (int)mode);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxAgcStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxAgcStatus(enable, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxAgcStatus(int channel,
                                           bool& enabled,
                                           AgcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxAgcStatus(channel=%d, enable=?, mode=?)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxAgcStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxAgcStatus(enabled, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetRxAgcConfig(int channel,
                                           AgcConfig config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetRxAgcConfig(channel=%d)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxAgcConfig() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxAgcConfig(config);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxAgcConfig() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxAgcConfig(channel=%d)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxAgcConfig() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxAgcConfig(config);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxAgcConfig() Agc is not supported");
  return -1;
#endif
}

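// Drift compensation for the AEC is only compiled in on platforms where
// WEBRTC_DRIFT_COMPENSATION_SUPPORTED is defined.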
bool VoEAudioProcessing::DriftCompensationSupported() {
#if defined(WEBRTC_DRIFT_COMPENSATION_SUPPORTED)
  return true;
#else
  return false;
#endif
}

int VoEAudioProcessingImpl::EnableDriftCompensation(bool enable) {
  LOG_API1(enable);
  WEBRTC_VOICE_INIT_CHECK();

  if (!DriftCompensationSupported()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "Drift compensation is not supported on this platform.");
    return -1;
  }

  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
  if (aec->enable_drift_compensation(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "aec->enable_drift_compensation() failed");
    return -1;
  }
  return 0;
}

bool VoEAudioProcessingImpl::DriftCompensationEnabled() {
  LOG_API0();
  WEBRTC_VOICE_INIT_CHECK_BOOL();

  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
  return aec->is_drift_compensation_enabled();
}

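// The full AEC and the mobile AECM are treated as mutually exclusive here, so
// the currently active canceller is disabled before the requested one is
// enabled.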
int VoEAudioProcessingImpl::SetEcStatus(bool enable, EcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetEcStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  // AEC mode
  if ((mode == kEcDefault) ||
      (mode == kEcConference) ||
      (mode == kEcAec) ||
      ((mode == kEcUnchanged) &&
       (_isAecMode == true))) {
    if (enable) {
      // Disable the AECM before enabling the AEC.
      if (_shared->audio_processing()->echo_control_mobile()->is_enabled()) {
        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
            "SetEcStatus() disable AECM before enabling AEC");
        if (_shared->audio_processing()->echo_control_mobile()->
            Enable(false) != 0) {
          _shared->SetLastError(VE_APM_ERROR, kTraceError,
              "SetEcStatus() failed to disable AECM");
          return -1;
        }
      }
    }
    if (_shared->audio_processing()->echo_cancellation()->Enable(enable) != 0) {
      _shared->SetLastError(VE_APM_ERROR, kTraceError,
          "SetEcStatus() failed to set AEC state");
      return -1;
    }
    if (mode == kEcConference) {
      if (_shared->audio_processing()->echo_cancellation()->
          set_suppression_level(EchoCancellation::kHighSuppression) != 0) {
        _shared->SetLastError(VE_APM_ERROR, kTraceError,
            "SetEcStatus() failed to set aggressiveness to high");
        return -1;
      }
    } else {
      if (_shared->audio_processing()->echo_cancellation()->
          set_suppression_level(
              EchoCancellation::kModerateSuppression) != 0) {
        _shared->SetLastError(VE_APM_ERROR, kTraceError,
            "SetEcStatus() failed to set aggressiveness to moderate");
        return -1;
      }
    }

    _isAecMode = true;
  } else if ((mode == kEcAecm) ||
             ((mode == kEcUnchanged) &&
              (_isAecMode == false))) {
    if (enable) {
      // Disable the AEC before enabling the AECM.
      if (_shared->audio_processing()->echo_cancellation()->is_enabled()) {
        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
            "SetEcStatus() disable AEC before enabling AECM");
        if (_shared->audio_processing()->echo_cancellation()->
            Enable(false) != 0) {
          _shared->SetLastError(VE_APM_ERROR, kTraceError,
              "SetEcStatus() failed to disable AEC");
          return -1;
        }
      }
    }
    if (_shared->audio_processing()->echo_control_mobile()->
        Enable(enable) != 0) {
      _shared->SetLastError(VE_APM_ERROR, kTraceError,
          "SetEcStatus() failed to set AECM state");
      return -1;
    }
    _isAecMode = false;
  } else {
    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
        "SetEcStatus() invalid EC mode");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetEcStatus() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcStatus()");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if (_isAecMode == true) {
    mode = kEcAec;
    enabled = _shared->audio_processing()->echo_cancellation()->is_enabled();
  } else {
    mode = kEcAecm;
    enabled = _shared->audio_processing()->echo_control_mobile()->
              is_enabled();
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcStatus() => enabled=%i, mode=%i",
               enabled, (int)mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcStatus() EC is not supported");
  return -1;
#endif
}

void VoEAudioProcessingImpl::SetDelayOffsetMs(int offset) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetDelayOffsetMs(offset = %d)", offset);
  _shared->audio_processing()->set_delay_offset_ms(offset);
}

int VoEAudioProcessingImpl::DelayOffsetMs() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "DelayOffsetMs()");
  return _shared->audio_processing()->delay_offset_ms();
}

int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAECMMode(mode = %d)", mode);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  EchoControlMobile::RoutingMode aecmMode(
      EchoControlMobile::kQuietEarpieceOrHeadset);

  switch (mode) {
    case kAecmQuietEarpieceOrHeadset:
      aecmMode = EchoControlMobile::kQuietEarpieceOrHeadset;
      break;
    case kAecmEarpiece:
      aecmMode = EchoControlMobile::kEarpiece;
      break;
    case kAecmLoudEarpiece:
      aecmMode = EchoControlMobile::kLoudEarpiece;
      break;
    case kAecmSpeakerphone:
      aecmMode = EchoControlMobile::kSpeakerphone;
      break;
    case kAecmLoudSpeakerphone:
      aecmMode = EchoControlMobile::kLoudSpeakerphone;
      break;
  }

  if (_shared->audio_processing()->echo_control_mobile()->
      set_routing_mode(aecmMode) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAECMMode() failed to set AECM routing mode");
    return -1;
  }
  if (_shared->audio_processing()->echo_control_mobile()->
      enable_comfort_noise(enableCNG) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAECMMode() failed to set comfort noise state for AECM");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAECMMode() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAECMMode(mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabledCNG = false;

  EchoControlMobile::RoutingMode aecmMode =
      _shared->audio_processing()->echo_control_mobile()->routing_mode();
  enabledCNG = _shared->audio_processing()->echo_control_mobile()->
               is_comfort_noise_enabled();

  switch (aecmMode) {
    case EchoControlMobile::kQuietEarpieceOrHeadset:
      mode = kAecmQuietEarpieceOrHeadset;
      break;
    case EchoControlMobile::kEarpiece:
      mode = kAecmEarpiece;
      break;
    case EchoControlMobile::kLoudEarpiece:
      mode = kAecmLoudEarpiece;
      break;
    case EchoControlMobile::kSpeakerphone:
      mode = kAecmSpeakerphone;
      break;
    case EchoControlMobile::kLoudSpeakerphone:
      mode = kAecmLoudSpeakerphone;
      break;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAECMMode() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::EnableHighPassFilter(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "EnableHighPassFilter(%d)", enable);
  if (_shared->audio_processing()->high_pass_filter()->Enable(enable) !=
      AudioProcessing::kNoError) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "HighPassFilter::Enable() failed.");
    return -1;
  }

  return 0;
}

bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "IsHighPassFilterEnabled()");
  return _shared->audio_processing()->high_pass_filter()->is_enabled();
}

int VoEAudioProcessingImpl::RegisterRxVadObserver(
    int channel,
    VoERxVadCallback& observer) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "RegisterRxVadObserver()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "RegisterRxVadObserver() failed to locate channel");
    return -1;
  }
  return channelPtr->RegisterRxVadObserver(observer);
}

int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "DeRegisterRxVadObserver()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "DeRegisterRxVadObserver() failed to locate channel");
    return -1;
  }

  return channelPtr->DeRegisterRxVadObserver();
}

int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoiceActivityIndicator(channel=%d)", channel);
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "VoiceActivityIndicator() failed to locate channel");
    return -1;
  }
  int activity(-1);
  channelPtr->VoiceActivityIndicator(activity);

  return activity;
}

int VoEAudioProcessingImpl::SetEcMetricsStatus(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetEcMetricsStatus(enable=%d)", enable);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if ((_shared->audio_processing()->echo_cancellation()->enable_metrics(enable)
      != 0) ||
      (_shared->audio_processing()->echo_cancellation()->enable_delay_logging(
      enable) != 0)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetEcMetricsStatus() unable to set EC metrics mode");
    return -1;
  }
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetEcMetricsStatus() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcMetricsStatus(enabled=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  bool echo_mode =
      _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
  bool delay_mode = _shared->audio_processing()->echo_cancellation()->
                    is_delay_logging_enabled();

  if (echo_mode != delay_mode) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "GetEcMetricsStatus() delay logging and echo mode are not the same");
    return -1;
  }

  enabled = echo_mode;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcMetricsStatus() => enabled=%d", enabled);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcMetricsStatus() EC is not supported");
  return -1;
#endif
}

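// Reports the instantaneous AEC quality metrics (ERL, ERLE, RERL, A_NLP)
// computed by the APM.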
int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL,
                                           int& ERLE,
                                           int& RERL,
                                           int& A_NLP) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "GetEchoMetrics() AudioProcessingModule AEC is not enabled");
    return -1;
  }

  // Get Echo Metrics from Audio Processing Module.
  EchoCancellation::Metrics echoMetrics;
  if (_shared->audio_processing()->echo_cancellation()->GetMetrics(
      &echoMetrics)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
                 "GetEchoMetrics(), AudioProcessingModule metrics error");
    return -1;
  }

  // Echo quality metrics.
  ERL = echoMetrics.echo_return_loss.instant;
  ERLE = echoMetrics.echo_return_loss_enhancement.instant;
  RERL = echoMetrics.residual_echo_return_loss.instant;
  A_NLP = echoMetrics.a_nlp.instant;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEchoMetrics() => ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d",
               ERL, ERLE, RERL, A_NLP);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEchoMetrics() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median,
                                              int& delay_std) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcDelayMetrics(median=?, std=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled");
    return -1;
  }

  int median = 0;
  int std = 0;
  // Get delay-logging values from Audio Processing Module.
  if (_shared->audio_processing()->echo_cancellation()->GetDelayMetrics(
      &median, &std)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
                 "GetEcDelayMetrics(), AudioProcessingModule delay-logging "
                 "error");
    return -1;
  }

  // EC delay-logging metrics.
  delay_median = median;
  delay_std = std;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcDelayMetrics() => delay_median=%d, delay_std=%d",
               delay_median, delay_std);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcDelayMetrics() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StartDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StartDebugRecording(fileNameUTF8);
}

int VoEAudioProcessingImpl::StartDebugRecording(FILE* file_handle) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StartDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StartDebugRecording(file_handle);
}

int VoEAudioProcessingImpl::StopDebugRecording() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StopDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StopDebugRecording();
}

int VoEAudioProcessingImpl::SetTypingDetectionStatus(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetTypingDetectionStatus()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  // Just use the VAD state to determine if we should enable typing detection
  // or not.
  if (_shared->audio_processing()->voice_detection()->Enable(enable)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "SetTypingDetectionStatus() failed to set VAD state");
    return -1;
  }
  if (_shared->audio_processing()->voice_detection()->set_likelihood(
      VoiceDetection::kVeryLowLikelihood)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "SetTypingDetectionStatus() failed to set VAD likelihood to low");
    return -1;
  }

  return 0;
#endif
}

int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetTypingDetectionStatus()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  // Just use the VAD state to determine if we should enable typing
  // detection or not.
  enabled = _shared->audio_processing()->voice_detection()->is_enabled();

  return 0;
}

int VoEAudioProcessingImpl::TimeSinceLastTyping(int& seconds) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "TimeSinceLastTyping()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  // Check if typing detection is enabled.
  bool enabled = _shared->audio_processing()->voice_detection()->is_enabled();
  if (enabled) {
    _shared->transmit_mixer()->TimeSinceLastTyping(seconds);
    return 0;
  } else {
    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
        "TimeSinceLastTyping() typing detection is not enabled");
    return -1;
  }
#endif
}

int VoEAudioProcessingImpl::SetTypingDetectionParameters(int timeWindow,
                                                         int costPerTyping,
                                                         int reportingThreshold,
                                                         int penaltyDecay,
                                                         int typeEventDelay) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetTypingDetectionParameters()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->statistics().SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  return (_shared->transmit_mixer()->SetTypingDetectionParameters(timeWindow,
      costPerTyping, reportingThreshold, penaltyDecay, typeEventDelay));
#endif
}

void VoEAudioProcessingImpl::EnableStereoChannelSwapping(bool enable) {
  LOG_API1(enable);
  _shared->transmit_mixer()->EnableStereoChannelSwapping(enable);
}

bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() {
  LOG_API0();
  return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled();
}

#endif  // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API

}  // namespace webrtc