Update libjingle to 57692857

R=wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/4999004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5217 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
index 17f187d..e10e8fc 100644
--- a/talk/app/webrtc/peerconnection.cc
+++ b/talk/app/webrtc/peerconnection.cc
@@ -235,16 +235,17 @@
     }
 
     int port = kDefaultStunPort;
+    if (service_type == TURNS) {
+      port = kDefaultStunTlsPort;
+      turn_transport_type = kTcpTransportType;
+    }
+
     std::string address;
     if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
       LOG(WARNING) << "Invalid Hostname format: " << uri_without_transport;
       continue;
     }
 
-    if (service_type == TURNS) {
-      port = kDefaultStunTlsPort;
-      turn_transport_type = kTcpTransportType;
-    }
 
     if (port <= 0 || port > 0xffff) {
       LOG(WARNING) << "Invalid port: " << port;
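
The reordering above fixes the precedence between the TURNS defaults and an explicit port in the URI: ParseHostnameAndPortFromString only overwrites |port| when the URI actually carries one, so setting the TLS default first lets "turns:host:443" keep 443, whereas previously the default was applied after parsing and clobbered the explicit value. A minimal sketch of the resulting behavior, using a hypothetical helper (not code from this change) and the conventional defaults of 3478 for kDefaultStunPort and 5349 for kDefaultStunTlsPort:

    // Hypothetical illustration of the new precedence, not code from this CL.
    int ResolveTurnPort(ServiceType service_type, const std::string& hoststring) {
      int port = kDefaultStunPort;          // 3478 by convention
      if (service_type == TURNS) {
        port = kDefaultStunTlsPort;         // 5349 by convention
      }
      std::string address;
      // An explicit port ("hello.com:443") overrides the default chosen above;
      // "hello.com" alone keeps it.
      ParseHostnameAndPortFromString(hoststring, &address, &port);
      return port;
    }
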
diff --git a/talk/app/webrtc/peerconnectionfactory_unittest.cc b/talk/app/webrtc/peerconnectionfactory_unittest.cc
index 4f0b729..4ab9e35 100644
--- a/talk/app/webrtc/peerconnectionfactory_unittest.cc
+++ b/talk/app/webrtc/peerconnectionfactory_unittest.cc
@@ -62,6 +62,8 @@
 static const char kSecureTurnIceServer[] =
     "turns:test@hello.com?transport=tcp";
 static const char kSecureTurnIceServerWithoutTransportParam[] =
+    "turns:test_no_transport@hello.com:443";
+static const char kSecureTurnIceServerWithoutTransportAndPortParam[] =
     "turns:test_no_transport@hello.com";
 static const char kTurnIceServerWithNoUsernameInUri[] =
     "turn:test.com:1234";
@@ -256,6 +258,9 @@
   ice_server.uri = kSecureTurnIceServerWithoutTransportParam;
   ice_server.password = kTurnPassword;
   ice_servers.push_back(ice_server);
+  ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
+  ice_server.password = kTurnPassword;
+  ice_servers.push_back(ice_server);
   talk_base::scoped_refptr<PeerConnectionInterface> pc(
       factory_->CreatePeerConnection(ice_servers, NULL,
                                      allocator_factory_.get(),
@@ -268,9 +273,12 @@
   turn_configs.push_back(turn1);
   // TURNS without a transport param should default to tcp.
   webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+      "hello.com", 443, "test_no_transport", kTurnPassword, "tcp", true);
+  turn_configs.push_back(turn2);
+  webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn3(
       "hello.com", kDefaultStunTlsPort, "test_no_transport",
       kTurnPassword, "tcp", true);
-  turn_configs.push_back(turn2);
+  turn_configs.push_back(turn3);
   VerifyTurnConfigurations(turn_configs);
 }
 
diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc
index 57277b6..18ab459 100644
--- a/talk/app/webrtc/statscollector.cc
+++ b/talk/app/webrtc/statscollector.cc
@@ -47,9 +47,12 @@
     "googAvailableReceiveBandwidth";
 const char StatsReport::kStatsValueNameAvailableSendBandwidth[] =
     "googAvailableSendBandwidth";
+const char StatsReport::kStatsValueNameAvgEncodeMs[] = "googAvgEncodeMs";
 const char StatsReport::kStatsValueNameBucketDelay[] = "googBucketDelay";
 const char StatsReport::kStatsValueNameBytesReceived[] = "bytesReceived";
 const char StatsReport::kStatsValueNameBytesSent[] = "bytesSent";
+const char StatsReport::kStatsValueNameCaptureJitterMs[] =
+    "googCaptureJitterMs";
 const char StatsReport::kStatsValueNameChannelId[] = "googChannelId";
 const char StatsReport::kStatsValueNameCodecName[] = "googCodecName";
 const char StatsReport::kStatsValueNameComponent[] = "googComponent";
@@ -292,6 +295,9 @@
                    info.framerate_sent);
   report->AddValue(StatsReport::kStatsValueNameRtt, info.rtt_ms);
   report->AddValue(StatsReport::kStatsValueNameCodecName, info.codec_name);
+  report->AddValue(StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms);
+  report->AddValue(StatsReport::kStatsValueNameCaptureJitterMs,
+                   info.capture_jitter_ms);
 }
 
 void ExtractStats(const cricket::BandwidthEstimationInfo& info,
@@ -334,24 +340,10 @@
   // TODO(hta): Extract some stats here.
 }
 
-uint32 ExtractSsrc(const cricket::VoiceReceiverInfo& info) {
-  return info.ssrc;
-}
-
-uint32 ExtractSsrc(const cricket::VoiceSenderInfo& info) {
-  return info.ssrc;
-}
-
-uint32 ExtractSsrc(const cricket::VideoReceiverInfo& info) {
-  return info.ssrcs[0];
-}
-
-uint32 ExtractSsrc(const cricket::VideoSenderInfo& info) {
-  return info.ssrcs[0];
-}
-
 // Template to extract stats from a data vector.
-// ExtractSsrc and ExtractStats must be defined and overloaded for each type.
+// In order to use the template, the functions that are called from it,
+// ExtractStats and ExtractRemoteStats, must be defined and overloaded
+// for each type.
 template<typename T>
 void ExtractStatsFromList(const std::vector<T>& data,
                           const std::string& transport_id,
@@ -359,7 +351,7 @@
   typename std::vector<T>::const_iterator it = data.begin();
   for (; it != data.end(); ++it) {
     std::string id;
-    uint32 ssrc = ExtractSsrc(*it);
+    uint32 ssrc = it->ssrc();
     // Each object can result in 2 objects, a local and a remote object.
     // TODO(hta): Handle the case of multiple SSRCs per object.
     StatsReport* report = collector->PrepareLocalReport(ssrc, transport_id);
@@ -772,7 +764,7 @@
     report->id = statsid;
     report->type = type;
   } else {
-    report = &reports_[statsid];
+    report = &(it->second);
   }
   return report;
 }
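
ExtractStatsFromList relies on plain overload resolution plus the new common ssrc() accessor, so each info type only needs its own ExtractStats (and ExtractRemoteStats) overloads. A minimal, self-contained sketch of the pattern with made-up info types, not the real cricket structs:

    #include <string>
    #include <vector>

    struct FooInfo { unsigned int ssrc() const { return 1; } };
    struct BarInfo { unsigned int ssrc() const { return 2; } };

    // One overload per info type; the template below never needs to know T.
    std::string ExtractStats(const FooInfo& info) { return "foo report"; }
    std::string ExtractStats(const BarInfo& info) { return "bar report"; }

    template <typename T>
    void ExtractStatsFromList(const std::vector<T>& data) {
      for (typename std::vector<T>::const_iterator it = data.begin();
           it != data.end(); ++it) {
        unsigned int ssrc = it->ssrc();          // common accessor on every type
        std::string report = ExtractStats(*it);  // overload picked per T
        // A real implementation would key |report| on |ssrc|.
        (void)ssrc; (void)report;
      }
    }
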
diff --git a/talk/app/webrtc/statscollector_unittest.cc b/talk/app/webrtc/statscollector_unittest.cc
index 66a5ee0..1adcb0e 100644
--- a/talk/app/webrtc/statscollector_unittest.cc
+++ b/talk/app/webrtc/statscollector_unittest.cc
@@ -341,7 +341,7 @@
   stats.AddStream(stream_);
 
   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
 
@@ -375,7 +375,7 @@
   stats.AddStream(stream_);
 
   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
   cricket::BandwidthEstimationInfo bwe;
@@ -479,7 +479,7 @@
   const int64 kBytesSent = 12345678901234LL;
 
   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
 
@@ -537,7 +537,7 @@
   const int64 kBytesSent = 12345678901234LL;
 
   // Construct a stats value to read.
-  video_sender_info.ssrcs.push_back(1234);
+  video_sender_info.add_ssrc(1234);
   video_sender_info.bytes_sent = kBytesSent;
   stats_read.senders.push_back(video_sender_info);
 
@@ -618,7 +618,7 @@
   remote_ssrc_stats.timestamp = 12345.678;
   remote_ssrc_stats.ssrc = kSsrcOfTrack;
   cricket::VideoSenderInfo video_sender_info;
-  video_sender_info.ssrcs.push_back(kSsrcOfTrack);
+  video_sender_info.add_ssrc(kSsrcOfTrack);
   video_sender_info.remote_stats.push_back(remote_ssrc_stats);
   stats_read.senders.push_back(video_sender_info);
 
diff --git a/talk/app/webrtc/statstypes.h b/talk/app/webrtc/statstypes.h
index 11a8146..15210b6 100644
--- a/talk/app/webrtc/statstypes.h
+++ b/talk/app/webrtc/statstypes.h
@@ -128,6 +128,8 @@
 
 
   // Internal StatsValue names
+  static const char kStatsValueNameAvgEncodeMs[];
+  static const char kStatsValueNameCaptureJitterMs[];
   static const char kStatsValueNameCodecName[];
   static const char kStatsValueNameEchoCancellationQualityMin[];
   static const char kStatsValueNameEchoDelayMedian[];
diff --git a/talk/app/webrtc/test/peerconnectiontestwrapper.cc b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
index c22ecaf..91b6668 100644
--- a/talk/app/webrtc/test/peerconnectiontestwrapper.cc
+++ b/talk/app/webrtc/test/peerconnectiontestwrapper.cc
@@ -119,6 +119,8 @@
 }
 
 void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+  // This callback takes ownership of |desc|.
+  talk_base::scoped_ptr<SessionDescriptionInterface> owned_desc(desc);
   std::string sdp;
   EXPECT_TRUE(desc->ToString(&sdp));
 
@@ -183,9 +185,9 @@
 void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
                                                 int sdp_mline_index,
                                                 const std::string& candidate) {
-  EXPECT_TRUE(peer_connection_->AddIceCandidate(
-                  webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index,
-                                             candidate, NULL)));
+  talk_base::scoped_ptr<webrtc::IceCandidateInterface> owned_candidate(
+      webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+  EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
 }
 
 void PeerConnectionTestWrapper::WaitForCallEstablished() {
diff --git a/talk/app/webrtc/webrtcsdp.cc b/talk/app/webrtc/webrtcsdp.cc
index ce23459..79f94fe 100644
--- a/talk/app/webrtc/webrtcsdp.cc
+++ b/talk/app/webrtc/webrtcsdp.cc
@@ -341,13 +341,15 @@
                         const std::string& description,
                         SdpParseError* error) {
   // Get the first line of |message| from |line_start|.
-  std::string first_line = message;
+  std::string first_line;
   size_t line_end = message.find(kNewLine, line_start);
   if (line_end != std::string::npos) {
     if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
       --line_end;
     }
     first_line = message.substr(line_start, (line_end - line_start));
+  } else {
+    first_line = message.substr(line_start);
   }
 
   if (error) {
@@ -2387,7 +2389,7 @@
       if (*pos >= message.size()) {
         break;  // Done parsing
       } else {
-        return ParseFailed(message, *pos, "Can't find valid SDP line.", error);
+        return ParseFailed(message, *pos, "Invalid SDP line.", error);
       }
     }
 
diff --git a/talk/app/webrtc/webrtcsdp_unittest.cc b/talk/app/webrtc/webrtcsdp_unittest.cc
index 97ec843..5418683 100644
--- a/talk/app/webrtc/webrtcsdp_unittest.cc
+++ b/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -1852,6 +1852,18 @@
   TestDeserializeExtmap(true, true);
 }
 
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) {
+  JsepSessionDescription jdesc(kDummyString);
+  std::string sdp = kSdpFullString;
+  sdp = sdp.substr(0, sdp.size() - 2);  // Remove \r\n at the end.
+  // Deserialize
+  SdpParseError error;
+  EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error));
+  const std::string lastline = "a=ssrc:6 label:video_track_id_3";
+  EXPECT_EQ(lastline, error.line);
+  EXPECT_EQ("Invalid SDP line.", error.description);
+}
+
 TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
   JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
   std::string new_sdp = kSdpOneCandidate;
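
Taken together, the GetLine change and the new test above pin down the error reporting for SDP that lacks a trailing line break; a worked trace (kNewLine and kReturn are the '\n' and '\r' constants used in webrtcsdp.cc):

    // message    = "v=0\r\n...\r\na=ssrc:6 label:video_track_id_3"  (no final CRLF)
    // line_start = offset of that final "a=" line
    // line_end   = std::string::npos, since no kNewLine follows line_start.
    //
    // Before: |first_line| had been initialized to the whole |message|.
    // After:  |first_line| = message.substr(line_start)
    //                      = "a=ssrc:6 label:video_track_id_3",
    // which is exactly what the test expects in SdpParseError::line, together
    // with the shortened "Invalid SDP line." description.
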
diff --git a/talk/base/macutils.cc b/talk/base/macutils.cc
index 28f96e2..a1dcc04 100644
--- a/talk/base/macutils.cc
+++ b/talk/base/macutils.cc
@@ -38,27 +38,29 @@
 ///////////////////////////////////////////////////////////////////////////////
 
 bool ToUtf8(const CFStringRef str16, std::string* str8) {
-  if ((NULL == str16) || (NULL == str8))
+  if ((NULL == str16) || (NULL == str8)) {
     return false;
+  }
   size_t maxlen = CFStringGetMaximumSizeForEncoding(CFStringGetLength(str16),
-                                                    kCFStringEncodingUTF8)
-                  + 1;
+                                                    kCFStringEncodingUTF8) + 1;
   scoped_ptr<char[]> buffer(new char[maxlen]);
   if (!buffer || !CFStringGetCString(str16, buffer.get(), maxlen,
-                                     kCFStringEncodingUTF8))
+                                     kCFStringEncodingUTF8)) {
     return false;
+  }
   str8->assign(buffer.get());
   return true;
 }
 
 bool ToUtf16(const std::string& str8, CFStringRef* str16) {
-  if (NULL == str16)
+  if (NULL == str16) {
     return false;
+  }
   *str16 = CFStringCreateWithBytes(kCFAllocatorDefault,
                                    reinterpret_cast<const UInt8*>(str8.data()),
                                    str8.length(), kCFStringEncodingUTF8,
                                    false);
-  return (NULL != *str16);
+  return NULL != *str16;
 }
 
 #ifdef OSX
@@ -100,23 +102,25 @@
 
 bool GetOSVersion(int* major, int* minor, int* bugfix) {
   ASSERT(major && minor && bugfix);
-  if (!GetGestalt(gestaltSystemVersion, major))
+  if (!GetGestalt(gestaltSystemVersion, major)) {
     return false;
+  }
   if (*major < 0x1040) {
     *bugfix = *major & 0xF;
     *minor = (*major >> 4) & 0xF;
     *major = (*major >> 8);
     return true;
   }
-  return GetGestalt(gestaltSystemVersionMajor, major)
-      && GetGestalt(gestaltSystemVersionMinor, minor)
-      && GetGestalt(gestaltSystemVersionBugFix, bugfix);
+  return GetGestalt(gestaltSystemVersionMajor, major) &&
+         GetGestalt(gestaltSystemVersionMinor, minor) &&
+         GetGestalt(gestaltSystemVersionBugFix, bugfix);
 }
 
 MacOSVersionName GetOSVersionName() {
   int major = 0, minor = 0, bugfix = 0;
-  if (!GetOSVersion(&major, &minor, &bugfix))
+  if (!GetOSVersion(&major, &minor, &bugfix)) {
     return kMacOSUnknown;
+  }
   if (major > 10) {
     return kMacOSNewer;
   }
@@ -136,14 +140,17 @@
       return kMacOSLion;
     case 8:
       return kMacOSMountainLion;
+    case 9:
+      return kMacOSMavericks;
   }
   return kMacOSNewer;
 }
 
 bool GetQuickTimeVersion(std::string* out) {
   int ver;
-  if (!GetGestalt(gestaltQuickTimeVersion, &ver))
+  if (!GetGestalt(gestaltQuickTimeVersion, &ver)) {
     return false;
+  }
 
   std::stringstream ss;
   ss << std::hex << ver;
diff --git a/talk/base/macutils.h b/talk/base/macutils.h
index ad5e7ad..17c09ed 100644
--- a/talk/base/macutils.h
+++ b/talk/base/macutils.h
@@ -56,7 +56,8 @@
   kMacOSSnowLeopard,   // 10.6
   kMacOSLion,          // 10.7
   kMacOSMountainLion,  // 10.8
-  kMacOSNewer,         // 10.9+
+  kMacOSMavericks,     // 10.9
+  kMacOSNewer,         // 10.10+
 };
 
 bool GetOSVersion(int* major, int* minor, int* bugfix);
diff --git a/talk/base/macutils_unittest.cc b/talk/base/macutils_unittest.cc
index 25858a2..dfc211a 100644
--- a/talk/base/macutils_unittest.cc
+++ b/talk/base/macutils_unittest.cc
@@ -30,12 +30,14 @@
 
 TEST(MacUtilsTest, GetOsVersionName) {
   talk_base::MacOSVersionName ver = talk_base::GetOSVersionName();
+  LOG(LS_INFO) << "GetOsVersionName " << ver;
   EXPECT_NE(talk_base::kMacOSUnknown, ver);
 }
 
 TEST(MacUtilsTest, GetQuickTimeVersion) {
   std::string version;
   EXPECT_TRUE(talk_base::GetQuickTimeVersion(&version));
+  LOG(LS_INFO) << "GetQuickTimeVersion " << version;
 }
 
 TEST(MacUtilsTest, RunAppleScriptCompileError) {
diff --git a/talk/base/ssladapter.cc b/talk/base/ssladapter.cc
index b7d8294..5bcb619 100644
--- a/talk/base/ssladapter.cc
+++ b/talk/base/ssladapter.cc
@@ -58,6 +58,7 @@
 #elif SSL_USE_OPENSSL  // && !SSL_USE_SCHANNEL
   return new OpenSSLAdapter(socket);
 #else  // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
+  delete socket;
   return NULL;
 #endif  // !SSL_USE_OPENSSL && !SSL_USE_SCHANNEL
 }
diff --git a/talk/base/ssladapter.h b/talk/base/ssladapter.h
index 1583dc2..4d2dbcd 100644
--- a/talk/base/ssladapter.h
+++ b/talk/base/ssladapter.h
@@ -47,7 +47,9 @@
   // negotiation will begin as soon as the socket connects.
   virtual int StartSSL(const char* hostname, bool restartable) = 0;
 
-  // Create the default SSL adapter for this platform
+  // Create the default SSL adapter for this platform. On failure, returns NULL
+  // and deletes |socket|. Otherwise, the returned SSLAdapter takes ownership
+  // of |socket|.
   static SSLAdapter* Create(AsyncSocket* socket);
 
  private:
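
A short usage sketch of the ownership contract documented above; the socket creation is elided and the surrounding handling is illustrative, not from this change:

    // CreateRawSocket() is a hypothetical placeholder for however the caller
    // obtains its AsyncSocket.
    talk_base::AsyncSocket* raw_socket = CreateRawSocket();
    talk_base::SSLAdapter* ssl_adapter = talk_base::SSLAdapter::Create(raw_socket);
    if (ssl_adapter == NULL) {
      // Create() has already deleted |raw_socket|; do not use or free it here.
      return;
    }
    // |ssl_adapter| now owns |raw_socket|; deleting the adapter cleans up both.
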
diff --git a/talk/examples/call/callclient.cc b/talk/examples/call/callclient.cc
index afbdd29..849455e 100644
--- a/talk/examples/call/callclient.cc
+++ b/talk/examples/call/callclient.cc
@@ -1601,7 +1601,7 @@
        vmi.senders.begin(); it != vmi.senders.end(); ++it) {
     console_->PrintLine("Sender: ssrc=%u codec='%s' bytes=%d packets=%d "
                         "rtt=%d jitter=%d",
-                        it->ssrc, it->codec_name.c_str(), it->bytes_sent,
+                        it->ssrc(), it->codec_name.c_str(), it->bytes_sent,
                         it->packets_sent, it->rtt_ms, it->jitter_ms);
   }
 
@@ -1609,7 +1609,7 @@
        vmi.receivers.begin(); it != vmi.receivers.end(); ++it) {
     console_->PrintLine("Receiver: ssrc=%u bytes=%d packets=%d "
                         "jitter=%d loss=%.2f",
-                        it->ssrc, it->bytes_rcvd, it->packets_rcvd,
+                        it->ssrc(), it->bytes_rcvd, it->packets_rcvd,
                         it->jitter_ms, it->fraction_lost);
   }
 }
diff --git a/talk/media/base/mediachannel.h b/talk/media/base/mediachannel.h
index 919248f..d7e7192 100644
--- a/talk/media/base/mediachannel.h
+++ b/talk/media/base/mediachannel.h
@@ -173,6 +173,7 @@
     experimental_agc.SetFrom(change.experimental_agc);
     experimental_aec.SetFrom(change.experimental_aec);
     aec_dump.SetFrom(change.aec_dump);
+    experimental_acm.SetFrom(change.experimental_acm);
     tx_agc_target_dbov.SetFrom(change.tx_agc_target_dbov);
     tx_agc_digital_compression_gain.SetFrom(
         change.tx_agc_digital_compression_gain);
@@ -200,6 +201,7 @@
         experimental_aec == o.experimental_aec &&
         adjust_agc_delta == o.adjust_agc_delta &&
         aec_dump == o.aec_dump &&
+        experimental_acm == o.experimental_acm &&
         tx_agc_target_dbov == o.tx_agc_target_dbov &&
         tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain &&
         tx_agc_limiter == o.tx_agc_limiter &&
@@ -227,6 +229,7 @@
     ost << ToStringIfSet("experimental_agc", experimental_agc);
     ost << ToStringIfSet("experimental_aec", experimental_aec);
     ost << ToStringIfSet("aec_dump", aec_dump);
+    ost << ToStringIfSet("experimental_acm", experimental_acm);
     ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
     ost << ToStringIfSet("tx_agc_digital_compression_gain",
         tx_agc_digital_compression_gain);
@@ -263,6 +266,7 @@
   Settable<bool> experimental_agc;
   Settable<bool> experimental_aec;
   Settable<bool> aec_dump;
+  Settable<bool> experimental_acm;
   // Note that tx_agc_* only applies to non-experimental AGC.
   Settable<uint16> tx_agc_target_dbov;
   Settable<uint16> tx_agc_digital_compression_gain;
@@ -313,6 +317,7 @@
     buffered_mode_latency.SetFrom(change.buffered_mode_latency);
     lower_min_bitrate.SetFrom(change.lower_min_bitrate);
     dscp.SetFrom(change.dscp);
+    suspend_below_min_bitrate.SetFrom(change.suspend_below_min_bitrate);
   }
 
   bool operator==(const VideoOptions& o) const {
@@ -338,7 +343,8 @@
             o.system_high_adaptation_threshhold &&
         buffered_mode_latency == o.buffered_mode_latency &&
         lower_min_bitrate == o.lower_min_bitrate &&
-        dscp == o.dscp;
+        dscp == o.dscp &&
+        suspend_below_min_bitrate == o.suspend_below_min_bitrate;
   }
 
   std::string ToString() const {
@@ -367,6 +373,8 @@
     ost << ToStringIfSet("buffered mode latency", buffered_mode_latency);
     ost << ToStringIfSet("lower min bitrate", lower_min_bitrate);
     ost << ToStringIfSet("dscp", dscp);
+    ost << ToStringIfSet("suspend below min bitrate",
+                         suspend_below_min_bitrate);
     ost << "}";
     return ost.str();
   }
@@ -415,6 +423,9 @@
   Settable<bool> lower_min_bitrate;
   // Set DSCP value for packet sent from video channel.
   Settable<bool> dscp;
+  // Enable WebRTC suspension of video. No video frames will be sent when the
+  // bitrate is below the configured minimum bitrate.
+  Settable<bool> suspend_below_min_bitrate;
 };
 
 // A class for playing out soundclips.
@@ -624,6 +635,35 @@
         fraction_lost(0.0),
         rtt_ms(0) {
   }
+  void add_ssrc(const SsrcSenderInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32 ssrc) {
+    SsrcSenderInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32> ssrcs() const {
+    std::vector<uint32> retval;
+    for (std::vector<SsrcSenderInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that assume only one ssrc exists per media
+  // stream.
+  // This will eventually go away.
+  uint32 ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
   int64 bytes_sent;
   int packets_sent;
   int packets_lost;
@@ -641,6 +681,35 @@
         packets_lost(0),
         fraction_lost(0.0) {
   }
+  void add_ssrc(const SsrcReceiverInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added to SsrcReceiverInfo, this function should go away.
+  void add_ssrc(uint32 ssrc) {
+    SsrcReceiverInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  std::vector<uint32> ssrcs() const {
+    std::vector<uint32> retval;
+    for (std::vector<SsrcReceiverInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that assume only one ssrc exists per media
+  // stream.
+  // This will eventually go away.
+  uint32 ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
+
   int64 bytes_rcvd;
   int packets_rcvd;
   int packets_lost;
@@ -651,8 +720,7 @@
 
 struct VoiceSenderInfo : public MediaSenderInfo {
   VoiceSenderInfo()
-      : ssrc(0),
-        ext_seqnum(0),
+      : ext_seqnum(0),
         jitter_ms(0),
         audio_level(0),
         aec_quality_min(0.0),
@@ -663,7 +731,6 @@
         typing_noise_detected(false) {
   }
 
-  uint32 ssrc;
   int ext_seqnum;
   int jitter_ms;
   int audio_level;
@@ -677,8 +744,7 @@
 
 struct VoiceReceiverInfo : public MediaReceiverInfo {
   VoiceReceiverInfo()
-      : ssrc(0),
-        ext_seqnum(0),
+      : ext_seqnum(0),
         jitter_ms(0),
         jitter_buffer_ms(0),
         jitter_buffer_preferred_ms(0),
@@ -687,7 +753,6 @@
         expand_rate(0) {
   }
 
-  uint32 ssrc;
   int ext_seqnum;
   int jitter_ms;
   int jitter_buffer_ms;
@@ -709,10 +774,11 @@
         framerate_sent(0),
         nominal_bitrate(0),
         preferred_bitrate(0),
-        adapt_reason(0) {
+        adapt_reason(0),
+        capture_jitter_ms(0),
+        avg_encode_ms(0) {
   }
 
-  std::vector<uint32> ssrcs;
   std::vector<SsrcGroup> ssrc_groups;
   int packets_cached;
   int firs_rcvd;
@@ -724,6 +790,8 @@
   int nominal_bitrate;
   int preferred_bitrate;
   int adapt_reason;
+  int capture_jitter_ms;
+  int avg_encode_ms;
 };
 
 struct VideoReceiverInfo : public MediaReceiverInfo {
@@ -747,7 +815,6 @@
         current_delay_ms(0) {
   }
 
-  std::vector<uint32> ssrcs;
   std::vector<SsrcGroup> ssrc_groups;
   int packets_concealed;
   int firs_sent;
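
The flat per-type ssrc/ssrcs members removed above are folded into local_stats, with add_ssrc(), ssrcs() and ssrc() as the migration path for existing call sites. A small before/after sketch of a hypothetical call site:

    cricket::VideoSenderInfo sinfo;

    // Old style, removed in this change:  sinfo.ssrcs.push_back(1234);
    sinfo.add_ssrc(1234);                     // wraps 1234 in an SsrcSenderInfo

    uint32 primary = sinfo.ssrc();            // first ssrc, or 0 if none added
    std::vector<uint32> all = sinfo.ssrcs();  // flat list for legacy consumers
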
diff --git a/talk/media/base/rtpdataengine.cc b/talk/media/base/rtpdataengine.cc
index 1254b29..3a9228a 100644
--- a/talk/media/base/rtpdataengine.cc
+++ b/talk/media/base/rtpdataengine.cc
@@ -259,10 +259,12 @@
 
   DataCodec codec;
   if (!FindCodecById(recv_codecs_, header.payload_type, &codec)) {
-    LOG(LS_WARNING) << "Not receiving packet "
-                    << header.ssrc << ":" << header.seq_num
-                    << " (" << data_len << ")"
-                    << " because unknown payload id: " << header.payload_type;
+    // With bundling, this would be logged for every message, so the logging
+    // is disabled.
+    // LOG(LS_WARNING) << "Not receiving packet "
+    //                << header.ssrc << ":" << header.seq_num
+    //                << " (" << data_len << ")"
+    //                << " because unknown payload id: " << header.payload_type;
     return;
   }
 
diff --git a/talk/media/base/streamparams.cc b/talk/media/base/streamparams.cc
index c508b68..19d8269 100644
--- a/talk/media/base/streamparams.cc
+++ b/talk/media/base/streamparams.cc
@@ -27,6 +27,7 @@
 
 #include "talk/media/base/streamparams.h"
 
+#include <list>
 #include <sstream>
 
 namespace cricket {
@@ -180,4 +181,49 @@
   return RemoveStream(streams, StreamSelector(groupid, id));
 }
 
+bool IsOneSsrcStream(const StreamParams& sp) {
+  if (sp.ssrcs.size() == 1 && sp.ssrc_groups.empty()) {
+    return true;
+  }
+  if (sp.ssrcs.size() == 2) {
+    const SsrcGroup* fid_group = sp.get_ssrc_group(kFidSsrcGroupSemantics);
+    if (fid_group != NULL) {
+      return (sp.ssrcs == fid_group->ssrcs);
+    }
+  }
+  return false;
+}
+
+static void RemoveFirst(std::list<uint32>* ssrcs, uint32 value) {
+  std::list<uint32>::iterator it =
+      std::find(ssrcs->begin(), ssrcs->end(), value);
+  if (it != ssrcs->end()) {
+    ssrcs->erase(it);
+  }
+}
+
+bool IsSimulcastStream(const StreamParams& sp) {
+  const SsrcGroup* const sg = sp.get_ssrc_group(kSimSsrcGroupSemantics);
+  if (sg == NULL || sg->ssrcs.size() < 2) {
+    return false;
+  }
+  // Start with all StreamParams SSRCs, then remove the simulcast SSRCs (from
+  // sg) and the RTX SSRCs. Any leftover SSRCs are ones we don't know how to
+  // handle. Only the first matching SSRC is removed, so duplicates fail too.
+  std::list<uint32> sp_ssrcs(sp.ssrcs.begin(), sp.ssrcs.end());
+  for (size_t i = 0; i < sg->ssrcs.size(); ++i) {
+    RemoveFirst(&sp_ssrcs, sg->ssrcs[i]);
+  }
+  for (size_t i = 0; i < sp.ssrc_groups.size(); ++i) {
+    const SsrcGroup& group = sp.ssrc_groups[i];
+    if (group.semantics.compare(kFidSsrcGroupSemantics) != 0 ||
+        group.ssrcs.size() != 2) {
+      continue;
+    }
+    RemoveFirst(&sp_ssrcs, group.ssrcs[1]);
+  }
+  // If there's SSRCs left that we don't know how to handle, we bail out.
+  return sp_ssrcs.size() == 0;
+}
+
 }  // namespace cricket
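
A worked trace of IsSimulcastStream() on the kind of stream the new testutils helpers build (simulcast SSRCs {1,2,3}, RTX SSRCs {4,5,6}); the numbers are illustrative:

    // sp.ssrcs        == {1, 2, 3, 4, 5, 6}
    // sp.ssrc_groups  == SIM{1,2,3}, FID{1,4}, FID{2,5}, FID{3,6}
    //
    // sp_ssrcs starts as {1,2,3,4,5,6}.
    // Removing the SIM group's ssrcs leaves {4,5,6}.
    // Removing the second member of each FID pair leaves {} -> returns true.
    //
    // A stray ssrc 25 with no group would leave {25} -> returns false, and a
    // duplicated ssrc is only removed once, so it also fails the check.
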
diff --git a/talk/media/base/streamparams.h b/talk/media/base/streamparams.h
index dc25a6e..b57f1f7 100644
--- a/talk/media/base/streamparams.h
+++ b/talk/media/base/streamparams.h
@@ -213,6 +213,16 @@
                        const std::string& groupid,
                        const std::string& id);
 
+// Checks if |sp| defines parameters for a single primary stream. There may
+// be an RTX stream associated with the primary stream. Left non-static so
+// that it can be unit tested.
+bool IsOneSsrcStream(const StreamParams& sp);
+
+// Checks if |sp| defines parameters for one simulcast stream. There may be RTX
+// streams associated with the simulcast streams. Left non-static so that it
+// can be unit tested.
+bool IsSimulcastStream(const StreamParams& sp);
+
 }  // namespace cricket
 
 #endif  // TALK_MEDIA_BASE_STREAMPARAMS_H_
diff --git a/talk/media/base/streamparams_unittest.cc b/talk/media/base/streamparams_unittest.cc
index 99d1603..f3a03d6 100644
--- a/talk/media/base/streamparams_unittest.cc
+++ b/talk/media/base/streamparams_unittest.cc
@@ -29,8 +29,10 @@
 #include "talk/media/base/streamparams.h"
 #include "talk/media/base/testutils.h"
 
-static const uint32 kSscrs1[] = {1};
-static const uint32 kSscrs2[] = {1, 2};
+static const uint32 kSsrcs1[] = {1};
+static const uint32 kSsrcs2[] = {1, 2};
+static const uint32 kSsrcs3[] = {1, 2, 3};
+static const uint32 kRtxSsrcs3[] = {4, 5, 6};
 
 static cricket::StreamParams CreateStreamParamsWithSsrcGroup(
     const std::string& semantics, const uint32 ssrcs_in[], size_t len) {
@@ -44,10 +46,10 @@
 
 TEST(SsrcGroup, EqualNotEqual) {
   cricket::SsrcGroup ssrc_groups[] = {
-    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSscrs1)),
-    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSscrs2)),
-    cricket::SsrcGroup("Abc", MAKE_VECTOR(kSscrs2)),
-    cricket::SsrcGroup("abc", MAKE_VECTOR(kSscrs2)),
+    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)),
+    cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)),
+    cricket::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)),
+    cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)),
   };
 
   for (size_t i = 0; i < ARRAY_SIZE(ssrc_groups); ++i) {
@@ -59,18 +61,18 @@
 }
 
 TEST(SsrcGroup, HasSemantics) {
-  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
   EXPECT_TRUE(sg1.has_semantics("ABC"));
 
-  cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1));
   EXPECT_FALSE(sg2.has_semantics("ABC"));
 
-  cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1));
   EXPECT_FALSE(sg3.has_semantics("ABC"));
 }
 
 TEST(SsrcGroup, ToString) {
-  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSscrs1));
+  cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
   EXPECT_STREQ("{semantics:ABC;ssrcs:[1]}", sg1.ToString().c_str());
 }
 
@@ -88,22 +90,22 @@
 
 TEST(StreamParams, HasSsrcGroup) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_EQ(2U, sp.ssrcs.size());
-  EXPECT_EQ(kSscrs2[0], sp.first_ssrc());
+  EXPECT_EQ(kSsrcs2[0], sp.first_ssrc());
   EXPECT_TRUE(sp.has_ssrcs());
-  EXPECT_TRUE(sp.has_ssrc(kSscrs2[0]));
-  EXPECT_TRUE(sp.has_ssrc(kSscrs2[1]));
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[0]));
+  EXPECT_TRUE(sp.has_ssrc(kSsrcs2[1]));
   EXPECT_TRUE(sp.has_ssrc_group("XYZ"));
   EXPECT_EQ(1U, sp.ssrc_groups.size());
   EXPECT_EQ(2U, sp.ssrc_groups[0].ssrcs.size());
-  EXPECT_EQ(kSscrs2[0], sp.ssrc_groups[0].ssrcs[0]);
-  EXPECT_EQ(kSscrs2[1], sp.ssrc_groups[0].ssrcs[1]);
+  EXPECT_EQ(kSsrcs2[0], sp.ssrc_groups[0].ssrcs[0]);
+  EXPECT_EQ(kSsrcs2[1], sp.ssrc_groups[0].ssrcs[1]);
 }
 
 TEST(StreamParams, GetSsrcGroup) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_EQ(NULL, sp.get_ssrc_group("xyz"));
   EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ"));
 }
@@ -112,13 +114,13 @@
   cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1);
   cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2);
   cricket::StreamParams sg1 =
-      CreateStreamParamsWithSsrcGroup("ABC", kSscrs1, ARRAY_SIZE(kSscrs1));
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, ARRAY_SIZE(kSsrcs1));
   cricket::StreamParams sg2 =
-      CreateStreamParamsWithSsrcGroup("ABC", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sg3 =
-      CreateStreamParamsWithSsrcGroup("Abc", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sg4 =
-      CreateStreamParamsWithSsrcGroup("abc", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4};
 
   for (size_t i = 0; i < ARRAY_SIZE(sps); ++i) {
@@ -159,7 +161,90 @@
 
 TEST(StreamParams, ToString) {
   cricket::StreamParams sp =
-      CreateStreamParamsWithSsrcGroup("XYZ", kSscrs2, ARRAY_SIZE(kSscrs2));
+      CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, ARRAY_SIZE(kSsrcs2));
   EXPECT_STREQ("{ssrcs:[1,2];ssrc_groups:{semantics:XYZ;ssrcs:[1,2]};}",
                sp.ToString().c_str());
 }
+
+
+TEST(StreamParams, TestIsOneSsrcStream_LegacyStream) {
+  EXPECT_TRUE(
+      cricket::IsOneSsrcStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimulcastStream) {
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_FALSE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_LegacyStream) {
+  EXPECT_FALSE(
+      cricket::IsSimulcastStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SingleRtxStream) {
+  cricket::StreamParams stream;
+  stream.add_ssrc(13);
+  EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimulcastStream) {
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+  EXPECT_TRUE(cricket::IsSimulcastStream(
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimRtxStream) {
+  cricket::StreamParams stream =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  EXPECT_TRUE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_InvalidStreams) {
+  // stream1 has extra non-sim, non-fid ssrc.
+  cricket::StreamParams stream1 =
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3));
+  stream1.add_ssrc(25);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream1));
+
+  // stream2 has invalid fid-group (no primary).
+  cricket::StreamParams stream2;
+  stream2.add_ssrc(13);
+  EXPECT_TRUE(stream2.AddFidSsrc(13, 14));
+  std::remove(stream2.ssrcs.begin(), stream2.ssrcs.end(), 13);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream2));
+
+  // stream3 has two SIM groups.
+  cricket::StreamParams stream3 =
+      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2));
+  std::vector<uint32> sim_ssrcs = MAKE_VECTOR(kRtxSsrcs3);
+  cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, sim_ssrcs);
+  for (size_t i = 0; i < sim_ssrcs.size(); i++) {
+    stream3.add_ssrc(sim_ssrcs[i]);
+  }
+  stream3.ssrc_groups.push_back(sg);
+  EXPECT_FALSE(cricket::IsSimulcastStream(stream3));
+}
diff --git a/talk/media/base/testutils.cc b/talk/media/base/testutils.cc
index 3edb5c7..9b1b16d 100644
--- a/talk/media/base/testutils.cc
+++ b/talk/media/base/testutils.cc
@@ -336,4 +336,30 @@
   return true;
 }
 
+cricket::StreamParams CreateSimStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs) {
+  cricket::StreamParams sp;
+  cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, ssrcs);
+  sp.ssrcs = ssrcs;
+  sp.ssrc_groups.push_back(sg);
+  sp.cname = cname;
+  return sp;
+}
+
+// There should be an rtx_ssrc per ssrc.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs,
+    const std::vector<uint32>& rtx_ssrcs) {
+  cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs);
+  for (size_t i = 0; i < ssrcs.size(); ++i) {
+    sp.ssrcs.push_back(rtx_ssrcs[i]);
+    std::vector<uint32> fid_ssrcs;
+    fid_ssrcs.push_back(ssrcs[i]);
+    fid_ssrcs.push_back(rtx_ssrcs[i]);
+    cricket::SsrcGroup fid_group(cricket::kFidSsrcGroupSemantics, fid_ssrcs);
+    sp.ssrc_groups.push_back(fid_group);
+  }
+  return sp;
+}
+
 }  // namespace cricket
diff --git a/talk/media/base/testutils.h b/talk/media/base/testutils.h
index 136b7b9..dd13d5a 100644
--- a/talk/media/base/testutils.h
+++ b/talk/media/base/testutils.h
@@ -237,6 +237,16 @@
   }
   return false;
 }
+
+// Create Simulcast StreamParams with given |ssrcs| and |cname|.
+cricket::StreamParams CreateSimStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs);
+// Create a simulcast stream with the given |ssrcs| and |rtx_ssrcs|.
+// The number of |rtx_ssrcs| must match the number of |ssrcs|.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+    const std::string& cname, const std::vector<uint32>& ssrcs,
+    const std::vector<uint32>& rtx_ssrcs);
+
 }  // namespace cricket
 
 #endif  // TALK_MEDIA_BASE_TESTUTILS_H_
diff --git a/talk/media/base/videoengine_unittest.h b/talk/media/base/videoengine_unittest.h
index d8b9bcb..f50a765 100644
--- a/talk/media/base/videoengine_unittest.h
+++ b/talk/media/base/videoengine_unittest.h
@@ -69,11 +69,13 @@
       IsEqualRes(a, b.width, b.height, b.framerate);
 }
 
+namespace std {
 inline std::ostream& operator<<(std::ostream& s, const cricket::VideoCodec& c) {
   s << "{" << c.name << "(" << c.id << "), "
     << c.width << "x" << c.height << "x" << c.framerate << "}";
   return s;
 }
+}  // namespace std
 
 inline int TimeBetweenSend(const cricket::VideoCodec& codec) {
   return static_cast<int>(
@@ -788,9 +790,9 @@
     EXPECT_GT(info.senders[0].framerate_sent, 0);
 
     ASSERT_EQ(1U, info.receivers.size());
-    EXPECT_EQ(1U, info.senders[0].ssrcs.size());
-    EXPECT_EQ(1U, info.receivers[0].ssrcs.size());
-    EXPECT_EQ(info.senders[0].ssrcs[0], info.receivers[0].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+    EXPECT_EQ(1U, info.receivers[0].ssrcs().size());
+    EXPECT_EQ(info.senders[0].ssrcs()[0], info.receivers[0].ssrcs()[0]);
     EXPECT_EQ(NumRtpBytes(), info.receivers[0].bytes_rcvd);
     EXPECT_EQ(NumRtpPackets(), info.receivers[0].packets_rcvd);
     EXPECT_EQ(0.0, info.receivers[0].fraction_lost);
@@ -847,8 +849,8 @@
 
     ASSERT_EQ(2U, info.receivers.size());
     for (size_t i = 0; i < info.receivers.size(); ++i) {
-      EXPECT_EQ(1U, info.receivers[i].ssrcs.size());
-      EXPECT_EQ(i + 1, info.receivers[i].ssrcs[0]);
+      EXPECT_EQ(1U, info.receivers[i].ssrcs().size());
+      EXPECT_EQ(i + 1, info.receivers[i].ssrcs()[0]);
       EXPECT_EQ(NumRtpBytes(), info.receivers[i].bytes_rcvd);
       EXPECT_EQ(NumRtpPackets(), info.receivers[i].packets_rcvd);
       EXPECT_EQ(0.0, info.receivers[i].fraction_lost);
@@ -903,12 +905,12 @@
     ASSERT_EQ(2U, info.senders.size());
     EXPECT_EQ(NumRtpPackets(),
         info.senders[0].packets_sent + info.senders[1].packets_sent);
-    EXPECT_EQ(1U, info.senders[0].ssrcs.size());
-    EXPECT_EQ(1234U, info.senders[0].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+    EXPECT_EQ(1234U, info.senders[0].ssrcs()[0]);
     EXPECT_EQ(DefaultCodec().width, info.senders[0].frame_width);
     EXPECT_EQ(DefaultCodec().height, info.senders[0].frame_height);
-    EXPECT_EQ(1U, info.senders[1].ssrcs.size());
-    EXPECT_EQ(5678U, info.senders[1].ssrcs[0]);
+    EXPECT_EQ(1U, info.senders[1].ssrcs().size());
+    EXPECT_EQ(5678U, info.senders[1].ssrcs()[0]);
     EXPECT_EQ(1024, info.senders[1].frame_width);
     EXPECT_EQ(768, info.senders[1].frame_height);
     // The capturer must be unregistered here as it runs out of its scope next.
diff --git a/talk/media/webrtc/fakewebrtcvideoengine.h b/talk/media/webrtc/fakewebrtcvideoengine.h
index b3922ff..b81d04b 100644
--- a/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -276,6 +276,8 @@
           send(false),
           receive_(false),
           can_transmit_(true),
+          remote_rtx_ssrc_(-1),
+          rtx_send_payload_type(-1),
           rtcp_status_(webrtc::kRtcpNone),
           key_frame_request_method_(webrtc::kViEKeyFrameRequestNone),
           tmmbr_(false),
@@ -306,6 +308,9 @@
     bool receive_;
     bool can_transmit_;
     std::map<int, int> ssrcs_;
+    std::map<int, int> rtx_ssrcs_;
+    int remote_rtx_ssrc_;
+    int rtx_send_payload_type;
     std::string cname_;
     webrtc::ViERTCPMode rtcp_status_;
     webrtc::ViEKeyFrameRequestMethod key_frame_request_method_;
@@ -500,10 +505,23 @@
     return static_cast<int>(
         channels_.find(channel)->second->ssrcs_.size());
   }
+  int GetNumRtxSsrcs(int channel) const {
+    WEBRTC_ASSERT_CHANNEL(channel);
+    return static_cast<int>(
+        channels_.find(channel)->second->rtx_ssrcs_.size());
+  }
   bool GetIsTransmitting(int channel) const {
     WEBRTC_ASSERT_CHANNEL(channel);
     return channels_.find(channel)->second->can_transmit_;
   }
+  int GetRtxSsrc(int channel, int simulcast_idx) const {
+    WEBRTC_ASSERT_CHANNEL(channel);
+    if (channels_.find(channel)->second->rtx_ssrcs_.find(simulcast_idx) ==
+        channels_.find(channel)->second->rtx_ssrcs_.end()) {
+      return -1;
+    }
+    return channels_.find(channel)->second->rtx_ssrcs_[simulcast_idx];
+  }
   bool ReceiveCodecRegistered(int channel,
                               const webrtc::VideoCodec& codec) const {
     WEBRTC_ASSERT_CHANNEL(channel);
@@ -557,6 +575,14 @@
     WEBRTC_ASSERT_CHANNEL(channel);
     channels_[channel]->receive_bandwidth_ = receive_bandwidth;
   };
+  int GetRtxSendPayloadType(int channel) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    return channels_[channel]->rtx_send_payload_type;
+  }
+  int GetRemoteRtxSsrc(int channel) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    return channels_.find(channel)->second->remote_rtx_ssrc_;
+  }
 
   WEBRTC_STUB(Release, ());
 
@@ -599,6 +625,9 @@
   }
   WEBRTC_STUB(RegisterCpuOveruseObserver,
       (int channel, webrtc::CpuOveruseObserver* observer));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB(CpuOveruseMeasures, (int, int*, int*));
+#endif
   WEBRTC_STUB(ConnectAudioChannel, (const int, const int));
   WEBRTC_STUB(DisconnectAudioChannel, (const int));
   WEBRTC_FUNC(StartSend, (const int channel)) {
@@ -716,6 +745,9 @@
   WEBRTC_STUB(WaitForFirstKeyFrame, (const int, const bool));
   WEBRTC_STUB(StartDebugRecording, (int, const char*));
   WEBRTC_STUB(StopDebugRecording, (int));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_VOID_STUB(SuspendBelowMinBitrate, (int));
+#endif
 
   // webrtc::ViECapture
   WEBRTC_STUB(NumberOfCaptureDevices, ());
@@ -851,11 +883,28 @@
                              const webrtc::StreamType usage,
                              const unsigned char idx)) {
     WEBRTC_CHECK_CHANNEL(channel);
-    channels_[channel]->ssrcs_[idx] = ssrc;
+    switch (usage) {
+      case webrtc::kViEStreamTypeNormal:
+        channels_[channel]->ssrcs_[idx] = ssrc;
+        break;
+      case webrtc::kViEStreamTypeRtx:
+        channels_[channel]->rtx_ssrcs_[idx] = ssrc;
+        break;
+      default:
+        return -1;
+    }
     return 0;
   }
-  WEBRTC_STUB_CONST(SetRemoteSSRCType, (const int,
-        const webrtc::StreamType, const unsigned int));
+
+  WEBRTC_FUNC_CONST(SetRemoteSSRCType, (const int channel,
+        const webrtc::StreamType usage, const unsigned int ssrc)) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    if (usage == webrtc::kViEStreamTypeRtx) {
+      channels_.find(channel)->second->remote_rtx_ssrc_ = ssrc;
+      return 0;
+    }
+    return -1;
+  }
 
   WEBRTC_FUNC_CONST(GetLocalSSRC, (const int channel,
                                    unsigned int& ssrc)) {
@@ -867,7 +916,12 @@
   WEBRTC_STUB_CONST(GetRemoteSSRC, (const int, unsigned int&));
   WEBRTC_STUB_CONST(GetRemoteCSRCs, (const int, unsigned int*));
 
-  WEBRTC_STUB(SetRtxSendPayloadType, (const int, const uint8));
+  WEBRTC_FUNC(SetRtxSendPayloadType, (const int channel,
+                                      const uint8 payload_type)) {
+    WEBRTC_CHECK_CHANNEL(channel);
+    channels_[channel]->rtx_send_payload_type = payload_type;
+    return 0;
+  }
   WEBRTC_STUB(SetRtxReceivePayloadType, (const int, const uint8));
 
   WEBRTC_STUB(SetStartSequenceNumber, (const int, unsigned short));
@@ -979,6 +1033,14 @@
       unsigned int&, unsigned int&, unsigned int&, int&));
   WEBRTC_STUB_CONST(GetRTPStatistics, (const int, unsigned int&, unsigned int&,
       unsigned int&, unsigned int&));
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB_CONST(GetReceiveChannelRtcpStatistics, (const int,
+      webrtc::RtcpStatistics&, int&));
+  WEBRTC_STUB_CONST(GetSendChannelRtcpStatistics, (const int,
+      webrtc::RtcpStatistics&, int&));
+  WEBRTC_STUB_CONST(GetRtpStatistics, (const int, webrtc::StreamDataCounters&,
+      webrtc::StreamDataCounters&));
+#endif
   WEBRTC_FUNC_CONST(GetBandwidthUsage, (const int channel,
       unsigned int& total_bitrate, unsigned int& video_bitrate,
       unsigned int& fec_bitrate, unsigned int& nack_bitrate)) {
@@ -1021,6 +1083,32 @@
     }
     return 0;
   }
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB(RegisterSendChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(DeregisterSendChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(RegisterReceiveChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(DeregisterReceiveChannelRtcpStatisticsCallback,
+                    (int, webrtc::RtcpStatisticsCallback*));
+  WEBRTC_STUB(RegisterSendChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(DeregisterSendChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(RegisterReceiveChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(DeregisterReceiveChannelRtpStatisticsCallback,
+                    (int, webrtc::StreamDataCountersCallback*));
+  WEBRTC_STUB(RegisterSendBitrateObserver,
+                    (int, webrtc::BitrateStatisticsObserver*));
+  WEBRTC_STUB(DeregisterSendBitrateObserver,
+                    (int, webrtc::BitrateStatisticsObserver*));
+  WEBRTC_STUB(RegisterSendFrameCountObserver,
+                    (int, webrtc::FrameCountObserver*));
+  WEBRTC_STUB(DeregisterSendFrameCountObserver,
+                    (int, webrtc::FrameCountObserver*));
+#endif
 
   WEBRTC_STUB(StartRTPDump, (const int, const char*, webrtc::RTPDirections));
   WEBRTC_STUB(StopRTPDump, (const int, webrtc::RTPDirections));
diff --git a/talk/media/webrtc/fakewebrtcvoiceengine.h b/talk/media/webrtc/fakewebrtcvoiceengine.h
index 4ecefff..ece0339 100644
--- a/talk/media/webrtc/fakewebrtcvoiceengine.h
+++ b/talk/media/webrtc/fakewebrtcvoiceengine.h
@@ -39,6 +39,8 @@
 #include "talk/media/base/voiceprocessor.h"
 #include "talk/media/webrtc/fakewebrtccommon.h"
 #include "talk/media/webrtc/webrtcvoe.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
+#include "webrtc/common.h"
 
 namespace cricket {
 
@@ -75,7 +77,7 @@
     int dtmf_length_ms;
   };
   struct Channel {
-    Channel()
+    explicit Channel(bool use_experimental_acm)
         : external_transport(false),
           send(false),
           playout(false),
@@ -95,7 +97,8 @@
           fec_type(117),
           nack_max_packets(0),
           send_ssrc(0),
-          level_header_ext_(-1) {
+          level_header_ext_(-1),
+          using_experimental_acm(use_experimental_acm) {
       memset(&send_codec, 0, sizeof(send_codec));
       memset(&rx_agc_config, 0, sizeof(rx_agc_config));
     }
@@ -124,6 +127,7 @@
     std::vector<webrtc::CodecInst> recv_codecs;
     webrtc::CodecInst send_codec;
     std::list<std::string> packets;
+    bool using_experimental_acm;
   };
 
   FakeWebRtcVoiceEngine(const cricket::AudioCodec* const* codecs,
@@ -199,6 +203,10 @@
   int GetNACKMaxPackets(int channel) {
     return channels_[channel]->nack_max_packets;
   }
+  bool IsUsingExperimentalAcm(int channel) {
+    WEBRTC_ASSERT_CHANNEL(channel);
+    return channels_[channel]->using_experimental_acm;
+  }
   int GetSendCNPayloadType(int channel, bool wideband) {
     return (wideband) ?
         channels_[channel]->cn16_type :
@@ -252,11 +260,11 @@
                                 true);
     }
   }
-  int AddChannel() {
+  int AddChannel(bool use_experimental_acm) {
     if (fail_create_channel_) {
       return -1;
     }
-    Channel* ch = new Channel();
+    Channel* ch = new Channel(use_experimental_acm);
     for (int i = 0; i < NumOfCodecs(); ++i) {
       webrtc::CodecInst codec;
       GetCodec(i, codec);
@@ -288,11 +296,14 @@
     return NULL;
   }
   WEBRTC_FUNC(CreateChannel, ()) {
-    return AddChannel();
+    return AddChannel(false);
   }
 #ifdef USE_WEBRTC_DEV_BRANCH
-  WEBRTC_FUNC(CreateChannel, (const webrtc::Config& /*config*/)) {
-    return AddChannel();
+  WEBRTC_FUNC(CreateChannel, (const webrtc::Config& config)) {
+    talk_base::scoped_ptr<webrtc::AudioCodingModule> acm(
+        config.Get<webrtc::AudioCodingModuleFactory>().Create(0));
+    return AddChannel(strcmp(acm->Version(), webrtc::kExperimentalAcmVersion)
+                      == 0);
   }
 #endif
   WEBRTC_FUNC(DeleteChannel, (int channel)) {
diff --git a/talk/media/webrtc/webrtcvideoengine.cc b/talk/media/webrtc/webrtcvideoengine.cc
index 6032134..397deb0 100644
--- a/talk/media/webrtc/webrtcvideoengine.cc
+++ b/talk/media/webrtc/webrtcvideoengine.cc
@@ -393,7 +393,8 @@
   explicit WebRtcEncoderObserver(int video_channel)
       : video_channel_(video_channel),
         framerate_(0),
-        bitrate_(0) {
+        bitrate_(0),
+        suspended_(false) {
   }
 
   // virtual functions from VieEncoderObserver.
@@ -406,6 +407,12 @@
     bitrate_ = bitrate;
   }
 
+  virtual void SuspendChange(int video_channel, bool is_suspended) {
+    talk_base::CritScope cs(&crit_);
+    ASSERT(video_channel_ == video_channel);
+    suspended_ = is_suspended;
+  }
+
   int framerate() const {
     talk_base::CritScope cs(&crit_);
     return framerate_;
@@ -414,12 +421,17 @@
     talk_base::CritScope cs(&crit_);
     return bitrate_;
   }
+  bool suspended() const {
+    talk_base::CritScope cs(&crit_);
+    return suspended_;
+  }
 
  private:
   mutable talk_base::CriticalSection crit_;
   int video_channel_;
   int framerate_;
   int bitrate_;
+  bool suspended_;
 };
 
 class WebRtcLocalStreamInfo {
@@ -757,9 +769,10 @@
 
 const WebRtcVideoEngine::VideoCodecPref
     WebRtcVideoEngine::kVideoCodecPrefs[] = {
-    {kVp8PayloadName, 100, 0},
-    {kRedPayloadName, 116, 1},
-    {kFecPayloadName, 117, 2},
+    {kVp8PayloadName, 100, -1, 0},
+    {kRedPayloadName, 116, -1, 1},
+    {kFecPayloadName, 117, -1, 2},
+    {kRtxCodecName, 96, 100, 3},
 };
 
 // The formats are sorted by the descending order of width. We use the order to
@@ -1319,6 +1332,10 @@
       if (_stricmp(kVp8PayloadName, codec.name.c_str()) == 0) {
         AddDefaultFeedbackParams(&codec);
       }
+      if (pref.associated_payload_type != -1) {
+        codec.SetParam(kCodecParamAssociatedPayloadType,
+                       pref.associated_payload_type);
+      }
       video_codecs_.push_back(codec);
       internal_codec_names.insert(codec.name);
     }
@@ -1488,6 +1505,7 @@
       remb_enabled_(false),
       render_started_(false),
       first_receive_ssrc_(0),
+      send_rtx_type_(-1),
       send_red_type_(-1),
       send_fec_type_(-1),
       send_min_bitrate_(kMinVideoBitrate),
@@ -1564,12 +1582,19 @@
   if (sending_) {
     ConvertToCricketVideoCodec(*send_codec_, &current);
   }
+  std::map<int, int> primary_rtx_pt_mapping;
   for (std::vector<VideoCodec>::const_iterator iter = codecs.begin();
       iter != codecs.end(); ++iter) {
     if (_stricmp(iter->name.c_str(), kRedPayloadName) == 0) {
       send_red_type_ = iter->id;
     } else if (_stricmp(iter->name.c_str(), kFecPayloadName) == 0) {
       send_fec_type_ = iter->id;
+    } else if (_stricmp(iter->name.c_str(), kRtxCodecName) == 0) {
+      int rtx_type = iter->id;
+      int rtx_primary_type = -1;
+      if (iter->GetParam(kCodecParamAssociatedPayloadType, &rtx_primary_type)) {
+        primary_rtx_pt_mapping[rtx_primary_type] = rtx_type;
+      }
     } else if (engine()->CanSendCodec(*iter, current, &checked_codec)) {
       webrtc::VideoCodec wcodec;
       if (engine()->ConvertFromCricketVideoCodec(checked_codec, &wcodec)) {
@@ -1625,6 +1650,14 @@
   // Select the first matched codec.
   webrtc::VideoCodec& codec(send_codecs[0]);
 
+  // Set the RTX payload type if the now-active primary codec has an
+  // associated RTX codec. This value will be used in SetSendCodec.
+  std::map<int, int>::const_iterator rtx_it =
+    primary_rtx_pt_mapping.find(static_cast<int>(codec.plType));
+  if (rtx_it != primary_rtx_pt_mapping.end()) {
+    send_rtx_type_ = rtx_it->second;
+  }
+
   if (!SetSendCodec(
           codec, codec.minBitrate, codec.startBitrate, codec.maxBitrate)) {
     return false;
@@ -1726,9 +1759,9 @@
 bool WebRtcVideoMediaChannel::AddSendStream(const StreamParams& sp) {
   LOG(LS_INFO) << "AddSendStream " << sp.ToString();
 
-  if (!IsOneSsrcStream(sp)) {
-      LOG(LS_ERROR) << "AddSendStream: bad local stream parameters";
-      return false;
+  if (!IsOneSsrcStream(sp) && !IsSimulcastStream(sp)) {
+    LOG(LS_ERROR) << "AddSendStream: bad local stream parameters";
+    return false;
   }
 
   uint32 ssrc_key;
@@ -1758,6 +1791,11 @@
     return false;
   }
 
+  // Set the corresponding RTX SSRC.
+  if (!SetLocalRtxSsrc(channel_id, sp, sp.first_ssrc(), 0)) {
+    return false;
+  }
+
   // Set RTCP CName.
   if (engine()->vie()->rtp()->SetRTCPCName(channel_id,
                                            sp.cname.c_str()) != 0) {
@@ -1862,10 +1900,11 @@
     return false;
   }
 
-  // TODO(perkj): Implement recv media from multiple SSRCs per stream.
-  if (sp.ssrcs.size() != 1) {
-    LOG(LS_ERROR) << "WebRtcVideoMediaChannel supports one receiving SSRC per"
-                  << " stream";
+  // TODO(perkj): Implement recv media from multiple media SSRCs per stream.
+  // NOTE: We have two SSRCs per stream when RTX is enabled.
+  if (!IsOneSsrcStream(sp)) {
+    LOG(LS_ERROR) << "WebRtcVideoMediaChannel supports one primary SSRC per"
+                  << " stream and one FID SSRC per primary SSRC.";
     return false;
   }
 
@@ -1878,6 +1917,16 @@
     return false;
   }
 
+  // Set the corresponding RTX SSRC.
+  uint32 rtx_ssrc;
+  bool has_rtx = sp.GetFidSsrc(sp.first_ssrc(), &rtx_ssrc);
+  if (has_rtx && engine()->vie()->rtp()->SetRemoteSSRCType(
+      channel_id, webrtc::kViEStreamTypeRtx, rtx_ssrc) != 0) {
+    LOG_RTCERR3(SetRemoteSSRCType, channel_id, webrtc::kViEStreamTypeRtx,
+                rtx_ssrc);
+    return false;
+  }
+
   // Get the default renderer.
   VideoRenderer* default_renderer = NULL;
   if (InConferenceMode()) {
@@ -2030,10 +2079,6 @@
   return success;
 }
 
-bool WebRtcVideoMediaChannel::IsOneSsrcStream(const StreamParams& sp) {
-  return (sp.ssrcs.size() == 1 && sp.ssrc_groups.size() == 0);
-}
-
 bool WebRtcVideoMediaChannel::HasReadySendChannels() {
   return !send_channels_.empty() &&
       ((send_channels_.size() > 1) ||
@@ -2235,7 +2280,9 @@
       WebRtcLocalStreamInfo* channel_stream_info =
           send_channel->local_stream_info();
 
-      sinfo.ssrcs = send_params->ssrcs;
+      for (size_t i = 0; i < send_params->ssrcs.size(); ++i) {
+        sinfo.add_ssrc(send_params->ssrcs[i]);
+      }
       sinfo.codec_name = send_codec_->plName;
       sinfo.bytes_sent = bytes_sent;
       sinfo.packets_sent = packets_sent;
@@ -2252,7 +2299,38 @@
       sinfo.nominal_bitrate = send_channel->encoder_observer()->bitrate();
       sinfo.preferred_bitrate = send_max_bitrate_;
       sinfo.adapt_reason = send_channel->CurrentAdaptReason();
+      sinfo.capture_jitter_ms = -1;
+      sinfo.avg_encode_ms = -1;
 
+#ifdef USE_WEBRTC_DEV_BRANCH
+      int capture_jitter_ms = 0;
+      int avg_encode_time_ms = 0;
+      if (engine()->vie()->base()->CpuOveruseMeasures(
+          channel_id, &capture_jitter_ms, &avg_encode_time_ms) == 0) {
+        sinfo.capture_jitter_ms = capture_jitter_ms;
+        sinfo.avg_encode_ms = avg_encode_time_ms;
+      }
+#endif
+
+#ifdef USE_WEBRTC_DEV_BRANCH
+      // Get received RTCP statistics for the sender (reported by the remote
+      // client in an RTCP packet), if available.
+      // It's not a fatal error if we can't, since RTCP may not have arrived
+      // yet.
+      webrtc::RtcpStatistics outgoing_stream_rtcp_stats;
+      int outgoing_stream_rtt_ms;
+
+      if (engine_->vie()->rtp()->GetSendChannelRtcpStatistics(
+          channel_id,
+          outgoing_stream_rtcp_stats,
+          outgoing_stream_rtt_ms) == 0) {
+        // Convert Q8 to float.
+        sinfo.packets_lost = outgoing_stream_rtcp_stats.cumulative_lost;
+        sinfo.fraction_lost = static_cast<float>(
+            outgoing_stream_rtcp_stats.fraction_lost) / (1 << 8);
+        sinfo.rtt_ms = outgoing_stream_rtt_ms;
+      }
+#else
       // Get received RTCP statistics for the sender, if available.
       // It's not a fatal error if we can't, since RTCP may not have arrived
       // yet.
@@ -2273,6 +2351,7 @@
         sinfo.fraction_lost = static_cast<float>(r_fraction_lost) / (1 << 8);
         sinfo.rtt_ms = r_rtt_ms;
       }
+#endif
       info->senders.push_back(sinfo);
 
       unsigned int channel_total_bitrate_sent = 0;
@@ -2327,6 +2406,19 @@
         ssrc == 0)
       continue;
 
+#ifdef USE_WEBRTC_DEV_BRANCH
+    webrtc::StreamDataCounters sent;
+    webrtc::StreamDataCounters received;
+    if (engine_->vie()->rtp()->GetRtpStatistics(channel->channel_id(),
+                                                sent, received) != 0) {
+      LOG_RTCERR1(GetRtpStatistics, channel->channel_id());
+      return false;
+    }
+    VideoReceiverInfo rinfo;
+    rinfo.add_ssrc(ssrc);
+    rinfo.bytes_rcvd = received.bytes;
+    rinfo.packets_rcvd = received.packets;
+#else
     unsigned int bytes_sent, packets_sent, bytes_recv, packets_recv;
     if (engine_->vie()->rtp()->GetRTPStatistics(
         channel->channel_id(), bytes_sent, packets_sent, bytes_recv,
@@ -2335,9 +2427,10 @@
       return false;
     }
     VideoReceiverInfo rinfo;
-    rinfo.ssrcs.push_back(ssrc);
+    rinfo.add_ssrc(ssrc);
     rinfo.bytes_rcvd = bytes_recv;
     rinfo.packets_rcvd = packets_recv;
+#endif
     rinfo.packets_lost = -1;
     rinfo.packets_concealed = -1;
     rinfo.fraction_lost = -1;  // from SentRTCP
@@ -2349,6 +2442,20 @@
     rinfo.framerate_output = fps;
     channel->decoder_observer()->ExportTo(&rinfo);
 
+#ifdef USE_WEBRTC_DEV_BRANCH
+    // Get the locally computed RTCP statistics for the received RTP stream.
+    webrtc::RtcpStatistics incoming_stream_rtcp_stats;
+    int incoming_stream_rtt_ms;
+    if (engine_->vie()->rtp()->GetReceiveChannelRtcpStatistics(
+        channel->channel_id(),
+        incoming_stream_rtcp_stats,
+        incoming_stream_rtt_ms) == 0) {
+      // Convert Q8 to float.
+      rinfo.packets_lost = incoming_stream_rtcp_stats.cumulative_lost;
+      rinfo.fraction_lost = static_cast<float>(
+          incoming_stream_rtcp_stats.fraction_lost) / (1 << 8);
+    }
+#else
     // Get sent RTCP statistics.
     uint16 s_fraction_lost;
     unsigned int s_cumulative_lost;
@@ -2362,6 +2469,7 @@
       rinfo.packets_lost = s_cumulative_lost;
       rinfo.fraction_lost = static_cast<float>(s_fraction_lost) / (1 << 8);
     }
+#endif
     info->receivers.push_back(rinfo);
 
     unsigned int estimated_recv_stream_bandwidth = 0;
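
A side note on the Q8 conversions used in the sender and receiver stats above: RTCP carries fraction_lost as an 8-bit fixed-point value, so dividing by (1 << 8) yields the floating-point loss fraction. A small illustrative snippet (the reported value is made up):

    uint8 reported_fraction_lost = 64;  // as carried in an RTCP report block
    float fraction = static_cast<float>(reported_fraction_lost) / (1 << 8);  // 0.25, i.e. 25% loss
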
@@ -2616,6 +2724,10 @@
 
   bool dscp_option_changed = (options_.dscp != options.dscp);
 
+  bool suspend_below_min_bitrate_changed =
+      options.suspend_below_min_bitrate.IsSet() &&
+      (options_.suspend_below_min_bitrate != options.suspend_below_min_bitrate);
+
   bool conference_mode_turned_off = false;
   if (options_.conference_mode.IsSet() && options.conference_mode.IsSet() &&
       options_.conference_mode.GetWithDefaultIfUnset(false) &&
@@ -2722,6 +2834,19 @@
       LOG(LS_WARNING) << "Failed to set DSCP settings for video channel";
     }
   }
+  if (suspend_below_min_bitrate_changed) {
+#ifdef USE_WEBRTC_DEV_BRANCH
+    if (options_.suspend_below_min_bitrate.GetWithDefaultIfUnset(false)) {
+      for (SendChannelMap::iterator it = send_channels_.begin();
+           it != send_channels_.end(); ++it) {
+        engine()->vie()->codec()->SuspendBelowMinBitrate(
+            it->second->channel_id());
+      }
+    } else {
+      LOG(LS_WARNING) << "Cannot disable video suspension once it is enabled";
+    }
+#endif
+  }
   return true;
 }
 
@@ -3317,6 +3442,15 @@
       return false;
     }
 
+    // NOTE: SetRtxSendPayloadType must be called after all simulcast SSRCs
+    // are configured. Otherwise, SSRCs configured after this point will use
+    // the primary payload type for RTX.
+    if (send_rtx_type_ != -1 &&
+        engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
+                                                      send_rtx_type_) != 0) {
+      LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
+      return false;
+    }
   }
   send_channel->set_interval(
       cricket::VideoFormat::FpsToInterval(target_codec.maxFramerate));
@@ -3392,6 +3526,9 @@
                  << vie_codec.codecSpecific.VP8.keyFrameInterval;
   }
 
+  if (send_rtx_type_ != -1) {
+    LOG(LS_INFO) << "RTX payload type: " << send_rtx_type_;
+  }
 }
 
 bool WebRtcVideoMediaChannel::SetReceiveCodecs(
@@ -3656,6 +3793,22 @@
       header_extension_uri);
   return SetHeaderExtension(setter, channel_id, extension);
 }
+
+bool WebRtcVideoMediaChannel::SetLocalRtxSsrc(int channel_id,
+                                              const StreamParams& send_params,
+                                              uint32 primary_ssrc,
+                                              int stream_idx) {
+  uint32 rtx_ssrc = 0;
+  bool has_rtx = send_params.GetFidSsrc(primary_ssrc, &rtx_ssrc);
+  if (has_rtx && engine()->vie()->rtp()->SetLocalSSRC(
+      channel_id, rtx_ssrc, webrtc::kViEStreamTypeRtx, stream_idx) != 0) {
+    LOG_RTCERR4(SetLocalSSRC, channel_id, rtx_ssrc,
+                webrtc::kViEStreamTypeRtx, stream_idx);
+    return false;
+  }
+  return true;
+}
+
 }  // namespace cricket
 
 #endif  // HAVE_WEBRTC_VIDEO
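
For reference, the RTX handling added above keys off StreamParams that carry an FID (retransmission) SSRC paired with a primary SSRC. A minimal sketch of such a stream description, assuming the AddFidSsrc/GetFidSsrc helpers from talk/media/base/streamparams.h (the SSRC values are illustrative):

    cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1);  // primary SSRC 1
    sp.AddFidSsrc(1, 4);                         // pair RTX SSRC 4 with primary SSRC 1
    uint32 rtx_ssrc = 0;
    bool has_rtx = sp.GetFidSsrc(1, &rtx_ssrc);  // true; rtx_ssrc == 4

With params like these, AddSendStream maps the RTX SSRC to webrtc::kViEStreamTypeRtx via SetLocalRtxSsrc, and AddRecvStream registers it through SetRemoteSSRCType.
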
diff --git a/talk/media/webrtc/webrtcvideoengine.h b/talk/media/webrtc/webrtcvideoengine.h
index dba8cc9..af4d627 100644
--- a/talk/media/webrtc/webrtcvideoengine.h
+++ b/talk/media/webrtc/webrtcvideoengine.h
@@ -180,6 +180,9 @@
   struct VideoCodecPref {
     const char* name;
     int payload_type;
+    // For RTX, this field is the associated payload type that RTX retransmits
+    // (the "apt" parameter). For other codecs, it should be set to -1.
+    int associated_payload_type;
     int pref;
   };
 
@@ -356,9 +359,6 @@
   bool StopSend(WebRtcVideoChannelSendInfo* send_channel);
   bool SendIntraFrame(int channel_id);
 
-  // Send with one local SSRC. Normal case.
-  bool IsOneSsrcStream(const StreamParams& sp);
-
   bool HasReadySendChannels();
 
   // Send channel key returns the key corresponding to the provided local SSRC
@@ -400,6 +400,10 @@
   // Signal when cpu adaptation has no further scope to adapt.
   void OnCpuAdaptationUnable();
 
+  // Set the local (send-side) RTX SSRC corresponding to primary_ssrc.
+  bool SetLocalRtxSsrc(int channel_id, const StreamParams& send_params,
+                       uint32 primary_ssrc, int stream_idx);
+
   // Global state.
   WebRtcVideoEngine* engine_;
   VoiceMediaChannel* voice_channel_;
@@ -423,6 +427,7 @@
   // Global send side state.
   SendChannelMap send_channels_;
   talk_base::scoped_ptr<webrtc::VideoCodec> send_codec_;
+  int send_rtx_type_;
   int send_red_type_;
   int send_fec_type_;
   int send_min_bitrate_;
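
The unit tests below expect the engine to advertise an RTX codec with payload type 96 and an "apt" (associated payload type) of 100, i.e. retransmissions of VP8. Under that assumption, a codec-preference entry would look roughly like this self-contained sketch (field order as in VideoCodecPref above; the pref values are hypothetical):

    struct CodecPref { const char* name; int payload_type; int associated_payload_type; int pref; };
    static const CodecPref kIllustrativePrefs[] = {
      {"VP8", 100, -1, 0},   // ordinary codec: associated_payload_type unused
      {"rtx", 96, 100, 1},   // RTX carrying retransmissions of payload type 100 (VP8)
    };
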
diff --git a/talk/media/webrtc/webrtcvideoengine_unittest.cc b/talk/media/webrtc/webrtcvideoengine_unittest.cc
index 05fac8e..e37bc3c 100644
--- a/talk/media/webrtc/webrtcvideoengine_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine_unittest.cc
@@ -656,6 +656,45 @@
   EXPECT_TRUE(vie_.GetRembStatusContribute(new_channel_num));
 }
 
+TEST_F(WebRtcVideoEngineTestFake, RecvStreamWithRtx) {
+  EXPECT_TRUE(SetupEngine());
+  int default_channel = vie_.GetLastChannel();
+  cricket::VideoOptions options;
+  options.conference_mode.Set(true);
+  EXPECT_TRUE(channel_->SetOptions(options));
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3))));
+  EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
+  EXPECT_TRUE(channel_->SetSend(true));
+  EXPECT_TRUE(channel_->AddRecvStream(
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs1),
+                                            MAKE_VECTOR(kRtxSsrc1))));
+  int new_channel_num = vie_.GetLastChannel();
+  EXPECT_NE(default_channel, new_channel_num);
+  EXPECT_EQ(4, vie_.GetRemoteRtxSsrc(new_channel_num));
+}
+
+TEST_F(WebRtcVideoEngineTestFake, RecvStreamNoRtx) {
+  EXPECT_TRUE(SetupEngine());
+  int default_channel = vie_.GetLastChannel();
+  cricket::VideoOptions options;
+  options.conference_mode.Set(true);
+  EXPECT_TRUE(channel_->SetOptions(options));
+  EXPECT_TRUE(channel_->AddSendStream(
+      cricket::CreateSimWithRtxStreamParams("cname",
+                                            MAKE_VECTOR(kSsrcs3),
+                                            MAKE_VECTOR(kRtxSsrcs3))));
+  EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
+  EXPECT_TRUE(channel_->SetSend(true));
+  EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
+  int new_channel_num = vie_.GetLastChannel();
+  EXPECT_NE(default_channel, new_channel_num);
+  EXPECT_EQ(-1, vie_.GetRemoteRtxSsrc(new_channel_num));
+}
+
 // Test support for RTP timestamp offset header extension.
 TEST_F(WebRtcVideoEngineTestFake, RtpTimestampOffsetHeaderExtensions) {
   EXPECT_TRUE(SetupEngine());
@@ -1319,7 +1358,7 @@
 TEST_F(WebRtcVideoEngineTest, FindCodec) {
   // We should not need to init engine in order to get codecs.
   const std::vector<cricket::VideoCodec>& c = engine_.codecs();
-  EXPECT_EQ(3U, c.size());
+  EXPECT_EQ(4U, c.size());
 
   cricket::VideoCodec vp8(104, "VP8", 320, 200, 30, 0);
   EXPECT_TRUE(engine_.FindCodec(vp8));
@@ -1354,6 +1393,24 @@
 
   cricket::VideoCodec fec_ci(102, "ulpfec", 0, 0, 30, 0);
   EXPECT_TRUE(engine_.FindCodec(fec));
+
+  cricket::VideoCodec rtx(96, "rtx", 0, 0, 30, 0);
+  EXPECT_TRUE(engine_.FindCodec(rtx));
+}
+
+TEST_F(WebRtcVideoEngineTest, RtxCodecHasAptSet) {
+  std::vector<cricket::VideoCodec>::const_iterator it;
+  bool apt_checked = false;
+  for (it = engine_.codecs().begin(); it != engine_.codecs().end(); ++it) {
+    if (_stricmp(cricket::kRtxCodecName, it->name.c_str()) || it->id != 96) {
+      continue;
+    }
+    int apt;
+    EXPECT_TRUE(it->GetParam("apt", &apt));
+    EXPECT_EQ(100, apt);
+    apt_checked = true;
+  }
+  EXPECT_TRUE(apt_checked);
 }
 
 TEST_F(WebRtcVideoEngineTest, StartupShutdown) {
diff --git a/talk/media/webrtc/webrtcvoiceengine.cc b/talk/media/webrtc/webrtcvoiceengine.cc
index 4911c59..1b77671 100644
--- a/talk/media/webrtc/webrtcvoiceengine.cc
+++ b/talk/media/webrtc/webrtcvoiceengine.cc
@@ -230,6 +230,7 @@
   options.experimental_agc.Set(false);
   options.experimental_aec.Set(false);
   options.aec_dump.Set(false);
+  options.experimental_acm.Set(false);
   return options;
 }
 
@@ -260,7 +261,7 @@
     if (!engine_->voe_sc()) {
       return false;
     }
-    webrtc_channel_ = engine_->voe_sc()->base()->CreateChannel();
+    webrtc_channel_ = engine_->CreateSoundclipVoiceChannel();
     if (webrtc_channel_ == -1) {
       LOG_RTCERR0(CreateChannel);
       return false;
@@ -333,6 +334,7 @@
       log_filter_(SeverityToFilter(kDefaultLogSeverity)),
       is_dumping_aec_(false),
       desired_local_monitor_enable_(false),
+      use_experimental_acm_(false),
       tx_processor_ssrc_(0),
       rx_processor_ssrc_(0) {
   Construct();
@@ -350,6 +352,7 @@
       log_filter_(SeverityToFilter(kDefaultLogSeverity)),
       is_dumping_aec_(false),
       desired_local_monitor_enable_(false),
+      use_experimental_acm_(false),
       tx_processor_ssrc_(0),
       rx_processor_ssrc_(0) {
   Construct();
@@ -377,6 +380,10 @@
       RtpHeaderExtension(kRtpAudioLevelHeaderExtension,
                          kRtpAudioLevelHeaderExtensionId));
   options_ = GetDefaultEngineOptions();
+
+  // Initialize the VoE configuration with the default ACM factory.
+  voe_config_.Set<webrtc::AudioCodingModuleFactory>(
+      new webrtc::AudioCodingModuleFactory);
 }
 
 static bool IsOpus(const AudioCodec& codec) {
@@ -714,6 +721,12 @@
 
   LOG(LS_INFO) << "Applying audio options: " << options.ToString();
 
+  // Configure whether ACM1 or ACM2 is used.
+  bool enable_acm2 = false;
+  if (options.experimental_acm.Get(&enable_acm2)) {
+    EnableExperimentalAcm(enable_acm2);
+  }
+
   webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing();
 
   bool echo_cancellation;
@@ -940,7 +953,7 @@
   }
   if (ret) {
     if (voe_wrapper_->hw()->SetRecordingDevice(in_id) == -1) {
-      LOG_RTCERR2(SetRecordingDevice, in_device->name, in_id);
+      LOG_RTCERR2(SetRecordingDevice, in_name, in_id);
       ret = false;
     }
   }
@@ -952,7 +965,7 @@
   }
   if (ret) {
     if (voe_wrapper_->hw()->SetPlayoutDevice(out_id) == -1) {
-      LOG_RTCERR2(SetPlayoutDevice, out_device->name, out_id);
+      LOG_RTCERR2(SetPlayoutDevice, out_name, out_id);
       ret = false;
     }
   }
@@ -1248,6 +1261,21 @@
   return false;
 }
 
+void WebRtcVoiceEngine::EnableExperimentalAcm(bool enable) {
+  if (enable == use_experimental_acm_)
+    return;
+  if (enable) {
+    LOG(LS_INFO) << "VoiceEngine is set to use new ACM (ACM2 + NetEq4).";
+    voe_config_.Set<webrtc::AudioCodingModuleFactory>(
+        new webrtc::NewAudioCodingModuleFactory());
+  } else {
+    LOG(LS_INFO) << "VoiceEngine is set to use legacy ACM (ACM1 + Neteq3).";
+    voe_config_.Set<webrtc::AudioCodingModuleFactory>(
+        new webrtc::AudioCodingModuleFactory());
+  }
+  use_experimental_acm_ = enable;
+}
+
 void WebRtcVoiceEngine::Print(webrtc::TraceLevel level, const char* trace,
                               int length) {
   talk_base::LoggingSeverity sev = talk_base::LS_VERBOSE;
@@ -1580,6 +1608,22 @@
   }
 }
 
+int WebRtcVoiceEngine::CreateVoiceChannel(VoEWrapper* voice_engine_wrapper) {
+#ifdef USE_WEBRTC_DEV_BRANCH
+  return voice_engine_wrapper->base()->CreateChannel(voe_config_);
+#else
+  return voice_engine_wrapper->base()->CreateChannel();
+#endif
+}
+
+int WebRtcVoiceEngine::CreateMediaVoiceChannel() {
+  return CreateVoiceChannel(voe_wrapper_.get());
+}
+
+int WebRtcVoiceEngine::CreateSoundclipVoiceChannel() {
+  return CreateVoiceChannel(voe_wrapper_sc_.get());
+}
+
 // This struct relies on the generated copy constructor and assignment operator
 // since it is used in an stl::map.
 struct WebRtcVoiceMediaChannel::WebRtcVoiceChannelInfo {
@@ -1597,7 +1641,7 @@
 WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel(WebRtcVoiceEngine *engine)
     : WebRtcMediaChannel<VoiceMediaChannel, WebRtcVoiceEngine>(
           engine,
-          engine->voe()->base()->CreateChannel()),
+          engine->CreateMediaVoiceChannel()),
       send_bw_setting_(false),
       send_autobw_(false),
       send_bw_bps_(0),
@@ -2231,7 +2275,7 @@
     channel = voe_channel();
   } else {
     // Create a new channel for sending audio data.
-    channel = engine()->voe()->base()->CreateChannel();
+    channel = engine()->CreateMediaVoiceChannel();
     if (channel == -1) {
       LOG_RTCERR0(CreateChannel);
       return false;
@@ -2346,7 +2390,7 @@
   }
 
   // Create a new channel for receiving audio data.
-  int channel = engine()->voe()->base()->CreateChannel();
+  int channel = engine()->CreateMediaVoiceChannel();
   if (channel == -1) {
     LOG_RTCERR0(CreateChannel);
     return false;
@@ -2966,7 +3010,7 @@
       continue;
     }
 
-    sinfo.ssrc = ssrc;
+    sinfo.add_ssrc(ssrc);
     sinfo.codec_name = send_codec_.get() ? send_codec_->plname : "";
     sinfo.bytes_sent = cs.bytesSent;
     sinfo.packets_sent = cs.packetsSent;
@@ -2988,7 +3032,7 @@
       for (iter = receive_blocks.begin(); iter != receive_blocks.end();
            ++iter) {
         // Lookup report for send ssrc only.
-        if (iter->source_SSRC == sinfo.ssrc) {
+        if (iter->source_SSRC == sinfo.ssrc()) {
           // Convert Q8 to floating point.
           sinfo.fraction_lost = static_cast<float>(iter->fraction_lost) / 256;
           // Convert samples to milliseconds.
@@ -3041,7 +3085,7 @@
         engine()->voe()->rtp()->GetRTCPStatistics(*it, cs) != -1 &&
         engine()->voe()->codec()->GetRecCodec(*it, codec) != -1) {
       VoiceReceiverInfo rinfo;
-      rinfo.ssrc = ssrc;
+      rinfo.add_ssrc(ssrc);
       rinfo.bytes_rcvd = cs.bytesReceived;
       rinfo.packets_rcvd = cs.packetsReceived;
       // The next four fields are from the most recently sent RTCP report.
diff --git a/talk/media/webrtc/webrtcvoiceengine.h b/talk/media/webrtc/webrtcvoiceengine.h
index b8b50b0..29807ef 100644
--- a/talk/media/webrtc/webrtcvoiceengine.h
+++ b/talk/media/webrtc/webrtcvoiceengine.h
@@ -43,6 +43,8 @@
 #include "talk/media/webrtc/webrtcexport.h"
 #include "talk/media/webrtc/webrtcvoe.h"
 #include "talk/session/media/channel.h"
+#include "webrtc/common.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
 
 #if !defined(LIBPEERCONNECTION_LIB) && \
     !defined(LIBPEERCONNECTION_IMPLEMENTATION)
@@ -175,6 +177,10 @@
   // Check whether the supplied trace should be ignored.
   bool ShouldIgnoreTrace(const std::string& trace);
 
+  // Create VoiceEngine channels, for media and for soundclips respectively.
+  int CreateMediaVoiceChannel();
+  int CreateSoundclipVoiceChannel();
+
  private:
   typedef std::vector<WebRtcSoundclipMedia *> SoundclipList;
   typedef std::vector<WebRtcVoiceMediaChannel *> ChannelList;
@@ -192,6 +198,9 @@
   // allows us to selectively turn on and off different options easily
   // at any time.
   bool ApplyOptions(const AudioOptions& options);
+  // Configure to use ACM2 if |enable| is true; otherwise configure to use
+  // ACM1.
+  void EnableExperimentalAcm(bool enable);
   virtual void Print(webrtc::TraceLevel level, const char* trace, int length);
   virtual void CallbackOnError(int channel, int errCode);
   // Given the device type, name, and id, find device id. Return true and
@@ -215,6 +224,7 @@
 
   void StartAecDump(const std::string& filename);
   void StopAecDump();
+  int CreateVoiceChannel(VoEWrapper* voe);
 
   // When a voice processor registers with the engine, it is connected
   // to either the Rx or Tx signals, based on the direction parameter.
@@ -246,6 +256,10 @@
   // callback as well as the RegisterChannel/UnregisterChannel.
   talk_base::CriticalSection channels_cs_;
   webrtc::AgcConfig default_agc_config_;
+
+  webrtc::Config voe_config_;
+  bool use_experimental_acm_;
+
   bool initialized_;
   // See SetOptions and SetOptionOverrides for a description of the
   // difference between options and overrides.
diff --git a/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
index d0b06bb..cf8a54f 100644
--- a/talk/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -1653,7 +1653,7 @@
 
   // Verify the statistic information is correct.
   for (unsigned int i = 0; i < ARRAY_SIZE(kSsrcs4); ++i) {
-    EXPECT_EQ(kSsrcs4[i], info.senders[i].ssrc);
+    EXPECT_EQ(kSsrcs4[i], info.senders[i].ssrc());
     EXPECT_EQ(kPcmuCodec.name, info.senders[i].codec_name);
     EXPECT_EQ(cricket::kIntStatValue, info.senders[i].bytes_sent);
     EXPECT_EQ(cricket::kIntStatValue, info.senders[i].packets_sent);
@@ -1978,7 +1978,7 @@
   cricket::VoiceMediaInfo info;
   EXPECT_EQ(true, channel_->GetStats(&info));
   EXPECT_EQ(1u, info.senders.size());
-  EXPECT_EQ(kSsrc1, info.senders[0].ssrc);
+  EXPECT_EQ(kSsrc1, info.senders[0].ssrc());
   EXPECT_EQ(kPcmuCodec.name, info.senders[0].codec_name);
   EXPECT_EQ(cricket::kIntStatValue, info.senders[0].bytes_sent);
   EXPECT_EQ(cricket::kIntStatValue, info.senders[0].packets_sent);
@@ -2982,3 +2982,40 @@
 #endif
 
 
+TEST_F(WebRtcVoiceEngineTestFake, SetExperimentalAcm) {
+  EXPECT_TRUE(SetupEngine());
+
+  // By default experimental ACM should not be used.
+  int media_channel = engine_.CreateMediaVoiceChannel();
+  ASSERT_GE(media_channel, 0);
+  EXPECT_FALSE(voe_.IsUsingExperimentalAcm(media_channel));
+
+  int soundclip_channel = engine_.CreateSoundclipVoiceChannel();
+  ASSERT_GE(soundclip_channel, 0);
+  EXPECT_FALSE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
+
+#ifdef USE_WEBRTC_DEV_BRANCH
+  // Set options to use experimental ACM.
+  cricket::AudioOptions options;
+  options.experimental_acm.Set(true);
+  ASSERT_TRUE(engine_.SetOptions(options));
+  media_channel = engine_.CreateMediaVoiceChannel();
+  ASSERT_GE(media_channel, 0);
+  EXPECT_TRUE(voe_.IsUsingExperimentalAcm(media_channel));
+
+  soundclip_channel = engine_.CreateSoundclipVoiceChannel();
+  ASSERT_GE(soundclip_channel, 0);
+  EXPECT_TRUE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
+
+  // Set option to use legacy ACM.
+  options.experimental_acm.Set(false);
+  ASSERT_TRUE(engine_.SetOptions(options));
+  media_channel = engine_.CreateMediaVoiceChannel();
+  ASSERT_GE(media_channel, 0);
+  EXPECT_FALSE(voe_.IsUsingExperimentalAcm(media_channel));
+
+  soundclip_channel = engine_.CreateSoundclipVoiceChannel();
+  ASSERT_GE(soundclip_channel, 0);
+  EXPECT_FALSE(voe_sc_.IsUsingExperimentalAcm(soundclip_channel));
+#endif
+}
diff --git a/talk/p2p/base/turnport.cc b/talk/p2p/base/turnport.cc
index 14388e3..92f62c8 100644
--- a/talk/p2p/base/turnport.cc
+++ b/talk/p2p/base/turnport.cc
@@ -206,14 +206,6 @@
     return;
   }
 
-  // If protocol family of server address doesn't match with local, return.
-  if (!IsCompatibleAddress(server_address_.address)) {
-    LOG(LS_ERROR) << "Server IP address family does not match with "
-                  << "local host address family type";
-    OnAllocateError();
-    return;
-  }
-
   if (!server_address_.address.port()) {
     // We will set default TURN port, if no port is set in the address.
     server_address_.address.SetPort(TURN_DEFAULT_PORT);
@@ -222,6 +214,14 @@
   if (server_address_.address.IsUnresolved()) {
     ResolveTurnAddress(server_address_.address);
   } else {
+    // If protocol family of server address doesn't match with local, return.
+    if (!IsCompatibleAddress(server_address_.address)) {
+      LOG(LS_ERROR) << "Server IP address family does not match with "
+                    << "local host address family type";
+      OnAllocateError();
+      return;
+    }
+
     LOG_J(LS_INFO, this) << "Trying to connect to TURN server via "
                          << ProtoToString(server_address_.proto) << " @ "
                          << server_address_.address.ToSensitiveString();
diff --git a/talk/session/media/channelmanager.cc b/talk/session/media/channelmanager.cc
index 47f0fc5..d4fcc79 100644
--- a/talk/session/media/channelmanager.cc
+++ b/talk/session/media/channelmanager.cc
@@ -939,4 +939,12 @@
       Bind(&MediaEngineInterface::GetStartCaptureFormat, media_engine_.get()));
 }
 
+bool ChannelManager::SetAudioOptions(const AudioOptions& options) {
+  if (!media_engine_->SetAudioOptions(options)) {
+    return false;
+  }
+  audio_options_ = options;
+  return true;
+}
+
 }  // namespace cricket
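
A minimal sketch of how a caller could opt into the experimental ACM through the new plumbing (the channel_manager pointer and the call site are assumptions; on USE_WEBRTC_DEV_BRANCH builds, and per the voice-engine test above, the setting affects channels created afterwards):

    cricket::AudioOptions options;
    options.experimental_acm.Set(true);  // route newly created channels through ACM2/NetEq4
    if (!channel_manager->SetAudioOptions(options)) {
      LOG(LS_WARNING) << "Failed to enable the experimental ACM";
    }
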
diff --git a/talk/session/media/channelmanager.h b/talk/session/media/channelmanager.h
index af95f92..fdb8f73 100644
--- a/talk/session/media/channelmanager.h
+++ b/talk/session/media/channelmanager.h
@@ -225,6 +225,11 @@
   // TODO(hellner): Remove this function once the engine capturer has been
   // removed.
   VideoFormat GetStartCaptureFormat();
+
+  // TODO(turajs): Remove this function when ACM2 is in use. Used mainly to
+  // choose between ACM1 and ACM2.
+  bool SetAudioOptions(const AudioOptions& options);
+
  protected:
   // Adds non-transient parameters which can only be changed through the
   // options store.