Merge from Chromium at DEPS revision 278856

This commit was generated by merge_to_master.py.

Change-Id: I7dc08db577ee87432079e1e7508ea87dd8d0bc6c
diff --git a/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java b/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java
index b171b58..240e996 100644
--- a/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java
+++ b/app/webrtc/javatests/src/org/webrtc/PeerConnectionTest.java
@@ -525,7 +525,7 @@
   private void doTest() throws Exception {
     CountDownLatch testDone = new CountDownLatch(1);
     System.gc();  // Encourage any GC-related threads to start up.
-    //TreeSet<String> threadsBeforeTest = allThreads();
+    TreeSet<String> threadsBeforeTest = allThreads();
 
     PeerConnectionFactory factory = new PeerConnectionFactory();
     // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
@@ -742,11 +742,8 @@
     factory.dispose();
     System.gc();
 
-    // TODO(ldixon): the usrsctp threads are not cleaned up (issue 2749) and
-    // caused the assert to fail. We should reenable the assert once issue 2749
-    // is fixed.
-    //TreeSet<String> threadsAfterTest = allThreads();
-    //assertEquals(threadsBeforeTest, threadsAfterTest);
+    TreeSet<String> threadsAfterTest = allThreads();
+    assertEquals(threadsBeforeTest, threadsAfterTest);
     Thread.sleep(100);
   }
 
diff --git a/app/webrtc/mediaconstraintsinterface.cc b/app/webrtc/mediaconstraintsinterface.cc
index b19604b..0ecadd6 100644
--- a/app/webrtc/mediaconstraintsinterface.cc
+++ b/app/webrtc/mediaconstraintsinterface.cc
@@ -118,6 +118,9 @@
     "googHighBitrate";
 const char MediaConstraintsInterface::kVeryHighBitrate[] =
     "googVeryHighBitrate";
+const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
+const char MediaConstraintsInterface::kOpusFec[] = "googOpusFec";
+
 
 // Set |value| to the value associated with the first appearance of |key|, or
 // return false if |key| is not found.
diff --git a/app/webrtc/mediaconstraintsinterface.h b/app/webrtc/mediaconstraintsinterface.h
index 36cf20d..36257db 100644
--- a/app/webrtc/mediaconstraintsinterface.h
+++ b/app/webrtc/mediaconstraintsinterface.h
@@ -132,6 +132,12 @@
   static const char kHighStartBitrate[];  // googHighStartBitrate
   static const char kHighBitrate[];  // googHighBitrate
   static const char kVeryHighBitrate[];  // googVeryHighBitrate
+  static const char kPayloadPadding[];  // googPayloadPadding
+
+  // PeerConnection codec constraint keys. These should be combined with the
+  // keys above.
+  // kOpusFec controls whether we ask the other side to turn on FEC for Opus.
+  static const char kOpusFec[];  // googOpusFec
 
   // The prefix of internal-only constraints whose JS set values should be
   // stripped by Chrome before passed down to Libjingle.
diff --git a/app/webrtc/mediastreamsignaling.cc b/app/webrtc/mediastreamsignaling.cc
index c7fa673..99f627a 100644
--- a/app/webrtc/mediastreamsignaling.cc
+++ b/app/webrtc/mediastreamsignaling.cc
@@ -274,6 +274,23 @@
   return true;
 }
 
+void MediaStreamSignaling::RemoveSctpDataChannel(int sid) {
+  for (SctpDataChannels::iterator iter = sctp_data_channels_.begin();
+       iter != sctp_data_channels_.end();
+       ++iter) {
+    if ((*iter)->id() == sid) {
+      sctp_data_channels_.erase(iter);
+
+      if (talk_base::IsEven(sid) && sid <= last_allocated_sctp_even_sid_) {
+        last_allocated_sctp_even_sid_ = sid - 2;
+      } else if (talk_base::IsOdd(sid) && sid <= last_allocated_sctp_odd_sid_) {
+        last_allocated_sctp_odd_sid_ = sid - 2;
+      }
+      return;
+    }
+  }
+}
+
 bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
   if (local_streams_->find(local_stream->label()) != NULL) {
     LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
@@ -481,12 +498,19 @@
 }
 
 void MediaStreamSignaling::OnDataChannelClose() {
-  RtpDataChannels::iterator it1 = rtp_data_channels_.begin();
-  for (; it1 != rtp_data_channels_.end(); ++it1) {
+  // Use a temporary copy of the RTP/SCTP DataChannel list because the
+  // DataChannel may call back into us and try to modify the list.
+  RtpDataChannels temp_rtp_dcs;
+  temp_rtp_dcs.swap(rtp_data_channels_);
+  RtpDataChannels::iterator it1 = temp_rtp_dcs.begin();
+  for (; it1 != temp_rtp_dcs.end(); ++it1) {
     it1->second->OnDataEngineClose();
   }
-  SctpDataChannels::iterator it2 = sctp_data_channels_.begin();
-  for (; it2 != sctp_data_channels_.end(); ++it2) {
+
+  SctpDataChannels temp_sctp_dcs;
+  temp_sctp_dcs.swap(sctp_data_channels_);
+  SctpDataChannels::iterator it2 = temp_sctp_dcs.begin();
+  for (; it2 != temp_sctp_dcs.end(); ++it2) {
     (*it2)->OnDataEngineClose();
   }
 }
diff --git a/app/webrtc/mediastreamsignaling.h b/app/webrtc/mediastreamsignaling.h
index 8051289..7378166 100644
--- a/app/webrtc/mediastreamsignaling.h
+++ b/app/webrtc/mediastreamsignaling.h
@@ -198,6 +198,7 @@
   // After we receive an OPEN message, create a data channel and add it.
   bool AddDataChannelFromOpenMessage(const cricket::ReceiveDataParams& params,
                                      const talk_base::Buffer& payload);
+  void RemoveSctpDataChannel(int sid);
 
   // Returns a MediaSessionOptions struct with options decided by |constraints|,
   // the local MediaStreams and DataChannels.
diff --git a/app/webrtc/mediastreamsignaling_unittest.cc b/app/webrtc/mediastreamsignaling_unittest.cc
index 14b68e9..150058e 100644
--- a/app/webrtc/mediastreamsignaling_unittest.cc
+++ b/app/webrtc/mediastreamsignaling_unittest.cc
@@ -1141,6 +1141,47 @@
   EXPECT_NE(old_id, new_id);
 }
 
+// Verifies that SCTP ids of removed DataChannels can be reused.
+TEST_F(MediaStreamSignalingTest, SctpIdReusedForRemovedDataChannel) {
+  int odd_id = 1;
+  int even_id = 0;
+  AddDataChannel(cricket::DCT_SCTP, "a", odd_id);
+  AddDataChannel(cricket::DCT_SCTP, "a", even_id);
+
+  int allocated_id = -1;
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER,
+                                          &allocated_id));
+  EXPECT_EQ(odd_id + 2, allocated_id);
+  AddDataChannel(cricket::DCT_SCTP, "a", allocated_id);
+
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT,
+                                          &allocated_id));
+  EXPECT_EQ(even_id + 2, allocated_id);
+  AddDataChannel(cricket::DCT_SCTP, "a", allocated_id);
+
+  signaling_->RemoveSctpDataChannel(odd_id);
+  signaling_->RemoveSctpDataChannel(even_id);
+
+  // Verifies that removed DataChannel ids are reused.
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER,
+                                          &allocated_id));
+  EXPECT_EQ(odd_id, allocated_id);
+
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT,
+                                          &allocated_id));
+  EXPECT_EQ(even_id, allocated_id);
+
+  // Verifies that ids of DataChannels still in use are not handed out again.
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER,
+                                          &allocated_id));
+  EXPECT_NE(odd_id + 2, allocated_id);
+
+  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT,
+                                          &allocated_id));
+  EXPECT_NE(even_id + 2, allocated_id);
+
+}
+
 // Verifies that duplicated label is not allowed for RTP data channel.
 TEST_F(MediaStreamSignalingTest, RtpDuplicatedLabelNotAllowed) {
   AddDataChannel(cricket::DCT_RTP, "a", -1);
diff --git a/app/webrtc/webrtcsession.cc b/app/webrtc/webrtcsession.cc
index 17177bd..1b1c287 100644
--- a/app/webrtc/webrtcsession.cc
+++ b/app/webrtc/webrtcsession.cc
@@ -371,14 +371,15 @@
 
 // Set |option| to the highest-priority value of |key| in the optional
 // constraints if the key is found and has a valid value.
+template<typename T>
 static void SetOptionFromOptionalConstraint(
     const MediaConstraintsInterface* constraints,
-    const std::string& key, cricket::Settable<int>* option) {
+    const std::string& key, cricket::Settable<T>* option) {
   if (!constraints) {
     return;
   }
   std::string string_value;
-  int value;
+  T value;
   if (constraints->GetOptional().FindFirst(key, &string_value)) {
     if (talk_base::FromString(string_value, &value)) {
       option->Set(value);
@@ -565,26 +566,22 @@
       MediaConstraintsInterface::kCpuOveruseThreshold,
       &video_options_.cpu_overuse_threshold);
   SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kCpuOveruseDetection,
+      &video_options_.cpu_overuse_detection);
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kCpuOveruseEncodeUsage,
+      &video_options_.cpu_overuse_encode_usage);
+  SetOptionFromOptionalConstraint(constraints,
       MediaConstraintsInterface::kCpuUnderuseEncodeRsdThreshold,
       &video_options_.cpu_underuse_encode_rsd_threshold);
   SetOptionFromOptionalConstraint(constraints,
       MediaConstraintsInterface::kCpuOveruseEncodeRsdThreshold,
       &video_options_.cpu_overuse_encode_rsd_threshold);
 
-  if (FindConstraint(
-      constraints,
-      MediaConstraintsInterface::kCpuOveruseDetection,
-      &value,
-      NULL)) {
-    video_options_.cpu_overuse_detection.Set(value);
-  }
-  if (FindConstraint(
-      constraints,
-      MediaConstraintsInterface::kCpuOveruseEncodeUsage,
-      &value,
-      NULL)) {
-    video_options_.cpu_overuse_encode_usage.Set(value);
-  }
+  // Find payload padding constraint.
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kPayloadPadding,
+      &video_options_.use_payload_padding);
 
   // Find improved wifi bwe constraint.
   if (FindConstraint(
@@ -598,13 +595,9 @@
     video_options_.use_improved_wifi_bandwidth_estimator.Set(true);
   }
 
-  if (FindConstraint(
-        constraints,
-        MediaConstraintsInterface::kHighStartBitrate,
-        &value,
-        NULL)) {
-    video_options_.video_start_bitrate.Set(cricket::kHighStartBitrate);
-  }
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kHighStartBitrate,
+      &video_options_.video_start_bitrate);
 
   if (FindConstraint(
       constraints,
@@ -622,6 +615,10 @@
         cricket::VideoOptions::HIGH);
   }
 
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kOpusFec,
+      &audio_options_.opus_fec);
+
   const cricket::VideoCodec default_codec(
       JsepSessionDescription::kDefaultVideoCodecId,
       JsepSessionDescription::kDefaultVideoCodecName,
@@ -1151,6 +1148,8 @@
 }
 
 void WebRtcSession::RemoveSctpDataStream(uint32 sid) {
+  mediastream_signaling_->RemoveSctpDataChannel(static_cast<int>(sid));
+
   if (!data_channel_.get()) {
     LOG(LS_ERROR) << "RemoveDataChannelStreams called when data_channel_ is "
                   << "NULL.";
diff --git a/base/common.cc b/base/common.cc
index 40755dd..9f63aa4 100644
--- a/base/common.cc
+++ b/base/common.cc
@@ -78,4 +78,12 @@
   }
 }
 
+bool IsOdd(int n) {
+  return (n & 0x1);
+}
+
+bool IsEven(int n) {
+  return !IsOdd(n);
+}
+
 } // namespace talk_base
diff --git a/base/common.h b/base/common.h
index 381b6b5..ed7d59e 100644
--- a/base/common.h
+++ b/base/common.h
@@ -116,6 +116,10 @@
 // only by one component.
 void SetCustomAssertLogger(AssertLogger logger);
 
+bool IsOdd(int n);
+
+bool IsEven(int n);
+
 }  // namespace talk_base
 
 #if ENABLE_DEBUG
diff --git a/base/profiler_unittest.cc b/base/profiler_unittest.cc
index f451e5f..a39b32c 100644
--- a/base/profiler_unittest.cc
+++ b/base/profiler_unittest.cc
@@ -47,13 +47,15 @@
 
 TEST(ProfilerTest, TestFunction) {
   ASSERT_TRUE(Profiler::Instance()->Clear());
+
   // Profile a long-running function.
   const char* function_name = TestFunc();
   const ProfilerEvent* event = Profiler::Instance()->GetEvent(function_name);
   ASSERT_TRUE(event != NULL);
   EXPECT_FALSE(event->is_started());
   EXPECT_EQ(1, event->event_count());
-  EXPECT_NEAR(kWaitSec, event->mean(), kTolerance);
+  EXPECT_NEAR(kWaitSec, event->mean(), kTolerance * 3);
+
   // Run it a second time.
   TestFunc();
   EXPECT_FALSE(event->is_started());
@@ -95,7 +97,9 @@
   // Check the result.
   EXPECT_FALSE(event2->is_started());
   EXPECT_EQ(1, event2->event_count());
-  EXPECT_NEAR(kEvent2WaitSec, event2->mean(), kTolerance);
+
+  // The difference here can be as much as 0.33, so we need high tolerance.
+  EXPECT_NEAR(kEvent2WaitSec, event2->mean(), kTolerance * 4);
   // Make sure event1 is unchanged.
   EXPECT_FALSE(event1->is_started());
   EXPECT_EQ(1, event1->event_count());
diff --git a/libjingle_tests.gyp b/libjingle_tests.gyp
index 00e2230..016f0a5 100755
--- a/libjingle_tests.gyp
+++ b/libjingle_tests.gyp
@@ -287,9 +287,7 @@
         'media/base/videoengine_unittest.h',
         'media/devices/dummydevicemanager_unittest.cc',
         'media/devices/filevideocapturer_unittest.cc',
-        # TODO(jiayl): Enable the SCTP test once the memcheck and tsan bots
-        # failures are fixed (issue 2846).
-        #'media/sctp/sctpdataengine_unittest.cc',
+        'media/sctp/sctpdataengine_unittest.cc',
         'media/webrtc/webrtcpassthroughrender_unittest.cc',
         'media/webrtc/webrtcvideocapturer_unittest.cc',
         # Omitted because depends on non-open-source testdata files.
diff --git a/media/base/mediachannel.h b/media/base/mediachannel.h
index 49902ee..34d2def 100644
--- a/media/base/mediachannel.h
+++ b/media/base/mediachannel.h
@@ -182,6 +182,7 @@
     recording_sample_rate.SetFrom(change.recording_sample_rate);
     playout_sample_rate.SetFrom(change.playout_sample_rate);
     dscp.SetFrom(change.dscp);
+    opus_fec.SetFrom(change.opus_fec);
   }
 
   bool operator==(const AudioOptions& o) const {
@@ -207,7 +208,8 @@
         rx_agc_limiter == o.rx_agc_limiter &&
         recording_sample_rate == o.recording_sample_rate &&
         playout_sample_rate == o.playout_sample_rate &&
-        dscp == o.dscp;
+        dscp == o.dscp &&
+        opus_fec == o.opus_fec;
   }
 
   std::string ToString() const {
@@ -238,6 +240,7 @@
     ost << ToStringIfSet("recording_sample_rate", recording_sample_rate);
     ost << ToStringIfSet("playout_sample_rate", playout_sample_rate);
     ost << ToStringIfSet("dscp", dscp);
+    ost << ToStringIfSet("opus_fec", opus_fec);
     ost << "}";
     return ost.str();
   }
@@ -275,6 +278,8 @@
   Settable<uint32> playout_sample_rate;
   // Set DSCP value for packet sent from audio channel.
   Settable<bool> dscp;
+  // Enable inband FEC for Opus.
+  Settable<bool> opus_fec;
 };
 
 // Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
@@ -335,6 +340,7 @@
     screencast_min_bitrate.SetFrom(change.screencast_min_bitrate);
     use_improved_wifi_bandwidth_estimator.SetFrom(
         change.use_improved_wifi_bandwidth_estimator);
+    use_payload_padding.SetFrom(change.use_payload_padding);
   }
 
   bool operator==(const VideoOptions& o) const {
@@ -374,7 +380,8 @@
         skip_encoding_unused_streams == o.skip_encoding_unused_streams &&
         screencast_min_bitrate == o.screencast_min_bitrate &&
         use_improved_wifi_bandwidth_estimator ==
-            o.use_improved_wifi_bandwidth_estimator;
+            o.use_improved_wifi_bandwidth_estimator &&
+        use_payload_padding == o.use_payload_padding;
   }
 
   std::string ToString() const {
@@ -421,6 +428,7 @@
     ost << ToStringIfSet("screencast min bitrate", screencast_min_bitrate);
     ost << ToStringIfSet("improved wifi bwe",
                          use_improved_wifi_bandwidth_estimator);
+    ost << ToStringIfSet("payload padding", use_payload_padding);
     ost << "}";
     return ost.str();
   }
@@ -501,6 +509,8 @@
   Settable<int> screencast_min_bitrate;
   // Enable improved bandwidth estiamtor on wifi.
   Settable<bool> use_improved_wifi_bandwidth_estimator;
+  // Enable payload padding.
+  Settable<bool> use_payload_padding;
 };
 
 // A class for playing out soundclips.
diff --git a/media/base/videocapturer_unittest.cc b/media/base/videocapturer_unittest.cc
index 75da236..9f025e3 100644
--- a/media/base/videocapturer_unittest.cc
+++ b/media/base/videocapturer_unittest.cc
@@ -31,6 +31,12 @@
 const int kMinHdHeight = 720;
 const uint32 kTimeout = 5000U;
 
+void NormalizeVideoSize(int* expected_width, int* expected_height) {
+  // WebRtcVideoFrame truncates the frame size to a multiple of 4.
+  *expected_width = *expected_width & ~3;
+  *expected_height = *expected_height & ~3;
+}
+
 }  // namespace
 
 // Sets the elapsed time in the video frame to 0.
@@ -228,6 +234,59 @@
   EXPECT_EQ(2, renderer_.num_rendered_frames());
 }
 
+TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
+  capturer_.SetScreencast(true);
+
+  const int kWidth = 1281;
+  const int kHeight = 720;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+      cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
+  capturer_.ResetSupportedFormats(formats);
+
+  EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
+      kWidth,
+      kHeight,
+      cricket::VideoFormat::FpsToInterval(30),
+      cricket::FOURCC_ARGB)));
+  EXPECT_TRUE(capturer_.IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+  int expected_width = kWidth;
+  int expected_height = kHeight;
+  NormalizeVideoSize(&expected_width, &expected_height);
+  renderer_.SetSize(expected_width, expected_height, 0);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+}
+
+TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
+  capturer_.SetScreencast(true);
+
+  const int kMaxWidth = 4096;
+  const int kMaxHeight = 3072;
+  const int kWidth = kMaxWidth + 4;
+  const int kHeight = kMaxHeight + 4;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+      cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
+  capturer_.ResetSupportedFormats(formats);
+
+  EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
+      kWidth,
+      kHeight,
+      cricket::VideoFormat::FpsToInterval(30),
+      cricket::FOURCC_ARGB)));
+  EXPECT_TRUE(capturer_.IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+  int expected_width = 2050;
+  int expected_height = 1538;
+  NormalizeVideoSize(&expected_width, &expected_height);
+  renderer_.SetSize(expected_width, expected_height, 0);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+}
 
 TEST_F(VideoCapturerTest, TestFourccMatch) {
   cricket::VideoFormat desired(640, 480,
diff --git a/media/sctp/sctpdataengine.cc b/media/sctp/sctpdataengine.cc
index 46b2ece..3647d21 100644
--- a/media/sctp/sctpdataengine.cc
+++ b/media/sctp/sctpdataengine.cc
@@ -277,18 +277,20 @@
 }
 
 SctpDataEngine::~SctpDataEngine() {
-  // TODO(ldixon): There is currently a bug in teardown of usrsctp that blocks
-  // indefintely if a finish call made too soon after close calls. So teardown
-  // has been skipped. Once the bug is fixed, retest and enable teardown.
-  // Tracked in webrtc issue 2749.
-  //
-  // usrsctp_engines_count--;
-  // LOG(LS_VERBOSE) << "usrsctp_engines_count:" << usrsctp_engines_count;
-  // if (usrsctp_engines_count == 0) {
-  //   if (usrsctp_finish() != 0) {
-  //     LOG(LS_WARNING) << "usrsctp_finish.";
-  //   }
-  // }
+  usrsctp_engines_count--;
+  LOG(LS_VERBOSE) << "usrsctp_engines_count:" << usrsctp_engines_count;
+
+  if (usrsctp_engines_count == 0) {
+    // usrsctp_finish() may fail if it's called too soon after the channels are
+    // closed. Wait and try again until it succeeds for up to 3 seconds.
+    for (size_t i = 0; i < 300; ++i) {
+      if (usrsctp_finish() == 0)
+        return;
+
+      talk_base::Thread::SleepMs(10);
+    }
+    LOG(LS_ERROR) << "Failed to shut down usrsctp.";
+  }
 }
 
 DataMediaChannel* SctpDataEngine::CreateChannel(
diff --git a/media/sctp/sctpdataengine_unittest.cc b/media/sctp/sctpdataengine_unittest.cc
index 092524b..cf410e5 100644
--- a/media/sctp/sctpdataengine_unittest.cc
+++ b/media/sctp/sctpdataengine_unittest.cc
@@ -81,13 +81,13 @@
   // an SCTP packet.
   virtual void OnMessage(talk_base::Message* msg) {
     LOG(LS_VERBOSE) << "SctpFakeNetworkInterface::OnMessage";
-    talk_base::Buffer* buffer =
+    talk_base::scoped_ptr<talk_base::Buffer> buffer(
         static_cast<talk_base::TypedMessageData<talk_base::Buffer*>*>(
-            msg->pdata)->data();
+            msg->pdata)->data());
     if (dest_) {
-      dest_->OnPacketReceived(buffer, talk_base::PacketTime());
+      dest_->OnPacketReceived(buffer.get(), talk_base::PacketTime());
     }
-    delete buffer;
+    delete msg->pdata;
   }
 
   // Unsupported functions required to exist by NetworkInterface.
@@ -295,7 +295,7 @@
     params.ssrc = ssrc;
 
     return chan->SendData(params, talk_base::Buffer(
-        msg.data(), msg.length()), result);
+        &msg[0], msg.length()), result);
   }
 
   bool ReceivedData(const SctpFakeDataReceiver* recv, uint32 ssrc,
@@ -364,26 +364,26 @@
   EXPECT_EQ(cricket::SDR_SUCCESS, result);
   EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
   LOG(LS_VERBOSE) << "recv2.received=" << receiver2()->received()
-                  << "recv2.last_params.ssrc="
+                  << ", recv2.last_params.ssrc="
                   << receiver2()->last_params().ssrc
-                  << "recv2.last_params.timestamp="
+                  << ", recv2.last_params.timestamp="
                   << receiver2()->last_params().ssrc
-                  << "recv2.last_params.seq_num="
+                  << ", recv2.last_params.seq_num="
                   << receiver2()->last_params().seq_num
-                  << "recv2.last_data=" << receiver2()->last_data();
+                  << ", recv2.last_data=" << receiver2()->last_data();
 
   LOG(LS_VERBOSE) << "chan2 sending: 'hi chan1' -----------------------------";
   ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
   EXPECT_EQ(cricket::SDR_SUCCESS, result);
   EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
   LOG(LS_VERBOSE) << "recv1.received=" << receiver1()->received()
-                  << "recv1.last_params.ssrc="
+                  << ", recv1.last_params.ssrc="
                   << receiver1()->last_params().ssrc
-                  << "recv1.last_params.timestamp="
+                  << ", recv1.last_params.timestamp="
                   << receiver1()->last_params().ssrc
-                  << "recv1.last_params.seq_num="
+                  << ", recv1.last_params.seq_num="
                   << receiver1()->last_params().seq_num
-                  << "recv1.last_data=" << receiver1()->last_data();
+                  << ", recv1.last_data=" << receiver1()->last_data();
 }
 
 // Sends a lot of large messages at once and verifies SDR_BLOCK is returned.
@@ -398,7 +398,7 @@
 
   for (size_t i = 0; i < 100; ++i) {
     channel1()->SendData(
-        params, talk_base::Buffer(buffer.data(), buffer.size()), &result);
+        params, talk_base::Buffer(&buffer[0], buffer.size()), &result);
     if (result == cricket::SDR_BLOCK)
       break;
   }
diff --git a/media/webrtc/fakewebrtcvideoengine.h b/media/webrtc/fakewebrtcvideoengine.h
index 3eba47b..85c59d8 100644
--- a/media/webrtc/fakewebrtcvideoengine.h
+++ b/media/webrtc/fakewebrtcvideoengine.h
@@ -1000,6 +1000,10 @@
     return 0;
   }
 
+#ifdef USE_WEBRTC_DEV_BRANCH
+  WEBRTC_STUB(SetPadWithRedundantPayloads, (int, bool));
+#endif
+
   WEBRTC_FUNC(SetRtxReceivePayloadType, (const int channel,
                                          const uint8 payload_type)) {
     WEBRTC_CHECK_CHANNEL(channel);
@@ -1111,7 +1115,7 @@
     channels_[channel]->transmission_smoothing_ = enable;
     return 0;
   }
-  WEBRTC_FUNC(SetReservedTransmitBitrate, (int channel, 
+  WEBRTC_FUNC(SetReservedTransmitBitrate, (int channel,
       unsigned int reserved_transmit_bitrate_bps)) {
     WEBRTC_CHECK_CHANNEL(channel);
     channels_[channel]->reserved_transmit_bitrate_bps_ =
diff --git a/media/webrtc/fakewebrtcvoiceengine.h b/media/webrtc/fakewebrtcvoiceengine.h
index ff2079b..25c952d 100644
--- a/media/webrtc/fakewebrtcvoiceengine.h
+++ b/media/webrtc/fakewebrtcvoiceengine.h
@@ -536,6 +536,7 @@
   }
   WEBRTC_STUB(GetVADStatus, (int channel, bool& enabled,
                              webrtc::VadModes& mode, bool& disabledDTX));
+#ifdef USE_WEBRTC_DEV_BRANCH
   WEBRTC_FUNC(SetFECStatus, (int channel, bool enable)) {
     WEBRTC_CHECK_CHANNEL(channel);
     channels_[channel]->codec_fec = enable;
@@ -546,6 +547,7 @@
     enable = channels_[channel]->codec_fec;
     return 0;
   }
+#endif  // USE_WEBRTC_DEV_BRANCH
 
   // webrtc::VoEDtmf
   WEBRTC_FUNC(SendTelephoneEvent, (int channel, int event_code,
diff --git a/media/webrtc/webrtcvideoengine.cc b/media/webrtc/webrtcvideoengine.cc
index f1810bf..fd609e9 100644
--- a/media/webrtc/webrtcvideoengine.cc
+++ b/media/webrtc/webrtcvideoengine.cc
@@ -558,6 +558,7 @@
   }
 
   void Enable(bool enable) {
+    LOG(LS_INFO) << "WebRtcOveruseObserver enable: " << enable;
     talk_base::CritScope cs(&crit_);
     enabled_ = enable;
   }
@@ -586,8 +587,7 @@
         external_capture_(external_capture),
         capturer_updated_(false),
         interval_(0),
-        cpu_monitor_(cpu_monitor),
-        overuse_observer_enabled_(false) {
+        cpu_monitor_(cpu_monitor) {
   }
 
   int channel_id() const { return channel_id_; }
@@ -679,7 +679,8 @@
     vie_wrapper->base()->RegisterCpuOveruseObserver(channel_id_,
                                                     overuse_observer_.get());
     // (Dis)connect the video adapter from the cpu monitor as appropriate.
-    SetCpuOveruseDetection(overuse_observer_enabled_);
+    SetCpuOveruseDetection(
+        video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
 
     SignalCpuAdaptationUnable.repeat(adapter->SignalCpuAdaptationUnable);
   }
@@ -698,10 +699,18 @@
   }
 
   void ApplyCpuOptions(const VideoOptions& video_options) {
+    bool cpu_overuse_detection_changed =
+        video_options.cpu_overuse_detection.IsSet() &&
+        (video_options.cpu_overuse_detection.GetWithDefaultIfUnset(false) !=
+         video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
     // Use video_options_.SetAll() instead of assignment so that unset value in
     // video_options will not overwrite the previous option value.
     video_options_.SetAll(video_options);
     UpdateAdapterCpuOptions();
+    if (cpu_overuse_detection_changed) {
+      SetCpuOveruseDetection(
+          video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false));
+    }
   }
 
   void UpdateAdapterCpuOptions() {
@@ -709,15 +718,19 @@
       return;
     }
 
-    bool cpu_adapt, cpu_smoothing, adapt_third;
+    bool cpu_smoothing, adapt_third;
     float low, med, high;
+    bool cpu_adapt =
+        video_options_.adapt_input_to_cpu_usage.GetWithDefaultIfUnset(false);
+    bool cpu_overuse_detection =
+        video_options_.cpu_overuse_detection.GetWithDefaultIfUnset(false);
 
     // TODO(thorcarpenter): Have VideoAdapter be responsible for setting
     // all these video options.
     CoordinatedVideoAdapter* video_adapter = video_capturer_->video_adapter();
-    if (video_options_.adapt_input_to_cpu_usage.Get(&cpu_adapt) ||
-        overuse_observer_enabled_) {
-      video_adapter->set_cpu_adaptation(cpu_adapt || overuse_observer_enabled_);
+    if (video_options_.adapt_input_to_cpu_usage.IsSet() ||
+        video_options_.cpu_overuse_detection.IsSet()) {
+      video_adapter->set_cpu_adaptation(cpu_adapt || cpu_overuse_detection);
     }
     if (video_options_.adapt_cpu_with_smoothing.Get(&cpu_smoothing)) {
       video_adapter->set_cpu_smoothing(cpu_smoothing);
@@ -737,8 +750,6 @@
   }
 
   void SetCpuOveruseDetection(bool enable) {
-    overuse_observer_enabled_ = enable;
-
     if (overuse_observer_) {
       overuse_observer_->Enable(enable);
     }
@@ -747,10 +758,6 @@
     // it will be signaled by cpu monitor.
     CoordinatedVideoAdapter* adapter = video_adapter();
     if (adapter) {
-      bool cpu_adapt = false;
-      video_options_.adapt_input_to_cpu_usage.Get(&cpu_adapt);
-      adapter->set_cpu_adaptation(
-          adapter->cpu_adaptation() || cpu_adapt || enable);
       if (cpu_monitor_) {
         if (enable) {
           cpu_monitor_->SignalUpdate.disconnect(adapter);
@@ -815,7 +822,6 @@
 
   talk_base::CpuMonitor* cpu_monitor_;
   talk_base::scoped_ptr<WebRtcOveruseObserver> overuse_observer_;
-  bool overuse_observer_enabled_;
 
   VideoOptions video_options_;
 };
@@ -2967,9 +2973,6 @@
   bool buffer_latency_changed = options.buffered_mode_latency.IsSet() &&
       (options_.buffered_mode_latency != options.buffered_mode_latency);
 
-  bool cpu_overuse_detection_changed = options.cpu_overuse_detection.IsSet() &&
-      (options_.cpu_overuse_detection != options.cpu_overuse_detection);
-
   bool dscp_option_changed = (options_.dscp != options.dscp);
 
   bool suspend_below_min_bitrate_changed =
@@ -2988,6 +2991,11 @@
       options_.use_improved_wifi_bandwidth_estimator !=
           options.use_improved_wifi_bandwidth_estimator;
 
+#ifdef USE_WEBRTC_DEV_BRANCH
+  bool payload_padding_changed = options.use_payload_padding.IsSet() &&
+      options_.use_payload_padding != options.use_payload_padding;
+#endif
+
 
   // Save the options, to be interpreted where appropriate.
   // Use options_.SetAll() instead of assignment so that unset value in options
@@ -3076,17 +3084,6 @@
       }
     }
   }
-  if (cpu_overuse_detection_changed) {
-    bool cpu_overuse_detection =
-        options_.cpu_overuse_detection.GetWithDefaultIfUnset(false);
-    LOG(LS_INFO) << "CPU overuse detection is enabled? "
-                 << cpu_overuse_detection;
-    for (SendChannelMap::iterator iter = send_channels_.begin();
-         iter != send_channels_.end(); ++iter) {
-      WebRtcVideoChannelSendInfo* send_channel = iter->second;
-      send_channel->SetCpuOveruseDetection(cpu_overuse_detection);
-    }
-  }
   if (dscp_option_changed) {
     talk_base::DiffServCodePoint dscp = talk_base::DSCP_DEFAULT;
     if (options_.dscp.GetWithDefaultIfUnset(false))
@@ -3120,6 +3117,17 @@
           it->second->channel_id(), config);
     }
   }
+#ifdef USE_WEBRTC_DEV_BRANCH
+  if (payload_padding_changed) {
+    LOG(LS_INFO) << "Payload-based padding called.";
+    for (SendChannelMap::iterator it = send_channels_.begin();
+            it != send_channels_.end(); ++it) {
+      engine()->vie()->rtp()->SetPadWithRedundantPayloads(
+          it->second->channel_id(),
+          options_.use_payload_padding.GetWithDefaultIfUnset(false));
+    }
+  }
+#endif
   webrtc::CpuOveruseOptions overuse_options;
   if (GetCpuOveruseOptions(options_, &overuse_options)) {
     for (SendChannelMap::iterator it = send_channels_.begin();
@@ -3560,10 +3568,6 @@
   send_channel->SignalCpuAdaptationUnable.connect(this,
       &WebRtcVideoMediaChannel::OnCpuAdaptationUnable);
 
-  if (options_.cpu_overuse_detection.GetWithDefaultIfUnset(false)) {
-    send_channel->SetCpuOveruseDetection(true);
-  }
-
   webrtc::CpuOveruseOptions overuse_options;
   if (GetCpuOveruseOptions(options_, &overuse_options)) {
     if (engine()->vie()->base()->SetCpuOveruseOptions(channel_id,
@@ -3703,7 +3707,7 @@
     target_codec.codecSpecific.VP8.resilience = webrtc::kResilienceOff;
 
     bool enable_denoising =
-        options_.video_noise_reduction.GetWithDefaultIfUnset(false);
+        options_.video_noise_reduction.GetWithDefaultIfUnset(true);
     target_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
   }
 
@@ -3976,17 +3980,21 @@
   // Turn off VP8 frame dropping when screensharing as the current model does
   // not work well at low fps.
   bool vp8_frame_dropping = !is_screencast;
-  // Disable denoising for screencasting.
+  // TODO(pbos): Remove |video_noise_reduction| and enable denoising for all
+  // non-screencast video.
   bool enable_denoising =
-      options_.video_noise_reduction.GetWithDefaultIfUnset(false);
+      options_.video_noise_reduction.GetWithDefaultIfUnset(true);
+  // Disable denoising for screencasting.
+  if (is_screencast) {
+    enable_denoising = false;
+  }
   int screencast_min_bitrate =
       options_.screencast_min_bitrate.GetWithDefaultIfUnset(0);
   bool leaky_bucket = options_.video_leaky_bucket.GetWithDefaultIfUnset(true);
-  bool denoising = !is_screencast && enable_denoising;
   bool reset_send_codec =
       target_width != cur_width || target_height != cur_height ||
       automatic_resize != vie_codec.codecSpecific.VP8.automaticResizeOn ||
-      denoising != vie_codec.codecSpecific.VP8.denoisingOn ||
+      enable_denoising != vie_codec.codecSpecific.VP8.denoisingOn ||
       vp8_frame_dropping != vie_codec.codecSpecific.VP8.frameDroppingOn;
 
   if (reset_send_codec) {
@@ -3999,7 +4007,7 @@
     vie_codec.maxBitrate = target_codec.maxBitrate;
     vie_codec.targetBitrate = 0;
     vie_codec.codecSpecific.VP8.automaticResizeOn = automatic_resize;
-    vie_codec.codecSpecific.VP8.denoisingOn = denoising;
+    vie_codec.codecSpecific.VP8.denoisingOn = enable_denoising;
     vie_codec.codecSpecific.VP8.frameDroppingOn = vp8_frame_dropping;
     MaybeChangeBitrates(channel_id, &vie_codec);
 
diff --git a/media/webrtc/webrtcvideoengine2.cc b/media/webrtc/webrtcvideoengine2.cc
index d6d1354..716c5a8 100644
--- a/media/webrtc/webrtcvideoengine2.cc
+++ b/media/webrtc/webrtcvideoengine2.cc
@@ -278,6 +278,13 @@
 
   video_codecs_ = DefaultVideoCodecs();
   default_codec_format_ = VideoFormat(kDefaultVideoFormat);
+
+  rtp_header_extensions_.push_back(
+      RtpHeaderExtension(kRtpTimestampOffsetHeaderExtension,
+                         kRtpTimestampOffsetHeaderExtensionDefaultId));
+  rtp_header_extensions_.push_back(
+      RtpHeaderExtension(kRtpAbsoluteSenderTimeHeaderExtension,
+                         kRtpAbsoluteSenderTimeHeaderExtensionDefaultId));
 }
 
 WebRtcVideoEngine2::~WebRtcVideoEngine2() {
@@ -774,6 +781,20 @@
   return true;
 }
 
+static std::string RtpExtensionsToString(
+    const std::vector<RtpHeaderExtension>& extensions) {
+  std::stringstream out;
+  out << '{';
+  for (size_t i = 0; i < extensions.size(); ++i) {
+    out << "{" << extensions[i].uri << ": " << extensions[i].id << "}";
+    if (i != extensions.size() - 1) {
+      out << ", ";
+    }
+  }
+  out << '}';
+  return out.str();
+}
+
 }  // namespace
 
 bool WebRtcVideoChannel2::SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
@@ -967,6 +988,8 @@
     config.rtp.rtx.payload_type = codec_settings.rtx_payload_type;
   }
 
+  config.rtp.extensions = send_rtp_extensions_;
+
   if (IsNackEnabled(codec_settings.codec)) {
     config.rtp.nack.rtp_history_ms = kNackHistoryMs;
   }
@@ -1047,6 +1070,7 @@
     config.rtp.nack.rtp_history_ms = kNackHistoryMs;
   }
   config.rtp.remb = true;
+  config.rtp.extensions = recv_rtp_extensions_;
   // TODO(pbos): This protection is against setting the same local ssrc as
   // remote which is not permitted by the lower-level API. RTCP requires a
   // corresponding sender SSRC. Figure out what to do when we don't have
@@ -1280,15 +1304,31 @@
 
 bool WebRtcVideoChannel2::SetRecvRtpHeaderExtensions(
     const std::vector<RtpHeaderExtension>& extensions) {
-  // TODO(pbos): Implement.
-  LOG(LS_VERBOSE) << "SetRecvRtpHeaderExtensions()";
+  LOG(LS_INFO) << "SetRecvRtpHeaderExtensions: "
+               << RtpExtensionsToString(extensions);
+  std::vector<webrtc::RtpExtension> webrtc_extensions;
+  for (size_t i = 0; i < extensions.size(); ++i) {
+    // TODO(pbos): Make sure we don't pass unsupported extensions!
+    webrtc::RtpExtension webrtc_extension(extensions[i].uri.c_str(),
+                                          extensions[i].id);
+    webrtc_extensions.push_back(webrtc_extension);
+  }
+  recv_rtp_extensions_ = webrtc_extensions;
   return true;
 }
 
 bool WebRtcVideoChannel2::SetSendRtpHeaderExtensions(
     const std::vector<RtpHeaderExtension>& extensions) {
-  // TODO(pbos): Implement.
-  LOG(LS_VERBOSE) << "SetSendRtpHeaderExtensions()";
+  LOG(LS_INFO) << "SetSendRtpHeaderExtensions: "
+               << RtpExtensionsToString(extensions);
+  std::vector<webrtc::RtpExtension> webrtc_extensions;
+  for (size_t i = 0; i < extensions.size(); ++i) {
+    // TODO(pbos): Make sure we don't pass unsupported extensions!
+    webrtc::RtpExtension webrtc_extension(extensions[i].uri.c_str(),
+                                          extensions[i].id);
+    webrtc_extensions.push_back(webrtc_extension);
+  }
+  send_rtp_extensions_ = webrtc_extensions;
   return true;
 }
 
diff --git a/media/webrtc/webrtcvideoengine2.h b/media/webrtc/webrtcvideoengine2.h
index d1a784d..81466eb 100644
--- a/media/webrtc/webrtcvideoengine2.h
+++ b/media/webrtc/webrtcvideoengine2.h
@@ -236,6 +236,9 @@
       OVERRIDE;
   virtual void OnReadyToSend(bool ready) OVERRIDE;
   virtual bool MuteStream(uint32 ssrc, bool mute) OVERRIDE;
+
+  // Set send/receive RTP header extensions. This must be done before creating
+  // streams, as extensions only apply to streams created afterwards.
   virtual bool SetRecvRtpHeaderExtensions(
       const std::vector<RtpHeaderExtension>& extensions) OVERRIDE;
   virtual bool SetSendRtpHeaderExtensions(
@@ -351,8 +354,11 @@
   std::map<uint32, webrtc::VideoReceiveStream*> receive_streams_;
 
   Settable<VideoCodecSettings> send_codec_;
+  std::vector<webrtc::RtpExtension> send_rtp_extensions_;
+
   WebRtcVideoEncoderFactory2* const encoder_factory_;
   std::vector<VideoCodecSettings> recv_codecs_;
+  std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
   VideoOptions options_;
 };
 
diff --git a/media/webrtc/webrtcvideoengine2_unittest.cc b/media/webrtc/webrtcvideoengine2_unittest.cc
index c9ff182..6886300 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -396,6 +396,31 @@
   FAIL() << "No RTX codec found among default codecs.";
 }
 
+TEST_F(WebRtcVideoEngine2Test, SupportsTimestampOffsetHeaderExtension) {
+  std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
+  ASSERT_FALSE(extensions.empty());
+  for (size_t i = 0; i < extensions.size(); ++i) {
+    if (extensions[i].uri == kRtpTimestampOffsetHeaderExtension) {
+      EXPECT_EQ(kRtpTimestampOffsetHeaderExtensionDefaultId, extensions[i].id);
+      return;
+    }
+  }
+  FAIL() << "Timestamp offset extension not in header-extension list.";
+}
+
+TEST_F(WebRtcVideoEngine2Test, SupportsAbsoluteSenderTimeHeaderExtension) {
+  std::vector<RtpHeaderExtension> extensions = engine_.rtp_header_extensions();
+  ASSERT_FALSE(extensions.empty());
+  for (size_t i = 0; i < extensions.size(); ++i) {
+    if (extensions[i].uri == kRtpAbsoluteSenderTimeHeaderExtension) {
+      EXPECT_EQ(kRtpAbsoluteSenderTimeHeaderExtensionDefaultId,
+                extensions[i].id);
+      return;
+    }
+  }
+  FAIL() << "Absolute Sender Time extension not in header-extension list.";
+}
+
 class WebRtcVideoChannel2BaseTest
     : public VideoMediaChannelTest<WebRtcVideoEngine2, WebRtcVideoChannel2> {
  protected:
@@ -598,6 +623,67 @@
     EXPECT_EQ(video_codec.height, webrtc_codec.height);
     EXPECT_EQ(video_codec.framerate, webrtc_codec.maxFramerate);
   }
+
+  void TestSetSendRtpHeaderExtensions(const std::string& cricket_ext,
+                                      const std::string& webrtc_ext) {
+    // Enable extension.
+    const int id = 1;
+    std::vector<cricket::RtpHeaderExtension> extensions;
+    extensions.push_back(cricket::RtpHeaderExtension(cricket_ext, id));
+    EXPECT_TRUE(channel_->SetSendRtpHeaderExtensions(extensions));
+
+    FakeVideoSendStream* send_stream =
+        AddSendStream(cricket::StreamParams::CreateLegacy(123));
+
+    // Verify the send extension id.
+    ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size());
+    EXPECT_EQ(id, send_stream->GetConfig().rtp.extensions[0].id);
+    EXPECT_EQ(webrtc_ext, send_stream->GetConfig().rtp.extensions[0].name);
+    // Verify call with same set of extensions returns true.
+    EXPECT_TRUE(channel_->SetSendRtpHeaderExtensions(extensions));
+    // Verify that SetSendRtpHeaderExtensions doesn't implicitly add them for
+    // receivers.
+    EXPECT_TRUE(AddRecvStream(cricket::StreamParams::CreateLegacy(123))
+                    ->GetConfig()
+                    .rtp.extensions.empty());
+
+    // Remove the extensions and verify that this doesn't reset them on the
+    // existing stream; extensions only apply to streams created afterwards.
+    std::vector<cricket::RtpHeaderExtension> empty_extensions;
+    EXPECT_TRUE(channel_->SetSendRtpHeaderExtensions(empty_extensions));
+    EXPECT_FALSE(send_stream->GetConfig().rtp.extensions.empty());
+  }
+
+  void TestSetRecvRtpHeaderExtensions(const std::string& cricket_ext,
+                                      const std::string& webrtc_ext) {
+    // Enable extension.
+    const int id = 1;
+    std::vector<cricket::RtpHeaderExtension> extensions;
+    extensions.push_back(cricket::RtpHeaderExtension(cricket_ext, id));
+    EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(extensions));
+
+    FakeVideoReceiveStream* recv_stream =
+        AddRecvStream(cricket::StreamParams::CreateLegacy(123));
+
+    // Verify the recv extension id.
+    ASSERT_EQ(1u, recv_stream->GetConfig().rtp.extensions.size());
+    EXPECT_EQ(id, recv_stream->GetConfig().rtp.extensions[0].id);
+    EXPECT_EQ(webrtc_ext, recv_stream->GetConfig().rtp.extensions[0].name);
+    // Verify call with same set of extensions returns true.
+    EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(extensions));
+    // Verify that SetRecvRtpHeaderExtensions doesn't implicitly add them for
+    // senders.
+    EXPECT_TRUE(AddSendStream(cricket::StreamParams::CreateLegacy(123))
+                    ->GetConfig()
+                    .rtp.extensions.empty());
+
+    // Remove the extensions and verify that this doesn't reset them on the
+    // existing stream; extensions only apply to streams created afterwards.
+    std::vector<cricket::RtpHeaderExtension> empty_extensions;
+    EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(empty_extensions));
+    EXPECT_FALSE(recv_stream->GetConfig().rtp.extensions.empty());
+  }
+
   talk_base::scoped_ptr<VideoMediaChannel> channel_;
   FakeWebRtcVideoChannel2* fake_channel_;
   uint32 last_ssrc_;
@@ -723,12 +809,34 @@
   ASSERT_TRUE(recv_stream->GetConfig().rtp.rtx.empty());
 }
 
-TEST_F(WebRtcVideoChannel2Test, DISABLED_RtpTimestampOffsetHeaderExtensions) {
-  FAIL() << "Not implemented.";  // TODO(pbos): Implement.
+TEST_F(WebRtcVideoChannel2Test, NoHeaderExtensionsByDefault) {
+  FakeVideoSendStream* send_stream =
+      AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0]));
+  ASSERT_TRUE(send_stream->GetConfig().rtp.extensions.empty());
+
+  FakeVideoReceiveStream* recv_stream =
+      AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0]));
+  ASSERT_TRUE(recv_stream->GetConfig().rtp.extensions.empty());
 }
 
-TEST_F(WebRtcVideoChannel2Test, DISABLED_AbsoluteSendTimeHeaderExtensions) {
-  FAIL() << "Not implemented.";  // TODO(pbos): Implement.
+// Test support for RTP timestamp offset header extension.
+TEST_F(WebRtcVideoChannel2Test, SendRtpTimestampOffsetHeaderExtensions) {
+  TestSetSendRtpHeaderExtensions(kRtpTimestampOffsetHeaderExtension,
+                                 webrtc::RtpExtension::kTOffset);
+}
+TEST_F(WebRtcVideoChannel2Test, RecvRtpTimestampOffsetHeaderExtensions) {
+  TestSetRecvRtpHeaderExtensions(kRtpTimestampOffsetHeaderExtension,
+                                 webrtc::RtpExtension::kTOffset);
+}
+
+// Test support for absolute send time header extension.
+TEST_F(WebRtcVideoChannel2Test, SendAbsoluteSendTimeHeaderExtensions) {
+  TestSetSendRtpHeaderExtensions(kRtpAbsoluteSenderTimeHeaderExtension,
+                                 webrtc::RtpExtension::kAbsSendTime);
+}
+TEST_F(WebRtcVideoChannel2Test, RecvAbsoluteSendTimeHeaderExtensions) {
+  TestSetRecvRtpHeaderExtensions(kRtpAbsoluteSenderTimeHeaderExtension,
+                                 webrtc::RtpExtension::kAbsSendTime);
 }
 
 TEST_F(WebRtcVideoChannel2Test, DISABLED_LeakyBucketTest) {
diff --git a/media/webrtc/webrtcvoiceengine.cc b/media/webrtc/webrtcvoiceengine.cc
index 785cdf1..f1460a6 100644
--- a/media/webrtc/webrtcvoiceengine.cc
+++ b/media/webrtc/webrtcvoiceengine.cc
@@ -237,6 +237,7 @@
   options.experimental_aec.Set(false);
   options.experimental_ns.Set(false);
   options.aec_dump.Set(false);
+  options.opus_fec.Set(false);
   return options;
 }
 
@@ -399,12 +400,8 @@
 
 // True if params["stereo"] == "1"
 static bool IsOpusStereoEnabled(const AudioCodec& codec) {
-  CodecParameterMap::const_iterator param =
-      codec.params.find(kCodecParamStereo);
-  if (param == codec.params.end()) {
-    return false;
-  }
-  return param->second == kParamValueTrue;
+  int value;
+  return codec.GetParam(kCodecParamStereo, &value) && value == 1;
 }
 
 static bool IsValidOpusBitrate(int bitrate) {
@@ -426,14 +423,20 @@
   return bitrate;
 }
 
-// True if params["useinbandfec"] == "1"
+// Return true if params[kCodecParamUseInbandFec] == kParamValueTrue, false
+// otherwise.
 static bool IsOpusFecEnabled(const AudioCodec& codec) {
-  CodecParameterMap::const_iterator param =
-      codec.params.find(kCodecParamUseInbandFec);
-  if (param == codec.params.end())
-    return false;
+  int value;
+  return codec.GetParam(kCodecParamUseInbandFec, &value) && value == 1;
+}
 
-  return param->second == kParamValueTrue;
+// Set params[kCodecParamUseInbandFec]. Caller should make sure codec is Opus.
+static void SetOpusFec(AudioCodec* codec, bool opus_fec) {
+  if (opus_fec) {
+    codec->params[kCodecParamUseInbandFec] = kParamValueTrue;
+  } else {
+    codec->params.erase(kCodecParamUseInbandFec);
+  }
 }
 
 void WebRtcVoiceEngine::ConstructCodecs() {
@@ -480,6 +483,7 @@
           }
           // TODO(hellner): Add ptime, sprop-stereo, stereo and useinbandfec
           // when they can be set to values other than the default.
+          SetOpusFec(&codec, false);
         }
         codecs_.push_back(codec);
       } else {
@@ -905,6 +909,16 @@
     }
   }
 
+  bool opus_fec = false;
+  if (options.opus_fec.Get(&opus_fec)) {
+    LOG(LS_INFO) << "Opus FEC is enabled? " << opus_fec;
+    for (std::vector<AudioCodec>::iterator it = codecs_.begin();
+        it != codecs_.end(); ++it) {
+      if (IsOpus(*it))
+        SetOpusFec(&(*it), opus_fec);
+    }
+  }
+
   return true;
 }
 
@@ -2022,7 +2036,7 @@
         voe_codec.rate = bitrate_from_params;
       }
 
-      // If FEC is enabled.
+      // For Opus, we also enable inband FEC if it is requested.
       if (IsOpusFecEnabled(*it)) {
         LOG(LS_INFO) << "Enabling Opus FEC on channel " << channel;
 #ifdef USE_WEBRTC_DEV_BRANCH
diff --git a/media/webrtc/webrtcvoiceengine_unittest.cc b/media/webrtc/webrtcvoiceengine_unittest.cc
index 80a50c5..58893b9 100644
--- a/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -1146,7 +1146,7 @@
 
 #ifdef USE_WEBRTC_DEV_BRANCH
 // Test that without useinbandfec, Opus FEC is off.
-TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecNoOpusFEC) {
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecNoOpusFec) {
   EXPECT_TRUE(SetupEngine());
   int channel_num = voe_.GetLastChannel();
   std::vector<cricket::AudioCodec> codecs;
@@ -1157,7 +1157,7 @@
 }
 
 // Test that with useinbandfec=0, Opus FEC is off.
-TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusDisableFEC) {
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusDisableFec) {
   EXPECT_TRUE(SetupEngine());
   int channel_num = voe_.GetLastChannel();
   std::vector<cricket::AudioCodec> codecs;
@@ -1174,7 +1174,7 @@
 }
 
 // Test that with useinbandfec=1, Opus FEC is on.
-TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusEnableFEC) {
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusEnableFec) {
   EXPECT_TRUE(SetupEngine());
   int channel_num = voe_.GetLastChannel();
   std::vector<cricket::AudioCodec> codecs;
@@ -1191,7 +1191,7 @@
 }
 
 // Test that with useinbandfec=1, stereo=1, Opus FEC is on.
-TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusEnableFECStereo) {
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusEnableFecStereo) {
   EXPECT_TRUE(SetupEngine());
   int channel_num = voe_.GetLastChannel();
   std::vector<cricket::AudioCodec> codecs;
@@ -1209,7 +1209,7 @@
 }
 
 // Test that with non-Opus, codec FEC is off.
-TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecIsacNoFEC) {
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecIsacNoFec) {
   EXPECT_TRUE(SetupEngine());
   int channel_num = voe_.GetLastChannel();
   std::vector<cricket::AudioCodec> codecs;
@@ -1219,6 +1219,31 @@
 }
 #endif  // USE_WEBRTC_DEV_BRANCH
 
+// Test that AudioOptions controls whether Opus FEC appears in the codec list.
+TEST_F(WebRtcVoiceEngineTestFake, OpusFecViaOptions) {
+  EXPECT_TRUE(SetupEngine());
+  std::vector<cricket::AudioCodec> codecs = engine_.codecs();
+  int value;
+  for (std::vector<cricket::AudioCodec>::const_iterator it = codecs.begin();
+      it != codecs.end(); ++it) {
+    if (_stricmp(it->name.c_str(), cricket::kOpusCodecName) == 0) {
+      EXPECT_FALSE(it->GetParam(cricket::kCodecParamUseInbandFec, &value));
+    }
+  }
+
+  cricket::AudioOptions options;
+  options.opus_fec.Set(true);
+  EXPECT_TRUE(engine_.SetOptions(options));
+  codecs = engine_.codecs();
+  for (std::vector<cricket::AudioCodec>::const_iterator it = codecs.begin();
+      it != codecs.end(); ++it) {
+    if (_stricmp(it->name.c_str(), cricket::kOpusCodecName) == 0) {
+      EXPECT_TRUE(it->GetParam(cricket::kCodecParamUseInbandFec, &value));
+      EXPECT_EQ(1, value);
+    }
+  }
+}
+
 // Test that we can apply CELT with stereo mode but fail with mono mode.
 TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCelt) {
   EXPECT_TRUE(SetupEngine());
diff --git a/session/media/channelmanager.cc b/session/media/channelmanager.cc
index 3461a9c..88316b5 100644
--- a/session/media/channelmanager.cc
+++ b/session/media/channelmanager.cc
@@ -578,6 +578,29 @@
   return ret;
 }
 
+// Sets engine-specific audio options according to enabled experiments.
+bool ChannelManager::SetEngineAudioOptions(const AudioOptions& options) {
+  // If we're initialized, pass the settings to the media engine.
+  bool ret = false;
+  if (initialized_) {
+    ret = worker_thread_->Invoke<bool>(
+        Bind(&ChannelManager::SetEngineAudioOptions_w, this, options));
+  }
+
+  // If all worked well, save the audio options.
+  if (ret) {
+    audio_options_ = options;
+  }
+  return ret;
+}
+
+bool ChannelManager::SetEngineAudioOptions_w(const AudioOptions& options) {
+  ASSERT(worker_thread_ == talk_base::Thread::Current());
+  ASSERT(initialized_);
+
+  return media_engine_->SetAudioOptions(options);
+}
+
 bool ChannelManager::GetOutputVolume(int* level) {
   if (!initialized_) {
     return false;
diff --git a/session/media/channelmanager.h b/session/media/channelmanager.h
index e811bb7..e8d6c0e 100644
--- a/session/media/channelmanager.h
+++ b/session/media/channelmanager.h
@@ -143,6 +143,8 @@
   bool SetAudioOptions(const std::string& wave_in_device,
                        const std::string& wave_out_device,
                        const AudioOptions& options);
+  // Sets engine-specific audio options according to enabled experiments.
+  bool SetEngineAudioOptions(const AudioOptions& options);
   bool GetOutputVolume(int* level);
   bool SetOutputVolume(int level);
   bool IsSameCapturer(const std::string& capturer_name,
@@ -266,6 +268,7 @@
   void DestroySoundclip_w(Soundclip* soundclip);
   bool SetAudioOptions_w(const AudioOptions& options, int delay_offset,
                          const Device* in_dev, const Device* out_dev);
+  bool SetEngineAudioOptions_w(const AudioOptions& options);
   bool SetCaptureDevice_w(const Device* cam_device);
   void OnVideoCaptureStateChange(VideoCapturer* capturer,
                                  CaptureState result);
diff --git a/session/media/channelmanager_unittest.cc b/session/media/channelmanager_unittest.cc
index 055e1ad..cbf19f8 100644
--- a/session/media/channelmanager_unittest.cc
+++ b/session/media/channelmanager_unittest.cc
@@ -321,6 +321,25 @@
   EXPECT_FALSE(cm_->SetAudioOptions("audio-in9", "audio-out2", options));
 }
 
+TEST_F(ChannelManagerTest, SetEngineAudioOptions) {
+  EXPECT_TRUE(cm_->Init());
+  // Test setting specific values.
+  AudioOptions options;
+  options.experimental_ns.Set(true);
+  EXPECT_TRUE(cm_->SetEngineAudioOptions(options));
+  bool experimental_ns = false;
+  EXPECT_TRUE(fme_->audio_options().experimental_ns.Get(&experimental_ns));
+  EXPECT_TRUE(experimental_ns);
+}
+
+TEST_F(ChannelManagerTest, SetEngineAudioOptionsBeforeInitFails) {
+  // Test that values that we set before Init are not applied.
+  AudioOptions options;
+  options.experimental_ns.Set(true);
+  EXPECT_FALSE(cm_->SetEngineAudioOptions(options));
+  EXPECT_FALSE(fme_->audio_options().experimental_ns.IsSet());
+}
+
 TEST_F(ChannelManagerTest, SetCaptureDeviceBeforeInit) {
   // Test that values that we set before Init are applied.
   EXPECT_TRUE(cm_->SetCaptureDevice("video-in2"));