Reformat the WebRTC code base

Running clang-format with Chromium's style guide.

The goal is threefold:
 * providing consistency and readability (that's what coding guidelines
   are for)
 * preventing noise from presubmit checks and git cl format in future CLs
   (see the example below)
 * building on the previous point: making it easier to fix format issues
   automatically
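
For reference, a minimal sketch of the workflow this enables on future CLs
(assuming depot_tools is installed; run git cl format --help for the full
flag list):

  # Reformat only the lines touched by the current CL.
  git cl format

  # Reformat every touched file in its entirety.
  git cl format --full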

Please consider using git-hyper-blame to ignore this commit.
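
For example (a sketch, assuming depot_tools; <commit-hash> is a placeholder
for this change's hash once it lands):

  # Skip this commit when assigning blame for a file.
  git hyper-blame -i <commit-hash> video/overuse_frame_detector.cc

  # Or record it once; git hyper-blame should pick the file up automatically.
  echo <commit-hash> >> .git-blame-ignore-revs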

Bug: webrtc:9340
Change-Id: I694567c4cdf8cee2860958cfe82bfaf25848bb87
Reviewed-on: https://webrtc-review.googlesource.com/81185
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23660}
diff --git a/video/call_stats.h b/video/call_stats.h
index 5ca44fa..00feb53 100644
--- a/video/call_stats.h
+++ b/video/call_stats.h
@@ -53,8 +53,7 @@
 
   // Helper struct keeping track of the time a rtt value is reported.
   struct RttTime {
-    RttTime(int64_t new_rtt, int64_t rtt_time)
-        : rtt(new_rtt), time(rtt_time) {}
+    RttTime(int64_t new_rtt, int64_t rtt_time) : rtt(new_rtt), time(rtt_time) {}
     const int64_t rtt;
     const int64_t time;
   };
diff --git a/video/overuse_frame_detector.cc b/video/overuse_frame_detector.cc
index 0c56be8..d6055c8 100644
--- a/video/overuse_frame_detector.cc
+++ b/video/overuse_frame_detector.cc
@@ -195,7 +195,8 @@
   float InitialUsageInPercent() const {
     // Start in between the underuse and overuse threshold.
     return (options_.low_encode_usage_threshold_percent +
-            options_.high_encode_usage_threshold_percent) / 2.0f;
+            options_.high_encode_usage_threshold_percent) /
+           2.0f;
   }
 
   float InitialProcessingMs() const {
@@ -442,8 +443,7 @@
 }
 
 std::unique_ptr<OveruseFrameDetector::ProcessingUsage>
-OveruseFrameDetector::CreateProcessingUsage(
-    const CpuOveruseOptions& options) {
+OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) {
   std::unique_ptr<ProcessingUsage> instance;
   if (options.filter_time_ms > 0) {
     instance = rtc::MakeUnique<SendProcessingUsage2>(options);
@@ -461,8 +461,8 @@
       if (normal_period_ms > 0 && overuse_period_ms > 0 &&
           underuse_period_ms > 0) {
         instance = rtc::MakeUnique<OverdoseInjector>(
-            std::move(instance), normal_period_ms,
-            overuse_period_ms, underuse_period_ms);
+            std::move(instance), normal_period_ms, overuse_period_ms,
+            underuse_period_ms);
       } else {
         RTC_LOG(LS_WARNING)
             << "Invalid (non-positive) normal/overuse/underuse periods: "
@@ -572,7 +572,7 @@
   if (last_capture_time_us_ == -1)
     return false;
   return (now_us - last_capture_time_us_) >
-      options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec;
+         options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec;
 }
 
 void OveruseFrameDetector::ResetAll(int num_pixels) {
diff --git a/video/overuse_frame_detector.h b/video/overuse_frame_detector.h
index 800e53f..c3f3cfa 100644
--- a/video/overuse_frame_detector.h
+++ b/video/overuse_frame_detector.h
@@ -34,7 +34,7 @@
   // General settings.
   int frame_timeout_interval_ms;  // The maximum allowed interval between two
                                   // frames before resetting estimations.
-  int min_frame_samples;  // The minimum number of frames required.
+  int min_frame_samples;          // The minimum number of frames required.
   int min_process_count;  // The number of initial process times required before
                           // triggering an overuse/underuse.
   int high_threshold_consecutive_count;  // The number of consecutive checks
diff --git a/video/overuse_frame_detector_unittest.cc b/video/overuse_frame_detector_unittest.cc
index bce5642..3fc6c17 100644
--- a/video/overuse_frame_detector_unittest.cc
+++ b/video/overuse_frame_detector_unittest.cc
@@ -26,11 +26,11 @@
 using ::testing::_;
 
 namespace {
-  const int kWidth = 640;
-  const int kHeight = 480;
-  // Corresponds to load of 15%
-  const int kFrameIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
-  const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec;
+const int kWidth = 640;
+const int kHeight = 480;
+// Corresponds to load of 15%
+const int kFrameIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
+const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec;
 }  // namespace
 
 class MockCpuOveruseObserver : public AdaptationObserverInterface {
@@ -44,9 +44,7 @@
 
 class CpuOveruseObserverImpl : public AdaptationObserverInterface {
  public:
-  CpuOveruseObserverImpl() :
-    overuse_(0),
-    normaluse_(0) {}
+  CpuOveruseObserverImpl() : overuse_(0), normaluse_(0) {}
   virtual ~CpuOveruseObserverImpl() {}
 
   void AdaptDown(AdaptReason) { ++overuse_; }
@@ -85,7 +83,9 @@
 
   int InitialUsage() {
     return ((options_.low_encode_usage_threshold_percent +
-             options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
+             options_.high_encode_usage_threshold_percent) /
+            2.0f) +
+           0.5;
   }
 
   virtual void InsertAndSendFramesWithInterval(int num_frames,
@@ -143,14 +143,14 @@
     // the usage. From the tests where these are used, adding another sample
     // doesn't affect the expected outcome (this is mainly to check initial
     // values and whether the overuse detector has been reset or not).
-    InsertAndSendFramesWithInterval(2, rtc::kNumMicrosecsPerSec,
-                                    width, height, kFrameIntervalUs);
+    InsertAndSendFramesWithInterval(2, rtc::kNumMicrosecsPerSec, width, height,
+                                    kFrameIntervalUs);
   }
   void TriggerOveruse(int num_times) {
     const int kDelayUs = 32 * rtc::kNumMicrosecsPerMillisec;
     for (int i = 0; i < num_times; ++i) {
-      InsertAndSendFramesWithInterval(
-          1000, kFrameIntervalUs, kWidth, kHeight, kDelayUs);
+      InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                      kDelayUs);
       overuse_detector_->CheckForOveruse(observer_);
     }
   }
@@ -158,10 +158,10 @@
   void TriggerUnderuse() {
     const int kDelayUs1 = 5000;
     const int kDelayUs2 = 6000;
-    InsertAndSendFramesWithInterval(
-        1300, kFrameIntervalUs, kWidth, kHeight, kDelayUs1);
-    InsertAndSendFramesWithInterval(
-        1, kFrameIntervalUs, kWidth, kHeight, kDelayUs2);
+    InsertAndSendFramesWithInterval(1300, kFrameIntervalUs, kWidth, kHeight,
+                                    kDelayUs1);
+    InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight,
+                                    kDelayUs2);
     overuse_detector_->CheckForOveruse(observer_);
   }
 
@@ -191,7 +191,6 @@
   static const auto reason_ = AdaptationObserverInterface::AdaptReason::kCpu;
 };
 
-
 // UsagePercent() > high_encode_usage_threshold_percent => overuse.
 // UsagePercent() < low_encode_usage_threshold_percent => underuse.
 TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
@@ -226,8 +225,8 @@
   CpuOveruseObserverImpl overuse_observer;
   observer_ = nullptr;
   overuse_detector_->SetOptions(options_);
-  InsertAndSendFramesWithInterval(
-      1200, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1200, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   overuse_detector_->CheckForOveruse(&overuse_observer);
   EXPECT_EQ(0, overuse_observer.normaluse_);
   clock_.AdvanceTimeMicros(kProcessIntervalUs);
@@ -262,8 +261,8 @@
 
 TEST_F(OveruseFrameDetectorTest, ProcessingUsage) {
   overuse_detector_->SetOptions(options_);
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_EQ(kProcessTimeUs * 100 / kFrameIntervalUs, UsagePercent());
 }
 
@@ -271,8 +270,8 @@
   overuse_detector_->SetOptions(options_);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   // Verify reset (with new width/height).
   ForceUpdate(kWidth, kHeight + 1);
@@ -283,17 +282,18 @@
   overuse_detector_->SetOptions(options_);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   InsertAndSendFramesWithInterval(
-      2, options_.frame_timeout_interval_ms *
-      rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
+      2, options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec,
+      kWidth, kHeight, kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   // Verify reset.
   InsertAndSendFramesWithInterval(
-      2, (options_.frame_timeout_interval_ms + 1) *
-      rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
+      2,
+      (options_.frame_timeout_interval_ms + 1) * rtc::kNumMicrosecsPerMillisec,
+      kWidth, kHeight, kProcessTimeUs);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
 }
@@ -301,8 +301,8 @@
 TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdating) {
   options_.min_frame_samples = 40;
   overuse_detector_->SetOptions(options_);
-  InsertAndSendFramesWithInterval(
-      40, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(40, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_EQ(InitialUsage(), UsagePercent());
   // Pass time far enough to digest all previous samples.
   clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec);
@@ -313,8 +313,8 @@
 
   // Pass time far enough to digest all previous samples, 41 in total.
   clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec);
-  InsertAndSendFramesWithInterval(
-      1, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
 }
 
@@ -326,8 +326,7 @@
 
 TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) {
   overuse_detector_->SetOptions(options_);
-  EXPECT_CALL(mock_observer_, AdaptDown(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const size_t kNumFramesEncodingDelay = 3;
   VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
@@ -350,8 +349,7 @@
 TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) {
   // >85% encoding time should trigger overuse.
   overuse_detector_->SetOptions(options_);
-  EXPECT_CALL(mock_observer_, AdaptDown(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
   VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
@@ -541,9 +539,9 @@
 
   overuse_detector_->OnTargetFramerateUpdated(kTargetFramerate);
 
-  InsertAndSendFramesWithRandomInterval(kNumFrames,
-                                        kMinIntervalUs, kMaxIntervalUs,
-                                        kWidth, kHeight, kEncodeTimeUs);
+  InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs,
+                                        kMaxIntervalUs, kWidth, kHeight,
+                                        kEncodeTimeUs);
   // Average usage 19%. Check that estimate is in the right ball park.
   // EXPECT_NEAR(UsagePercent(), 20, 10);
   EXPECT_NEAR(UsagePercent(), 20, 35);
@@ -569,9 +567,9 @@
 
   overuse_detector_->OnTargetFramerateUpdated(kTargetFramerate);
 
-  InsertAndSendFramesWithRandomInterval(kNumFrames,
-                                        kMinIntervalUs, kMaxIntervalUs,
-                                        kWidth, kHeight, kEncodeTimeUs);
+  InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs,
+                                        kMaxIntervalUs, kWidth, kHeight,
+                                        kEncodeTimeUs);
 
   // Average usage 6.6%, but since the frame_timeout_interval_ms is
   // only 1500 ms, we often reset the estimate to the initial value.
@@ -670,8 +668,8 @@
   CpuOveruseObserverImpl overuse_observer;
   observer_ = nullptr;
   overuse_detector_->SetOptions(options_);
-  InsertAndSendFramesWithInterval(
-      1200, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1200, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   overuse_detector_->CheckForOveruse(&overuse_observer);
   EXPECT_EQ(0, overuse_observer.normaluse_);
   clock_.AdvanceTimeMicros(kProcessIntervalUs);
@@ -704,8 +702,8 @@
 
 TEST_F(OveruseFrameDetectorTest2, ProcessingUsage) {
   overuse_detector_->SetOptions(options_);
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_EQ(kProcessTimeUs * 100 / kFrameIntervalUs, UsagePercent());
 }
 
@@ -713,8 +711,8 @@
   overuse_detector_->SetOptions(options_);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   // Verify reset (with new width/height).
   ForceUpdate(kWidth, kHeight + 1);
@@ -725,17 +723,18 @@
   overuse_detector_->SetOptions(options_);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
-  InsertAndSendFramesWithInterval(
-      1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   InsertAndSendFramesWithInterval(
-      2, options_.frame_timeout_interval_ms *
-      rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
+      2, options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec,
+      kWidth, kHeight, kProcessTimeUs);
   EXPECT_NE(InitialUsage(), UsagePercent());
   // Verify reset.
   InsertAndSendFramesWithInterval(
-      2, (options_.frame_timeout_interval_ms + 1) *
-      rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
+      2,
+      (options_.frame_timeout_interval_ms + 1) * rtc::kNumMicrosecsPerMillisec,
+      kWidth, kHeight, kProcessTimeUs);
   ForceUpdate(kWidth, kHeight);
   EXPECT_EQ(InitialUsage(), UsagePercent());
 }
@@ -749,16 +748,16 @@
 
   // Total time approximately 40 * 33ms = 1.3s, significantly less
   // than the 5s time constant.
-  InsertAndSendFramesWithInterval(
-      40, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(40, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
 
   // Should have started to approach correct load of 15%, but not very far.
   EXPECT_LT(UsagePercent(), InitialUsage());
   EXPECT_GT(UsagePercent(), (InitialUsage() * 3 + 15) / 4);
 
   // Run for roughly 10s more, should now be closer.
-  InsertAndSendFramesWithInterval(
-      300, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
+  InsertAndSendFramesWithInterval(300, kFrameIntervalUs, kWidth, kHeight,
+                                  kProcessTimeUs);
   EXPECT_NEAR(UsagePercent(), 20, 5);
 }
 
@@ -770,8 +769,7 @@
 
 TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) {
   overuse_detector_->SetOptions(options_);
-  EXPECT_CALL(mock_observer_, AdaptDown(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const size_t kNumFramesEncodingDelay = 3;
   VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
@@ -794,8 +792,7 @@
 TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) {
   // >85% encoding time should trigger overuse.
   overuse_detector_->SetOptions(options_);
-  EXPECT_CALL(mock_observer_, AdaptDown(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
   VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
@@ -854,8 +851,7 @@
 TEST_F(OveruseFrameDetectorTest2, NoOveruseForLargeRandomFrameInterval) {
   overuse_detector_->SetOptions(options_);
   EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0);
-  EXPECT_CALL(mock_observer_, AdaptUp(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(testing::AtLeast(1));
 
   const int kNumFrames = 500;
   const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec;
@@ -863,9 +859,9 @@
   const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec;
   const int kMaxIntervalUs = 1000 * rtc::kNumMicrosecsPerMillisec;
 
-  InsertAndSendFramesWithRandomInterval(kNumFrames,
-                                        kMinIntervalUs, kMaxIntervalUs,
-                                        kWidth, kHeight, kEncodeTimeUs);
+  InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs,
+                                        kMaxIntervalUs, kWidth, kHeight,
+                                        kEncodeTimeUs);
   // Average usage 19%. Check that estimate is in the right ball park.
   EXPECT_NEAR(UsagePercent(), 20, 10);
 }
@@ -875,8 +871,7 @@
 TEST_F(OveruseFrameDetectorTest2, NoOveruseForRandomFrameIntervalWithReset) {
   overuse_detector_->SetOptions(options_);
   EXPECT_CALL(mock_observer_, AdaptDown(_)).Times(0);
-  EXPECT_CALL(mock_observer_, AdaptUp(reason_))
-      .Times(testing::AtLeast(1));
+  EXPECT_CALL(mock_observer_, AdaptUp(reason_)).Times(testing::AtLeast(1));
 
   const int kNumFrames = 500;
   const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec;
@@ -884,9 +879,9 @@
   const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec;
   const int kMaxIntervalUs = 3000 * rtc::kNumMicrosecsPerMillisec;
 
-  InsertAndSendFramesWithRandomInterval(kNumFrames,
-                                        kMinIntervalUs, kMaxIntervalUs,
-                                        kWidth, kHeight, kEncodeTimeUs);
+  InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs,
+                                        kMaxIntervalUs, kWidth, kHeight,
+                                        kEncodeTimeUs);
 
   // Average usage 6.6%, but since the frame_timeout_interval_ms is
   // only 1500 ms, we often reset the estimate to the initial value.
diff --git a/video/payload_router_unittest.cc b/video/payload_router_unittest.cc
index 0eb898c..54bdc46 100644
--- a/video/payload_router_unittest.cc
+++ b/video/payload_router_unittest.cc
@@ -144,18 +144,15 @@
                                       encoded_image._length, nullptr, _, _))
       .Times(1)
       .WillOnce(Return(true));
-  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _))
-      .Times(0);
+  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
   EXPECT_EQ(EncodedImageCallback::Result::OK,
             payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
                 .error);
 
   // Inactive.
   payload_router.SetActive(false);
-  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _))
-      .Times(0);
-  EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _))
-      .Times(0);
+  EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
+  EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
   EXPECT_NE(EncodedImageCallback::Result::OK,
             payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
                 .error);
diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc
index c07cf8f..dd746f9 100644
--- a/video/picture_id_tests.cc
+++ b/video/picture_id_tests.cc
@@ -259,8 +259,9 @@
 
     // Use the same total bitrates when sending a single stream to avoid
     // lowering the bitrate estimate and requiring a subsequent rampup.
-    const int encoder_stream_bps = kEncoderBitrateBps / rtc::checked_cast<int>(
-        encoder_config.number_of_streams);
+    const int encoder_stream_bps =
+        kEncoderBitrateBps /
+        rtc::checked_cast<int>(encoder_config.number_of_streams);
 
     for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
       streams[i].min_bitrate_bps = encoder_stream_bps;
diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc
index 53903cf..dada445 100644
--- a/video/receive_statistics_proxy.cc
+++ b/video/receive_statistics_proxy.cc
@@ -801,8 +801,7 @@
 }
 
 void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate,
-                                                   uint32_t frameRate) {
-}
+                                                   uint32_t frameRate) {}
 
 void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
                                              size_t size_bytes,
diff --git a/video/replay.cc b/video/replay.cc
index 88a5c58..8362f59 100644
--- a/video/replay.cc
+++ b/video/replay.cc
@@ -107,8 +107,8 @@
 
 // Flag for SSRC.
 const std::string& DefaultSsrc() {
-  static const std::string ssrc = std::to_string(
-      test::CallTest::kVideoSendSsrcs[0]);
+  static const std::string ssrc =
+      std::to_string(test::CallTest::kVideoSendSsrcs[0]);
   return ssrc;
 }
 DEFINE_string(ssrc, DefaultSsrc().c_str(), "Incoming SSRC");
@@ -117,8 +117,8 @@
 }
 
 const std::string& DefaultSsrcRtx() {
-  static const std::string ssrc_rtx = std::to_string(
-      test::CallTest::kSendRtxSsrcs[0]);
+  static const std::string ssrc_rtx =
+      std::to_string(test::CallTest::kSendRtxSsrcs[0]);
   return ssrc_rtx;
 }
 DEFINE_string(ssrc_rtx, DefaultSsrcRtx().c_str(), "Incoming RTX SSRC");
@@ -128,7 +128,9 @@
 
 // Flag for abs-send-time id.
 DEFINE_int(abs_send_time_id, -1, "RTP extension ID for abs-send-time");
-static int AbsSendTimeId() { return static_cast<int>(FLAG_abs_send_time_id); }
+static int AbsSendTimeId() {
+  return static_cast<int>(FLAG_abs_send_time_id);
+}
 
 // Flag for transmission-offset id.
 DEFINE_int(transmission_offset_id,
@@ -157,7 +159,9 @@
 
 // Flag for video codec.
 DEFINE_string(codec, "VP8", "Video codec");
-static std::string Codec() { return static_cast<std::string>(FLAG_codec); }
+static std::string Codec() {
+  return static_cast<std::string>(FLAG_codec);
+}
 
 DEFINE_bool(help, false, "Print this message.");
 }  // namespace flags
@@ -254,8 +258,8 @@
         flags::DecoderBitstreamFilename().c_str()));
     receive_config.pre_decode_callback = bitstream_writer.get();
   }
-  decoder = test::CreateMatchingDecoder(flags::MediaPayloadType(),
-                                        flags::Codec());
+  decoder =
+      test::CreateMatchingDecoder(flags::MediaPayloadType(), flags::Codec());
   if (!flags::DecoderBitstreamFilename().empty()) {
     // Replace with a null decoder if we're writing the bitstream to a file
     // instead.
@@ -334,10 +338,9 @@
   fprintf(stderr, "num_packets: %d\n", num_packets);
 
   for (std::map<uint32_t, int>::const_iterator it = unknown_packets.begin();
-       it != unknown_packets.end();
-       ++it) {
-    fprintf(
-        stderr, "Packets for unknown ssrc '%u': %d\n", it->first, it->second);
+       it != unknown_packets.end(); ++it) {
+    fprintf(stderr, "Packets for unknown ssrc '%u': %d\n", it->first,
+            it->second);
   }
 
   call->DestroyVideoReceiveStream(receive_stream);
@@ -366,8 +369,8 @@
   RTC_CHECK(ValidateSsrc(webrtc::flags::FLAG_ssrc));
   RTC_CHECK(ValidateSsrc(webrtc::flags::FLAG_ssrc_rtx));
   RTC_CHECK(ValidateRtpHeaderExtensionId(webrtc::flags::FLAG_abs_send_time_id));
-  RTC_CHECK(ValidateRtpHeaderExtensionId(
-      webrtc::flags::FLAG_transmission_offset_id));
+  RTC_CHECK(
+      ValidateRtpHeaderExtensionId(webrtc::flags::FLAG_transmission_offset_id));
   RTC_CHECK(ValidateInputFilenameNotEmpty(webrtc::flags::FLAG_input_file));
 
   webrtc::test::RunTest(webrtc::RtpReplay);
diff --git a/video/report_block_stats.cc b/video/report_block_stats.cc
index 4726a46..42cd2ca 100644
--- a/video/report_block_stats.cc
+++ b/video/report_block_stats.cc
@@ -19,16 +19,13 @@
     return 0;
   }
   return ((num_lost_sequence_numbers * 255) + (num_sequence_numbers / 2)) /
-      num_sequence_numbers;
+         num_sequence_numbers;
 }
 }  // namespace
 
-
 // Helper class for rtcp statistics.
 ReportBlockStats::ReportBlockStats()
-    : num_sequence_numbers_(0),
-      num_lost_sequence_numbers_(0) {
-}
+    : num_sequence_numbers_(0), num_lost_sequence_numbers_(0) {}
 
 void ReportBlockStats::Store(const RtcpStatistics& rtcp_stats,
                              uint32_t remote_ssrc,
@@ -43,8 +40,8 @@
   block.source_ssrc = source_ssrc;
   uint32_t num_sequence_numbers = 0;
   uint32_t num_lost_sequence_numbers = 0;
-  StoreAndAddPacketIncrement(
-      block, &num_sequence_numbers, &num_lost_sequence_numbers);
+  StoreAndAddPacketIncrement(block, &num_sequence_numbers,
+                             &num_lost_sequence_numbers);
 }
 
 RTCPReportBlock ReportBlockStats::AggregateAndStore(
@@ -59,8 +56,7 @@
   for (; report_block != report_blocks.end(); ++report_block) {
     aggregate.packets_lost += report_block->packets_lost;
     aggregate.jitter += report_block->jitter;
-    StoreAndAddPacketIncrement(*report_block,
-                               &num_sequence_numbers,
+    StoreAndAddPacketIncrement(*report_block, &num_sequence_numbers,
                                &num_lost_sequence_numbers);
   }
 
@@ -105,9 +101,8 @@
   if (num_sequence_numbers_ == 0) {
     return -1;
   }
-  return FractionLost(
-      num_lost_sequence_numbers_, num_sequence_numbers_) * 100 / 255;
+  return FractionLost(num_lost_sequence_numbers_, num_sequence_numbers_) * 100 /
+         255;
 }
 
 }  // namespace webrtc
-
diff --git a/video/report_block_stats.h b/video/report_block_stats.h
index 033ba7d..b3c7cf2 100644
--- a/video/report_block_stats.h
+++ b/video/report_block_stats.h
@@ -59,4 +59,3 @@
 }  // namespace webrtc
 
 #endif  // VIDEO_REPORT_BLOCK_STATS_H_
-
diff --git a/video/report_block_stats_unittest.cc b/video/report_block_stats_unittest.cc
index 983dcfc..3880c4b 100644
--- a/video/report_block_stats_unittest.cc
+++ b/video/report_block_stats_unittest.cc
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "test/gtest.h"
 #include "video/report_block_stats.h"
+#include "test/gtest.h"
 
 namespace webrtc {
 
@@ -54,8 +54,7 @@
     ssrc12block2_.push_back(block2_2_);
   }
 
-  RtcpStatistics RtcpReportBlockToRtcpStatistics(
-      const RTCPReportBlock& stats) {
+  RtcpStatistics RtcpReportBlockToRtcpStatistics(const RTCPReportBlock& stats) {
     RtcpStatistics block;
     block.packets_lost = stats.packets_lost;
     block.fraction_lost = stats.fraction_lost;
@@ -143,4 +142,3 @@
 }
 
 }  // namespace webrtc
-
diff --git a/video/rtp_streams_synchronizer.cc b/video/rtp_streams_synchronizer.cc
index cc7f893..e46a220 100644
--- a/video/rtp_streams_synchronizer.cc
+++ b/video/rtp_streams_synchronizer.cc
@@ -25,10 +25,9 @@
   stream->latest_timestamp = info.latest_received_capture_timestamp;
   stream->latest_receive_time_ms = info.latest_receive_time_ms;
   bool new_rtcp_sr = false;
-  if (!stream->rtp_to_ntp.UpdateMeasurements(info.capture_time_ntp_secs,
-                                             info.capture_time_ntp_frac,
-                                             info.capture_time_source_clock,
-                                             &new_rtcp_sr)) {
+  if (!stream->rtp_to_ntp.UpdateMeasurements(
+          info.capture_time_ntp_secs, info.capture_time_ntp_frac,
+          info.capture_time_source_clock, &new_rtcp_sr)) {
     return false;
   }
   return true;
@@ -63,7 +62,7 @@
   RTC_DCHECK_RUN_ON(&process_thread_checker_);
   const int64_t kSyncIntervalMs = 1000;
   return kSyncIntervalMs -
-      (rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec;
+         (rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec;
 }
 
 void RtpStreamsSynchronizer::Process() {
@@ -100,18 +99,16 @@
   }
 
   TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay",
-      video_info->current_delay_ms);
+                 video_info->current_delay_ms);
   TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay",
-      audio_info->current_delay_ms);
+                 audio_info->current_delay_ms);
   TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
   int target_audio_delay_ms = 0;
   int target_video_delay_ms = video_info->current_delay_ms;
   // Calculate the necessary extra audio delay and desired total video
   // delay to get the streams in sync.
-  if (!sync_->ComputeDelays(relative_delay_ms,
-                            audio_info->current_delay_ms,
-                            &target_audio_delay_ms,
-                            &target_video_delay_ms)) {
+  if (!sync_->ComputeDelays(relative_delay_ms, audio_info->current_delay_ms,
+                            &target_audio_delay_ms, &target_video_delay_ms)) {
     return;
   }
 
diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc
index 271c56f..7f267ef 100644
--- a/video/rtp_video_stream_receiver.cc
+++ b/video/rtp_video_stream_receiver.cc
@@ -46,7 +46,7 @@
 //                 crbug.com/752886
 constexpr int kPacketBufferStartSize = 512;
 constexpr int kPacketBufferMaxSixe = 2048;
-}
+}  // namespace
 
 std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
     ReceiveStatistics* receive_statistics,
@@ -403,7 +403,9 @@
 }
 
 void RtpVideoStreamReceiver::ParseAndHandleEncapsulatingHeader(
-    const uint8_t* packet, size_t packet_length, const RTPHeader& header) {
+    const uint8_t* packet,
+    size_t packet_length,
+    const RTPHeader& header) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_task_checker_);
   if (header.payloadType == config_.rtp.red_payload_type) {
     if (packet[header.headerLength] == config_.rtp.ulpfec_payload_type) {
@@ -413,8 +415,8 @@
       NotifyReceiverOfFecPacket(header);
     }
     if (ulpfec_receiver_->AddReceivedRedPacket(
-            header, packet, packet_length,
-            config_.rtp.ulpfec_payload_type) != 0) {
+            header, packet, packet_length, config_.rtp.ulpfec_payload_type) !=
+        0) {
       return;
     }
     ulpfec_receiver_->ProcessReceivedFec();
@@ -543,8 +545,7 @@
       rtp_receive_statistics_->GetStatistician(header.ssrc);
   if (!statistician)
     return false;
-  return !in_order &&
-      statistician->IsRetransmitOfOldPacket(header);
+  return !in_order && statistician->IsRetransmitOfOldPacket(header);
 }
 
 void RtpVideoStreamReceiver::UpdateHistograms() {
diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc
index f2a3cde..2e0d258 100644
--- a/video/rtp_video_stream_receiver_unittest.cc
+++ b/video/rtp_video_stream_receiver_unittest.cc
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "test/gtest.h"
 #include "test/gmock.h"
+#include "test/gtest.h"
 
 #include "common_video/h264/h264_common.h"
 #include "media/base/mediaconstants.h"
@@ -129,8 +129,8 @@
     rtp_video_stream_receiver_ = rtc::MakeUnique<RtpVideoStreamReceiver>(
         &mock_transport_, nullptr, &packet_router_, &config_,
         rtp_receive_statistics_.get(), nullptr, process_thread_.get(),
-        &mock_nack_sender_,
-        &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_);
+        &mock_nack_sender_, &mock_key_frame_request_sender_,
+        &mock_on_complete_frame_callback_);
   }
 
   WebRtcRTPHeader GetDefaultPacket() {
@@ -219,13 +219,14 @@
   VideoCodec codec;
   codec.plType = kRedPayloadType;
   rtp_video_stream_receiver_->AddReceiveCodec(codec, {});
-  const std::vector<uint8_t> data({0x80,                // RTP version.
-                                   kRedPayloadType,     // Payload type.
-                                   0, 0, 0, 0, 0, 0,    // Don't care.
-                                   0, 0, 0x4, 0x57,     // SSRC
-                                   kRedPayloadType,     // RED header.
-                                   0, 0, 0, 0, 0        // Don't care.
-                                 });
+  const std::vector<uint8_t> data({
+      0x80,              // RTP version.
+      kRedPayloadType,   // Payload type.
+      0, 0, 0, 0, 0, 0,  // Don't care.
+      0, 0, 0x4, 0x57,   // SSRC
+      kRedPayloadType,   // RED header.
+      0, 0, 0, 0, 0      // Don't care.
+  });
   RtpPacketReceived packet;
   EXPECT_TRUE(packet.Parse(data.data(), data.size()));
   rtp_video_stream_receiver_->StartReceive();
diff --git a/video/screenshare_loopback.cc b/video/screenshare_loopback.cc
index 1b2f394..81bc346 100644
--- a/video/screenshare_loopback.cc
+++ b/video/screenshare_loopback.cc
@@ -235,9 +235,10 @@
   return FLAG_min_transmit_bitrate;
 }
 
-DEFINE_bool(generate_slides,
-           false,
-           "Whether to use randomly generated slides or read them from files.");
+DEFINE_bool(
+    generate_slides,
+    false,
+    "Whether to use randomly generated slides or read them from files.");
 bool GenerateSlides() {
   return static_cast<int>(FLAG_generate_slides);
 }
@@ -306,8 +307,12 @@
   params.screenshare[0] = {true, flags::GenerateSlides(),
                            flags::SlideChangeInterval(),
                            flags::ScrollDuration(), flags::Slides()};
-  params.analyzer = {"screenshare", 0.0, 0.0, flags::DurationSecs(),
-      flags::OutputFilename(), flags::GraphTitle()};
+  params.analyzer = {"screenshare",
+                     0.0,
+                     0.0,
+                     flags::DurationSecs(),
+                     flags::OutputFilename(),
+                     flags::GraphTitle()};
   params.pipe = pipe_config;
   params.logging = {flags::FLAG_logs, flags::RtcEventLogName(),
                     flags::RtpDumpName(), flags::EncodedFramePath()};
diff --git a/video/send_delay_stats.h b/video/send_delay_stats.h
index 71bca2d..9b9e921 100644
--- a/video/send_delay_stats.h
+++ b/video/send_delay_stats.h
@@ -15,13 +15,13 @@
 #include <memory>
 #include <set>
 
+#include "call/video_send_stream.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "modules/include/module_common_types.h"
 #include "rtc_base/criticalsection.h"
 #include "rtc_base/thread_annotations.h"
 #include "system_wrappers/include/clock.h"
 #include "video/stats_counter.h"
-#include "call/video_send_stream.h"
 
 namespace webrtc {
 
diff --git a/video/send_delay_stats_unittest.cc b/video/send_delay_stats_unittest.cc
index 174e70d..65ae961 100644
--- a/video/send_delay_stats_unittest.cc
+++ b/video/send_delay_stats_unittest.cc
@@ -107,7 +107,8 @@
   const int64_t kDelayMs1 = 5;
   const int64_t kDelayMs2 = 15;
   const int kNumSamples = kMinRequiredPeriodicSamples * kProcessIntervalMs /
-                          (kDelayMs1 + kDelayMs2) + 1;
+                              (kDelayMs1 + kDelayMs2) +
+                          1;
 
   uint16_t id = 0;
   for (int i = 0; i < kNumSamples; ++i) {
diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc
index 85afbc9..7be89b7 100644
--- a/video/send_statistics_proxy.cc
+++ b/video/send_statistics_proxy.cc
@@ -118,7 +118,6 @@
 }
 }  // namespace
 
-
 const int SendStatisticsProxy::kStatsTimeoutMs = 5000;
 
 SendStatisticsProxy::SendStatisticsProxy(
diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h
index 7239e7f..a36e9a8 100644
--- a/video/send_statistics_proxy.h
+++ b/video/send_statistics_proxy.h
@@ -16,6 +16,7 @@
 #include <string>
 #include <vector>
 
+#include "call/video_send_stream.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "modules/video_coding/include/video_codec_interface.h"
 #include "modules/video_coding/include/video_coding_defines.h"
@@ -28,7 +29,6 @@
 #include "video/report_block_stats.h"
 #include "video/stats_counter.h"
 #include "video/video_stream_encoder.h"
-#include "call/video_send_stream.h"
 
 namespace webrtc {
 
diff --git a/video/stats_counter_unittest.cc b/video/stats_counter_unittest.cc
index 3f00d24..32f8c8e 100644
--- a/video/stats_counter_unittest.cc
+++ b/video/stats_counter_unittest.cc
@@ -32,8 +32,7 @@
 
 class StatsCounterTest : public ::testing::Test {
  protected:
-  StatsCounterTest()
-      : clock_(1234) {}
+  StatsCounterTest() : clock_(1234) {}
 
   void AddSampleAndAdvance(int sample, int interval_ms, AvgCounter* counter) {
     counter->Add(sample);
diff --git a/video/stream_synchronization.cc b/video/stream_synchronization.cc
index 6b800d1..e39f122 100644
--- a/video/stream_synchronization.cc
+++ b/video/stream_synchronization.cc
@@ -31,8 +31,7 @@
     : video_stream_id_(video_stream_id),
       audio_stream_id_(audio_stream_id),
       base_target_delay_ms_(0),
-      avg_diff_ms_(0) {
-}
+      avg_diff_ms_(0) {}
 
 bool StreamSynchronization::ComputeRelativeDelay(
     const Measurements& audio_measurement,
@@ -53,7 +52,8 @@
     return false;
   }
   // Positive diff means that video_measurement is behind audio_measurement.
-  *relative_delay_ms = video_measurement.latest_receive_time_ms -
+  *relative_delay_ms =
+      video_measurement.latest_receive_time_ms -
       audio_measurement.latest_receive_time_ms -
       (video_last_capture_time_ms - audio_last_capture_time_ms);
   if (*relative_delay_ms > kMaxDeltaDelayMs ||
@@ -75,11 +75,11 @@
                       << " for stream " << audio_stream_id_;
   // Calculate the difference between the lowest possible video delay and
   // the current audio delay.
-  int current_diff_ms = current_video_delay_ms - current_audio_delay_ms +
-      relative_delay_ms;
+  int current_diff_ms =
+      current_video_delay_ms - current_audio_delay_ms + relative_delay_ms;
 
-  avg_diff_ms_ = ((kFilterLength - 1) * avg_diff_ms_ +
-      current_diff_ms) / kFilterLength;
+  avg_diff_ms_ =
+      ((kFilterLength - 1) * avg_diff_ms_ + current_diff_ms) / kFilterLength;
   if (abs(avg_diff_ms_) < kMinDeltaMs) {
     // Don't adjust if the diff is within our margin.
     return false;
@@ -124,8 +124,8 @@
   }
 
   // Make sure that video is never below our target.
-  channel_delay_.extra_video_delay_ms = std::max(
-      channel_delay_.extra_video_delay_ms, base_target_delay_ms_);
+  channel_delay_.extra_video_delay_ms =
+      std::max(channel_delay_.extra_video_delay_ms, base_target_delay_ms_);
 
   int new_video_delay_ms;
   if (channel_delay_.extra_video_delay_ms > base_target_delay_ms_) {
@@ -137,8 +137,8 @@
   }
 
   // Make sure that we don't go below the extra video delay.
-  new_video_delay_ms = std::max(
-      new_video_delay_ms, channel_delay_.extra_video_delay_ms);
+  new_video_delay_ms =
+      std::max(new_video_delay_ms, channel_delay_.extra_video_delay_ms);
 
   // Verify we don't go above the maximum allowed video delay.
   new_video_delay_ms =
@@ -154,8 +154,8 @@
   }
 
   // Make sure that we don't go below the extra audio delay.
-  new_audio_delay_ms = std::max(
-      new_audio_delay_ms, channel_delay_.extra_audio_delay_ms);
+  new_audio_delay_ms =
+      std::max(new_audio_delay_ms, channel_delay_.extra_audio_delay_ms);
 
   // Verify we don't go above the maximum allowed audio delay.
   new_audio_delay_ms =
@@ -181,13 +181,11 @@
   // Initial extra delay for audio (accounting for existing extra delay).
   channel_delay_.extra_audio_delay_ms +=
       target_delay_ms - base_target_delay_ms_;
-  channel_delay_.last_audio_delay_ms +=
-      target_delay_ms - base_target_delay_ms_;
+  channel_delay_.last_audio_delay_ms += target_delay_ms - base_target_delay_ms_;
 
   // The video delay is compared to the last value (and how much we can update
   // is limited by that as well).
-  channel_delay_.last_video_delay_ms +=
-      target_delay_ms - base_target_delay_ms_;
+  channel_delay_.last_video_delay_ms += target_delay_ms - base_target_delay_ms_;
 
   channel_delay_.extra_video_delay_ms +=
       target_delay_ms - base_target_delay_ms_;
diff --git a/video/stream_synchronization_unittest.cc b/video/stream_synchronization_unittest.cc
index f9ae37d..d3e8e10 100644
--- a/video/stream_synchronization_unittest.cc
+++ b/video/stream_synchronization_unittest.cc
@@ -31,8 +31,7 @@
 class Time {
  public:
   explicit Time(int64_t offset)
-      : kNtpJan1970(2208988800UL),
-        time_now_ms_(offset) {}
+      : kNtpJan1970(2208988800UL), time_now_ms_(offset) {}
 
   NtpTime GetNowNtp() const {
     uint32_t ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
@@ -46,13 +45,9 @@
     return frequency * time_now_ms_ / 1000 + offset;
   }
 
-  void IncreaseTimeMs(int64_t inc) {
-    time_now_ms_ += inc;
-  }
+  void IncreaseTimeMs(int64_t inc) { time_now_ms_ += inc; }
 
-  int64_t time_now_ms() const {
-    return time_now_ms_;
-  }
+  int64_t time_now_ms() const { return time_now_ms_; }
 
  private:
   // January 1970, in NTP seconds.
@@ -87,11 +82,11 @@
                       int current_audio_delay_ms,
                       int* extra_audio_delay_ms,
                       int* total_video_delay_ms) {
-    int audio_frequency = static_cast<int>(kDefaultAudioFrequency *
-                                           audio_clock_drift_ + 0.5);
+    int audio_frequency =
+        static_cast<int>(kDefaultAudioFrequency * audio_clock_drift_ + 0.5);
     int audio_offset = 0;
-    int video_frequency = static_cast<int>(kDefaultVideoFrequency *
-                                           video_clock_drift_ + 0.5);
+    int video_frequency =
+        static_cast<int>(kDefaultVideoFrequency * video_clock_drift_ + 0.5);
     bool new_sr;
     int video_offset = 0;
     StreamSynchronization::Measurements audio;
@@ -147,10 +142,8 @@
     StreamSynchronization::ComputeRelativeDelay(audio, video,
                                                 &relative_delay_ms);
     EXPECT_EQ(video_delay_ms - audio_delay_ms, relative_delay_ms);
-    return sync_->ComputeDelays(relative_delay_ms,
-                                current_audio_delay_ms,
-                                extra_audio_delay_ms,
-                                total_video_delay_ms);
+    return sync_->ComputeDelays(relative_delay_ms, current_audio_delay_ms,
+                                extra_audio_delay_ms, total_video_delay_ms);
   }
 
   // Simulate audio playback 300 ms after capture and video rendering 100 ms
@@ -170,38 +163,32 @@
     const int kNeteqDelayIncrease = 50;
     const int kNeteqDelayDecrease = 10;
 
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
     current_audio_delay_ms = extra_audio_delay_ms;
 
     send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
-                                                  video_delay_ms));
+    receive_time_->IncreaseTimeMs(1000 -
+                                  std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay + 2 * filtered_move, total_video_delay_ms);
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
     current_audio_delay_ms = extra_audio_delay_ms;
 
     send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
-                                                  video_delay_ms));
+    receive_time_->IncreaseTimeMs(1000 -
+                                  std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay + 3 * filtered_move, total_video_delay_ms);
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
@@ -209,37 +196,33 @@
     // Simulate that NetEQ introduces some audio delay.
     current_audio_delay_ms = base_target_delay + kNeteqDelayIncrease;
     send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
-                                                  video_delay_ms));
+    receive_time_->IncreaseTimeMs(1000 -
+                                  std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     filtered_move = 3 * filtered_move +
-        (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
-        kSmoothingFilter;
+                    (kNeteqDelayIncrease + audio_delay_ms - video_delay_ms) /
+                        kSmoothingFilter;
     EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
 
     // Simulate that NetEQ reduces its delay.
     current_audio_delay_ms = base_target_delay + kNeteqDelayDecrease;
     send_time_->IncreaseTimeMs(1000);
-    receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
-                                                  video_delay_ms));
+    receive_time_->IncreaseTimeMs(1000 -
+                                  std::max(audio_delay_ms, video_delay_ms));
     // Simulate base_target_delay minimum delay in the VCM.
     total_video_delay_ms = base_target_delay;
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
 
     filtered_move = filtered_move +
-        (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
-        kSmoothingFilter;
+                    (kNeteqDelayDecrease + audio_delay_ms - video_delay_ms) /
+                        kSmoothingFilter;
 
     EXPECT_EQ(base_target_delay + filtered_move, total_video_delay_ms);
     EXPECT_EQ(base_target_delay, extra_audio_delay_ms);
@@ -252,10 +235,8 @@
     int extra_audio_delay_ms = 0;
     int total_video_delay_ms = base_target_delay;
 
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay, total_video_delay_ms);
     // The audio delay is not allowed to change more than this in 1 second.
@@ -265,77 +246,73 @@
 
     send_time_->IncreaseTimeMs(1000);
     receive_time_->IncreaseTimeMs(800);
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay, total_video_delay_ms);
     // The audio delay is not allowed to change more than the half of the
     // required change in delay.
-    EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
-        current_audio_delay_ms,
-        base_target_delay + video_delay_ms - audio_delay_ms),
-        extra_audio_delay_ms);
+    EXPECT_EQ(current_extra_delay_ms +
+                  MaxAudioDelayIncrease(
+                      current_audio_delay_ms,
+                      base_target_delay + video_delay_ms - audio_delay_ms),
+              extra_audio_delay_ms);
     current_audio_delay_ms = extra_audio_delay_ms;
     current_extra_delay_ms = extra_audio_delay_ms;
 
     send_time_->IncreaseTimeMs(1000);
     receive_time_->IncreaseTimeMs(800);
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay, total_video_delay_ms);
     // The audio delay is not allowed to change more than the half of the
     // required change in delay.
-    EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
-        current_audio_delay_ms,
-        base_target_delay + video_delay_ms - audio_delay_ms),
-        extra_audio_delay_ms);
+    EXPECT_EQ(current_extra_delay_ms +
+                  MaxAudioDelayIncrease(
+                      current_audio_delay_ms,
+                      base_target_delay + video_delay_ms - audio_delay_ms),
+              extra_audio_delay_ms);
     current_extra_delay_ms = extra_audio_delay_ms;
 
     // Simulate that NetEQ for some reason reduced the delay.
     current_audio_delay_ms = base_target_delay + 10;
     send_time_->IncreaseTimeMs(1000);
     receive_time_->IncreaseTimeMs(800);
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay, total_video_delay_ms);
     // Since we only can ask NetEQ for a certain amount of extra delay, and
     // we only measure the total NetEQ delay, we will ask for additional delay
     // here to try to stay in sync.
-    EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
-        current_audio_delay_ms,
-        base_target_delay + video_delay_ms - audio_delay_ms),
-        extra_audio_delay_ms);
+    EXPECT_EQ(current_extra_delay_ms +
+                  MaxAudioDelayIncrease(
+                      current_audio_delay_ms,
+                      base_target_delay + video_delay_ms - audio_delay_ms),
+              extra_audio_delay_ms);
     current_extra_delay_ms = extra_audio_delay_ms;
 
     // Simulate that NetEQ for some reason significantly increased the delay.
     current_audio_delay_ms = base_target_delay + 350;
     send_time_->IncreaseTimeMs(1000);
     receive_time_->IncreaseTimeMs(800);
-    EXPECT_TRUE(DelayedStreams(audio_delay_ms,
-                               video_delay_ms,
-                               current_audio_delay_ms,
-                               &extra_audio_delay_ms,
+    EXPECT_TRUE(DelayedStreams(audio_delay_ms, video_delay_ms,
+                               current_audio_delay_ms, &extra_audio_delay_ms,
                                &total_video_delay_ms));
     EXPECT_EQ(base_target_delay, total_video_delay_ms);
     // The audio delay is not allowed to change more than the half of the
     // required change in delay.
-    EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
-        current_audio_delay_ms,
-        base_target_delay + video_delay_ms - audio_delay_ms),
-        extra_audio_delay_ms);
+    EXPECT_EQ(current_extra_delay_ms +
+                  MaxAudioDelayIncrease(
+                      current_audio_delay_ms,
+                      base_target_delay + video_delay_ms - audio_delay_ms),
+              extra_audio_delay_ms);
   }
 
   int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
     return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
-                     static_cast<int>(kMaxAudioDiffMs));
+                    static_cast<int>(kMaxAudioDiffMs));
   }
 
   int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
@@ -347,7 +324,7 @@
   enum { kReceiveTimeOffsetMs = 43210 };
 
   StreamSynchronization* sync_;
-  Time* send_time_;  // The simulated clock at the sender.
+  Time* send_time_;     // The simulated clock at the sender.
   Time* receive_time_;  // The simulated clock at the receiver.
   double audio_clock_drift_;
   double video_clock_drift_;
@@ -418,7 +395,7 @@
   // The audio delay is not allowed to change more than the half of the required
   // change in delay.
   EXPECT_EQ(current_extra_delay_ms +
-            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+                MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
             extra_audio_delay_ms);
   current_audio_delay_ms = extra_audio_delay_ms;
   current_extra_delay_ms = extra_audio_delay_ms;
@@ -431,7 +408,7 @@
   // The audio delay is not allowed to change more than the half of the required
   // change in delay.
   EXPECT_EQ(current_extra_delay_ms +
-            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+                MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
             extra_audio_delay_ms);
   current_extra_delay_ms = extra_audio_delay_ms;
 
@@ -446,7 +423,7 @@
   // we only measure the total NetEQ delay, we will ask for additional delay
   // here to try to
   EXPECT_EQ(current_extra_delay_ms +
-            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+                MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
             extra_audio_delay_ms);
   current_extra_delay_ms = extra_audio_delay_ms;
 
@@ -460,7 +437,7 @@
   // The audio delay is not allowed to change more than the half of the required
   // change in delay.
   EXPECT_EQ(current_extra_delay_ms +
-            MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
+                MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
             extra_audio_delay_ms);
 }
 
@@ -500,8 +477,8 @@
   sync_->SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
-                              current_audio_delay_ms,
-                              &extra_audio_delay_ms, &total_video_delay_ms));
+                              current_audio_delay_ms, &extra_audio_delay_ms,
+                              &total_video_delay_ms));
   // Triggering another call with the same values. Delay should not be modified.
   base_target_delay_ms = 2000;
   current_audio_delay_ms = base_target_delay_ms;
@@ -509,8 +486,8 @@
   sync_->SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync; don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
-                              current_audio_delay_ms,
-                              &extra_audio_delay_ms, &total_video_delay_ms));
+                              current_audio_delay_ms, &extra_audio_delay_ms,
+                              &total_video_delay_ms));
   // Changing delay value - intended to test this module only. In practice it
   // would take VoE time to adapt.
   base_target_delay_ms = 5000;
@@ -519,8 +496,8 @@
   sync_->SetTargetBufferingDelay(base_target_delay_ms);
   // We are in sync; don't change.
   EXPECT_FALSE(DelayedStreams(base_target_delay_ms, base_target_delay_ms,
-                              current_audio_delay_ms,
-                              &extra_audio_delay_ms, &total_video_delay_ms));
+                              current_audio_delay_ms, &extra_audio_delay_ms,
+                              &total_video_delay_ms));
 }
 
 TEST_F(StreamSynchronizationTest, BothDelayedAudioLaterWithBaseDelay) {
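
Note: the hunks above only re-wrap the StreamSynchronization test helpers, but the
rule they encode is easy to lose in the formatting noise: each synchronization step
may close at most a fraction (1/kSmoothingFilter) of the remaining audio/video delay
difference, capped by an absolute per-step limit. A self-contained sketch of that
helper follows; the constant values are assumptions for illustration only (the test
comments above speak of "half of the required change", suggesting kSmoothingFilter
is 2, and kMaxAudioDiffMs here is a placeholder), the real definitions live with
StreamSynchronization itself.

  #include <algorithm>
  #include <cstdio>

  // Constants assumed for illustration; the real values live with
  // StreamSynchronization.
  constexpr int kSmoothingFilter = 2;   // "half of the required change"
  constexpr int kMaxAudioDiffMs = 80;   // hypothetical absolute per-step cap

  // Mirrors the MaxAudioDelayIncrease() helper in the hunk above.
  int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
    return std::min((delay_ms - current_audio_delay_ms) / kSmoothingFilter,
                    kMaxAudioDiffMs);
  }

  int main() {
    int current = 0;
    const int target = 300;
    // Each step closes at most half the remaining gap, and never more than
    // kMaxAudioDiffMs: 80, 80, 70, 35, ... - a capped geometric approach.
    for (int step = 1; step <= 10; ++step) {
      current += MaxAudioDelayIncrease(current, target);
      std::printf("step %2d: %3d ms\n", step, current);
    }
  }
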
diff --git a/video/test/mock_video_stream_encoder.h b/video/test/mock_video_stream_encoder.h
index 37337f7..049b8c1 100644
--- a/video/test/mock_video_stream_encoder.h
+++ b/video/test/mock_video_stream_encoder.h
@@ -29,8 +29,7 @@
                void(VideoBitrateAllocationObserver*));
   MOCK_METHOD0(Stop, void());
 
-  MOCK_METHOD2(MockedConfigureEncoder,
-               void(const VideoEncoderConfig&, size_t));
+  MOCK_METHOD2(MockedConfigureEncoder, void(const VideoEncoderConfig&, size_t));
   // gtest generates implicit copy which is not allowed on VideoEncoderConfig,
   // so we can't mock ConfigureEncoder directly.
   void ConfigureEncoder(VideoEncoderConfig config,
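
Note: the comment retained in this hunk explains the pattern in use: gmock's
generated code copies its arguments, which VideoEncoderConfig disallows, so the
public ConfigureEncoder forwards to a mocked method that takes only a const
reference. A minimal sketch of that delegation with a generic move-only type; all
names below are illustrative, not WebRTC API.

  #include <cstddef>

  #include "test/gmock.h"

  // Stand-in for VideoEncoderConfig: movable but not copyable.
  struct MoveOnlyConfig {
    MoveOnlyConfig() = default;
    MoveOnlyConfig(const MoveOnlyConfig&) = delete;
    MoveOnlyConfig(MoveOnlyConfig&&) = default;
  };

  class MockEncoder {
   public:
    // gmock's generated code would copy a by-value argument, so only the
    // reference-taking method is mocked...
    MOCK_METHOD2(MockedConfigure, void(const MoveOnlyConfig&, size_t));

    // ...and the real signature forwards to it. Expectations are then set
    // on MockedConfigure, exactly as the comment in the hunk describes.
    void Configure(MoveOnlyConfig config, size_t max_payload_size) {
      MockedConfigure(config, max_payload_size);
    }
  };
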
diff --git a/video/transport_adapter.h b/video/transport_adapter.h
index 0168cc5..dd9964f 100644
--- a/video/transport_adapter.h
+++ b/video/transport_adapter.h
@@ -31,7 +31,7 @@
   void Disable();
 
  private:
-  Transport *transport_;
+  Transport* transport_;
   std::atomic<bool> enabled_;
 };
 }  // namespace internal
diff --git a/video/video_loopback.cc b/video/video_loopback.cc
index 0289c08..6405fc8 100644
--- a/video/video_loopback.cc
+++ b/video/video_loopback.cc
@@ -242,8 +242,10 @@
 
 DEFINE_bool(audio, false, "Add audio stream");
 
-DEFINE_bool(audio_video_sync, false, "Sync audio and video stream (no effect if"
-    " audio is false)");
+DEFINE_bool(audio_video_sync,
+            false,
+            "Sync audio and video stream (no effect if"
+            " audio is false)");
 
 DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");
 
@@ -308,8 +310,12 @@
   params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                     flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
   params.screenshare[0].enabled = false;
-  params.analyzer = {"video", 0.0, 0.0, flags::DurationSecs(),
-      flags::OutputFilename(), flags::GraphTitle()};
+  params.analyzer = {"video",
+                     0.0,
+                     0.0,
+                     flags::DurationSecs(),
+                     flags::OutputFilename(),
+                     flags::GraphTitle()};
   params.pipe = pipe_config;
 
   if (flags::NumStreams() > 1 && flags::Stream0().empty() &&
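
Note: the re-wrapped DEFINE_bool help text relies on a language guarantee rather
than anything clang-format-specific: adjacent string literals are concatenated
during translation, so splitting the description across lines leaves the flag text
unchanged. A minimal demonstration:

  #include <cstdio>

  int main() {
    // Adjacent string literals are concatenated during translation, so the
    // split help text above is one string by the time the flag is defined.
    const char* help =
        "Sync audio and video stream (no effect if"
        " audio is false)";
    std::printf("%s\n", help);
    // Prints: Sync audio and video stream (no effect if audio is false)
  }
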
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index 3a174c4..3dbff9f 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -609,13 +609,11 @@
 
     rtc::CritScope crit(&comparison_lock_);
     if (comparisons_.size() < kMaxComparisons) {
-      comparisons_.push_back(FrameComparison(reference, render, dropped,
-                                             reference.ntp_time_ms(),
-                                             send_time_ms, recv_time_ms,
-                                             render_time_ms, encoded_size));
+      comparisons_.push_back(FrameComparison(
+          reference, render, dropped, reference.ntp_time_ms(), send_time_ms,
+          recv_time_ms, render_time_ms, encoded_size));
     } else {
-      comparisons_.push_back(FrameComparison(dropped,
-                                             reference.ntp_time_ms(),
+      comparisons_.push_back(FrameComparison(dropped, reference.ntp_time_ms(),
                                              send_time_ms, recv_time_ms,
                                              render_time_ms, encoded_size));
     }
@@ -765,7 +763,7 @@
 
     if (worst_frame_) {
       test::PrintResult("min_psnr", "", test_label_.c_str(), worst_frame_->psnr,
-                  "dB", false);
+                        "dB", false);
     }
 
     if (receive_stream_ != nullptr) {
@@ -773,14 +771,14 @@
     }
 
     test::PrintResult("dropped_frames", "", test_label_.c_str(),
-                  dropped_frames_, "frames", false);
+                      dropped_frames_, "frames", false);
     test::PrintResult("cpu_usage", "", test_label_.c_str(),
                       GetCpuUsagePercent(), "%", false);
 
 #if defined(WEBRTC_WIN)
-      // On Linux and Mac in Resident Set some unused pages may be counted.
-      // Therefore this metric will depend on order in which tests are run and
-      // will be flaky.
+    // On Linux and Mac in Resident Set some unused pages may be counted.
+    // Therefore this metric will depend on order in which tests are run and
+    // will be flaky.
     PrintResult("memory_usage", memory_usage_, " bytes");
 #endif
 
@@ -794,8 +792,8 @@
           rtc::Pathname(output_dir, test_label_ + ".jpg").pathname();
       RTC_LOG(LS_INFO) << "Saving worst frame to " << output_path;
       test::JpegFrameWriter frame_writer(output_path);
-      RTC_CHECK(frame_writer.WriteFrame(worst_frame_->frame,
-                                        100 /*best quality*/));
+      RTC_CHECK(
+          frame_writer.WriteFrame(worst_frame_->frame, 100 /*best quality*/));
     }
 
     //  Disable quality check for quick test, as quality checks may fail
@@ -904,8 +902,9 @@
             "ssim "
             "encode_time_ms\n");
     for (const Sample& sample : samples_) {
-      fprintf(out, "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" PRIuS
-                   " %lf %lf\n",
+      fprintf(out,
+              "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" PRIuS
+              " %lf %lf\n",
               sample.dropped, sample.input_time_ms, sample.send_time_ms,
               sample.recv_time_ms, sample.render_time_ms,
               sample.encoded_frame_size, sample.psnr, sample.ssim);
@@ -1568,8 +1567,8 @@
                 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
       } else {
         RTC_NOTREACHED() << "Automatic scaling not supported for codec "
-                         << params_.video[video_idx].codec
-                         << ", stream " << video_idx;
+                         << params_.video[video_idx].codec << ", stream "
+                         << video_idx;
       }
     }
     total_streams_used += num_video_substreams;
@@ -2110,9 +2109,10 @@
   }
   audio_send_config_.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
       kAudioSendPayloadType,
-      {"OPUS", 48000, 2,
-       {{"usedtx", (params_.audio.dtx ? "1" : "0")},
-         {"stereo", "1"}}});
+      {"OPUS",
+       48000,
+       2,
+       {{"usedtx", (params_.audio.dtx ? "1" : "0")}, {"stereo", "1"}}});
   audio_send_config_.encoder_factory = audio_encoder_factory_;
   audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
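
Note: the fprintf hunk above combines that same literal concatenation with the
<cinttypes> format macros: PRId64 expands to the correct printf specifier for
int64_t on each platform and is spliced into the format string. (PRIuS appears to
be WebRTC's analogous macro for size_t; the portable standard form is %zu, used in
this sketch.)

  #include <cinttypes>
  #include <cstdint>
  #include <cstdio>

  int main() {
    // PRId64 expands to the right conversion specifier for int64_t and is
    // pasted into the format string by literal concatenation.
    int64_t send_time_ms = 1234;
    size_t encoded_size = 56789;
    std::printf("send %" PRId64 " ms, size %zu bytes\n", send_time_ms,
                encoded_size);
  }
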
 
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
index f5c7e2a..3b3d84f 100644
--- a/video/video_receive_stream.cc
+++ b/video/video_receive_stream.cc
@@ -300,10 +300,9 @@
   // function itself, another in GetChannel() and a third in
   // GetPlayoutTimestamp.  Seems excessive.  Anyhow, I'm assuming the function
   // succeeds most of the time, which leads to grabbing a fourth lock.
-  if (rtp_stream_sync_.GetStreamSyncOffsetInMs(video_frame.timestamp(),
-                                               video_frame.render_time_ms(),
-                                               &sync_offset_ms,
-                                               &estimated_freq_khz)) {
+  if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
+          video_frame.timestamp(), video_frame.render_time_ms(),
+          &sync_offset_ms, &estimated_freq_khz)) {
     // TODO(tommi): OnSyncOffsetUpdated grabs a lock.
     stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms, estimated_freq_khz);
   }
@@ -389,9 +388,7 @@
   RtpRtcp* rtp_rtcp = rtp_video_stream_receiver_.rtp_rtcp();
   RTC_DCHECK(rtp_rtcp);
   if (rtp_rtcp->RemoteNTP(&info.capture_time_ntp_secs,
-                          &info.capture_time_ntp_frac,
-                          nullptr,
-                          nullptr,
+                          &info.capture_time_ntp_frac, nullptr, nullptr,
                           &info.capture_time_source_clock) != 0) {
     return absl::nullopt;
   }
diff --git a/video/video_receive_stream_unittest.cc b/video/video_receive_stream_unittest.cc
index db2ad3e..081bbb5 100644
--- a/video/video_receive_stream_unittest.cc
+++ b/video/video_receive_stream_unittest.cc
@@ -10,8 +10,8 @@
 
 #include <vector>
 
-#include "test/gtest.h"
 #include "test/gmock.h"
+#include "test/gtest.h"
 
 #include "api/video_codecs/video_decoder.h"
 #include "call/rtp_stream_receiver_controller.h"
@@ -90,8 +90,8 @@
     config_.decoders.push_back(null_decoder);
 
     video_receive_stream_.reset(new webrtc::internal::VideoReceiveStream(
-        &rtp_stream_receiver_controller_, kDefaultNumCpuCores,
-        &packet_router_, config_.Copy(), process_thread_.get(), &call_stats_));
+        &rtp_stream_receiver_controller_, kDefaultNumCpuCores, &packet_router_,
+        config_.Copy(), process_thread_.get(), &call_stats_));
   }
 
  protected:
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index 3eb2d76..bb6bcb5 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -16,7 +16,6 @@
 
 namespace webrtc {
 
-
 namespace {
 
 size_t CalculateMaxHeaderSize(const VideoSendStream::Config::Rtp& config) {
@@ -54,7 +53,6 @@
 
 namespace internal {
 
-
 VideoSendStream::VideoSendStream(
     int num_cpu_cores,
     ProcessThread* module_process_thread,
@@ -80,8 +78,7 @@
   RTC_DCHECK(config_.encoder_settings.encoder_factory);
 
   video_stream_encoder_ = rtc::MakeUnique<VideoStreamEncoder>(
-      num_cpu_cores, &stats_proxy_,
-      config_.encoder_settings,
+      num_cpu_cores, &stats_proxy_, config_.encoder_settings,
       config_.pre_encode_callback,
       rtc::MakeUnique<OveruseFrameDetector>(&stats_proxy_));
   // TODO(srte): Initialization should not be done posted on a task queue.
diff --git a/video/video_send_stream.h b/video/video_send_stream.h
index a41add8..a282853 100644
--- a/video/video_send_stream.h
+++ b/video/video_send_stream.h
@@ -23,8 +23,8 @@
 #include "rtc_base/criticalsection.h"
 #include "rtc_base/event.h"
 #include "rtc_base/task_queue.h"
-#include "video/send_delay_stats.h"
 #include "video/payload_router.h"
+#include "video/send_delay_stats.h"
 #include "video/send_statistics_proxy.h"
 #include "video/video_stream_encoder.h"
 
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index a11d05e..2bb1cbd 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -67,7 +67,10 @@
 };
 }  // namespace test
 
-enum VideoFormat { kGeneric, kVP8, };
+enum VideoFormat {
+  kGeneric,
+  kVP8,
+};
 
 void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
                              const std::vector<VideoFrame>& frames2);
@@ -1169,8 +1172,8 @@
 
   // Don't auto increment if FEC is used; continue sending frame size until
   // a FEC packet has been received.
-  FrameFragmentationTest test(
-      kMaxPacketSize, start, stop, format == kGeneric, with_fec);
+  FrameFragmentationTest test(kMaxPacketSize, start, stop, format == kGeneric,
+                              with_fec);
 
   RunBaseTest(&test);
 }
@@ -1357,8 +1360,7 @@
         : SendTest(kDefaultTimeoutMs),
           clock_(Clock::GetRealTimeClock()),
           last_packet_time_ms_(-1),
-          capturer_(nullptr) {
-    }
+          capturer_(nullptr) {}
 
    private:
     Action OnSendRtp(const uint8_t* packet, size_t length) override {
@@ -1530,12 +1532,9 @@
         EXPECT_EQ(1u, stats.substreams.size());
         int total_bitrate_bps =
             stats.substreams.begin()->second.total_bitrate_bps;
-        test::PrintResult("bitrate_stats_",
-                          "min_transmit_bitrate_low_remb",
-                          "bitrate_bps",
-                          static_cast<size_t>(total_bitrate_bps),
-                          "bps",
-                          false);
+        test::PrintResult("bitrate_stats_", "min_transmit_bitrate_low_remb",
+                          "bitrate_bps", static_cast<size_t>(total_bitrate_bps),
+                          "bps", false);
         if (total_bitrate_bps > kHighBitrateBps) {
           rtp_rtcp_->SetRemb(kRembBitrateBps,
                              std::vector<uint32_t>(1, header.ssrc));
@@ -2115,19 +2114,13 @@
 
   EXPECT_TRUE(encoder.WaitForEncoderInit());
 
-  task_queue_.SendTask([this]() {
-    video_send_stream_->Start();
-  });
+  task_queue_.SendTask([this]() { video_send_stream_->Start(); });
   EXPECT_TRUE(encoder.WaitBitrateChanged(true));
 
-  task_queue_.SendTask([this]() {
-    video_send_stream_->Stop();
-  });
+  task_queue_.SendTask([this]() { video_send_stream_->Stop(); });
   EXPECT_TRUE(encoder.WaitBitrateChanged(false));
 
-  task_queue_.SendTask([this]() {
-    video_send_stream_->Start();
-  });
+  task_queue_.SendTask([this]() { video_send_stream_->Start(); });
   EXPECT_TRUE(encoder.WaitBitrateChanged(true));
 
   task_queue_.SendTask([this]() {
@@ -2707,9 +2700,10 @@
 TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) {
   class RtcpSenderReportTest : public test::SendTest {
    public:
-    RtcpSenderReportTest() : SendTest(kDefaultTimeoutMs),
-                             rtp_packets_sent_(0),
-                             media_bytes_sent_(0) {}
+    RtcpSenderReportTest()
+        : SendTest(kDefaultTimeoutMs),
+          rtp_packets_sent_(0),
+          media_bytes_sent_(0) {}
 
    private:
     Action OnSendRtp(const uint8_t* packet, size_t length) override {
@@ -2795,8 +2789,8 @@
       EXPECT_EQ(static_cast<unsigned int>(kScreencastMaxTargetBitrateDeltaKbps),
                 config->maxBitrate - config->targetBitrate);
       observation_complete_.Set();
-      return test::FakeEncoder::InitEncode(
-          config, number_of_cores, max_payload_size);
+      return test::FakeEncoder::InitEncode(config, number_of_cores,
+                                           max_payload_size);
     }
     void ModifyVideoConfigs(
         VideoSendStream::Config* send_config,
@@ -3008,8 +3002,7 @@
   static const struct {
     int width;
     int height;
-  } kEncodedResolution[kNumStreams] = {
-      {241, 181}, {300, 121}, {121, 221}};
+  } kEncodedResolution[kNumStreams] = {{241, 181}, {300, 121}, {121, 221}};
   class ScreencastTargetBitrateTest : public test::SendTest,
                                       public test::FakeEncoder {
    public:
@@ -3142,7 +3135,7 @@
     send_config->rtp.payload_type = kVp9PayloadType;
     ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
     encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
-      VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
+        VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
     EXPECT_EQ(1u, encoder_config->number_of_streams);
     encoder_config->video_stream_factory =
         new rtc::RefCountedObject<VideoStreamFactory>(
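
Note: the VideoFormat hunk earlier in this file appears to be an example of
steering clang-format from the code itself: a trailing comma after the last
enumerator (as after the last element of a braced list) makes the formatter keep
one entry per line instead of collapsing the body. A compilable illustration:

  #include <cstdio>

  // The trailing comma after kVP8 is what keeps clang-format from collapsing
  // this enum back onto a single line; without it, a short enum like
  // `enum VideoFormat { kGeneric, kVP8 };` is the stable formatting.
  enum VideoFormat {
    kGeneric,
    kVP8,
  };

  int main() {
    VideoFormat format = kVP8;
    std::printf("format=%d\n", static_cast<int>(format));
  }
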
diff --git a/video/video_stream_decoder.cc b/video/video_stream_decoder.cc
index 86810dd..ce84c75 100644
--- a/video/video_stream_decoder.cc
+++ b/video/video_stream_decoder.cc
@@ -40,8 +40,7 @@
 
   static const int kMaxPacketAgeToNack = 450;
   static const int kMaxNackListSize = 250;
-  video_receiver_->SetNackSettings(kMaxNackListSize,
-                                   kMaxPacketAgeToNack, 0);
+  video_receiver_->SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack, 0);
   video_receiver_->RegisterReceiveCallback(this);
   video_receiver_->RegisterFrameTypeCallback(vcm_frame_type_callback);
   video_receiver_->RegisterReceiveStatisticsCallback(this);
@@ -84,7 +83,7 @@
 }
 
 int32_t VideoStreamDecoder::ReceivedDecodedReferenceFrame(
-  const uint64_t picture_id) {
+    const uint64_t picture_id) {
   RTC_NOTREACHED();
   return 0;
 }
diff --git a/video/video_stream_decoder_impl.cc b/video/video_stream_decoder_impl.cc
index 1bbf943..19e75a8 100644
--- a/video/video_stream_decoder_impl.cc
+++ b/video/video_stream_decoder_impl.cc
@@ -203,11 +203,10 @@
               Add<kFrameTimestampsMemory>(next_frame_timestamps_index_, 1);
         });
 
-    int32_t decode_result =
-        decoder->Decode(frame->EncodedImage(),
-                        false,    // missing_frame
-                        nullptr,  // codec specific info
-                        frame->RenderTimeMs());
+    int32_t decode_result = decoder->Decode(frame->EncodedImage(),
+                                            false,    // missing_frame
+                                            nullptr,  // codec specific info
+                                            frame->RenderTimeMs());
 
     return decode_result == WEBRTC_VIDEO_CODEC_OK ? kOk : kDecodeFailure;
   }
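
Note: the decoder->Decode() hunk shows another steering mechanism: clang-format
cannot move trailing comments, so annotating arguments with // comments pins one
argument per line even where bin-packing would otherwise apply. A standalone
sketch; Decode here is a stand-in, not the VideoDecoder API.

  #include <cstdint>
  #include <cstdio>

  // The trailing comments at the call site below pin the line breaks:
  // clang-format will not reflow a line that ends in a comment.
  int32_t Decode(const char* image,
                 bool missing_frame,
                 const void* codec_info,
                 int64_t render_time_ms) {
    std::printf("%s missing=%d info=%p render=%lld\n", image, missing_frame,
                codec_info, static_cast<long long>(render_time_ms));
    return 0;
  }

  int main() {
    return Decode("frame",
                  false,    // missing_frame
                  nullptr,  // codec specific info
                  42);
  }
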
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 6aee776..3bf45ca 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -440,15 +440,15 @@
   // when C++14 lambda is allowed.
   struct ConfigureEncoderTask {
     void operator()() {
-      encoder->ConfigureEncoderOnTaskQueue(
-          std::move(config), max_data_payload_length);
+      encoder->ConfigureEncoderOnTaskQueue(std::move(config),
+                                           max_data_payload_length);
     }
     VideoStreamEncoder* encoder;
     VideoEncoderConfig config;
     size_t max_data_payload_length;
   };
-  encoder_queue_.PostTask(ConfigureEncoderTask{
-      this, std::move(config), max_data_payload_length});
+  encoder_queue_.PostTask(
+      ConfigureEncoderTask{this, std::move(config), max_data_payload_length});
 }
 
 void VideoStreamEncoder::ConfigureEncoderOnTaskQueue(
@@ -470,8 +470,9 @@
   // The codec configuration depends on incoming video frame size.
   if (last_frame_info_) {
     ReconfigureEncoder();
-  } else if (settings_.encoder_factory->QueryVideoEncoder(
-      encoder_config_.video_format).has_internal_source) {
+  } else if (settings_.encoder_factory
+                 ->QueryVideoEncoder(encoder_config_.video_format)
+                 .has_internal_source) {
     last_frame_info_ = VideoFrameInfo(176, 144, false);
     ReconfigureEncoder();
   }
@@ -502,8 +503,8 @@
   crop_height_ = last_frame_info_->height - highest_stream_height;
 
   VideoCodec codec;
-  if (!VideoCodecInitializer::SetupCodec(
-          encoder_config_, streams, &codec, &rate_allocator_)) {
+  if (!VideoCodecInitializer::SetupCodec(encoder_config_, streams, &codec,
+                                         &rate_allocator_)) {
     RTC_LOG(LS_ERROR) << "Failed to create encoder configuration.";
   }
 
@@ -573,8 +574,7 @@
   video_sender_.UpdateChannelParameters(rate_allocator_.get(),
                                         bitrate_observer_);
 
-  stats_proxy_->OnEncoderReconfigured(
-      encoder_config_, streams);
+  stats_proxy_->OnEncoderReconfigured(encoder_config_, streams);
 
   pending_encoder_reconfiguration_ = false;
 
@@ -795,7 +795,6 @@
   }
   initial_rampup_ = kMaxInitialFramedrop;
 
-
   if (EncoderPaused()) {
     // Storing references to a native buffer risks blocking frame capture.
     if (video_frame.video_frame_buffer()->type() !=
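
Note: the ConfigureEncoderTask struct touched above exists, per its own comment,
only until a C++14 move-capturing lambda is allowed; the struct hand-writes what
init-capture does automatically. A standalone sketch of the equivalent lambda,
with a plain std::function queue standing in for the encoder task queue and all
names illustrative:

  #include <cstddef>
  #include <cstdio>
  #include <functional>
  #include <string>
  #include <utility>
  #include <vector>

  // Stand-in for VideoEncoderConfig's state.
  struct Config {
    std::string payload;
  };

  int main() {
    std::vector<std::function<void()>> queue;  // stands in for the task queue

    Config config{"1080p"};
    size_t max_data_payload_length = 1200;

    // C++14 init-capture moves `config` into the closure - the hand-written
    // ConfigureEncoderTask struct above emulates exactly this under C++11.
    queue.emplace_back(
        [config = std::move(config), max_data_payload_length]() {
          std::printf("configure: %s, max payload %zu\n",
                      config.payload.c_str(), max_data_payload_length);
        });

    for (auto& task : queue) task();
  }
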
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index 0ea6dec..76ae07e 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -104,12 +104,8 @@
 
   class VideoFrameInfo {
    public:
-    VideoFrameInfo(int width,
-                   int height,
-                   bool is_texture)
-        : width(width),
-          height(height),
-          is_texture(is_texture) {}
+    VideoFrameInfo(int width, int height, bool is_texture)
+        : width(width), height(height), is_texture(is_texture) {}
     int width;
     int height;
     bool is_texture;
@@ -204,8 +200,7 @@
   vcm::VideoSender video_sender_ RTC_GUARDED_BY(&encoder_queue_);
   const std::unique_ptr<OveruseFrameDetector> overuse_detector_
       RTC_PT_GUARDED_BY(&encoder_queue_);
-  std::unique_ptr<QualityScaler> quality_scaler_
-      RTC_GUARDED_BY(&encoder_queue_)
+  std::unique_ptr<QualityScaler> quality_scaler_ RTC_GUARDED_BY(&encoder_queue_)
       RTC_PT_GUARDED_BY(&encoder_queue_);
 
   SendStatisticsProxy* const stats_proxy_;
@@ -218,8 +213,7 @@
   std::unique_ptr<VideoEncoder> encoder_ RTC_GUARDED_BY(&encoder_queue_)
       RTC_PT_GUARDED_BY(&encoder_queue_);
   std::unique_ptr<VideoBitrateAllocator> rate_allocator_
-      RTC_GUARDED_BY(&encoder_queue_)
-      RTC_PT_GUARDED_BY(&encoder_queue_);
+      RTC_GUARDED_BY(&encoder_queue_) RTC_PT_GUARDED_BY(&encoder_queue_);
   // The maximum frame rate of the current codec configuration, as determined
   // at the last ReconfigureEncoder() call.
   int max_framerate_ RTC_GUARDED_BY(&encoder_queue_);
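
Note: the RTC_GUARDED_BY / RTC_PT_GUARDED_BY pairs that clang-format now packs
onto single lines are, as I understand them, WebRTC's wrappers over Clang's
thread-safety attributes: guarded_by protects the member itself, pt_guarded_by
what a pointer member points to. A minimal sketch using the raw attributes;
compile with clang++ -std=c++14 -Wthread-safety, other compilers ignore them.

  #include <memory>
  #include <mutex>
  #include <utility>

  class __attribute__((capability("mutex"))) Lock {
   public:
    void Acquire() __attribute__((acquire_capability())) { m_.lock(); }
    void Release() __attribute__((release_capability())) { m_.unlock(); }

   private:
    std::mutex m_;
  };

  class Encoder {
   public:
    void SetRateAllocator(std::unique_ptr<int> allocator) {
      lock_.Acquire();
      rate_allocator_ = std::move(allocator);  // OK: lock_ is held.
      lock_.Release();
    }

   private:
    Lock lock_;
    // guarded_by: the member itself needs lock_; pt_guarded_by: so does
    // whatever it points to. Accessing either without the lock is flagged
    // by -Wthread-safety.
    std::unique_ptr<int> rate_allocator_ __attribute__((guarded_by(lock_)))
        __attribute__((pt_guarded_by(lock_)));
  };

  int main() {
    Encoder encoder;
    encoder.SetRateAllocator(std::make_unique<int>(0));
  }
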
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index d83322d..7324b80 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -91,16 +91,16 @@
 
 class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
  public:
-  VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy,
-                      const VideoSendStream::Config::EncoderSettings& settings)
-      : VideoStreamEncoder(
-            1 /* number_of_cores */,
-            stats_proxy,
-            settings,
-            nullptr /* pre_encode_callback */,
-            std::unique_ptr<OveruseFrameDetector>(
-                overuse_detector_proxy_ = new CpuOveruseDetectorProxy(
-                    stats_proxy))) {}
+  VideoStreamEncoderUnderTest(
+      SendStatisticsProxy* stats_proxy,
+      const VideoSendStream::Config::EncoderSettings& settings)
+      : VideoStreamEncoder(1 /* number_of_cores */,
+                           stats_proxy,
+                           settings,
+                           nullptr /* pre_encode_callback */,
+                           std::unique_ptr<OveruseFrameDetector>(
+                               overuse_detector_proxy_ =
+                                   new CpuOveruseDetectorProxy(stats_proxy))) {}
 
   void PostTaskAndWait(bool down, AdaptReason reason) {
     rtc::Event event(false, false);
@@ -115,9 +115,7 @@
   // encoder queue is not blocked before we start sending it frames.
   void WaitUntilTaskQueueIsIdle() {
     rtc::Event event(false, false);
-    encoder_queue()->PostTask([&event] {
-      event.Set();
-    });
+    encoder_queue()->PostTask([&event] { event.Set(); });
     ASSERT_TRUE(event.Wait(5000));
   }
 
@@ -159,7 +157,6 @@
   const int framerate_;
 };
 
-
 class AdaptingFrameForwarder : public test::FrameForwarder {
  public:
   AdaptingFrameForwarder() : adaptation_enabled_(false) {}
@@ -2560,8 +2557,8 @@
         sink_.WaitForEncodedFrame(timestamp_ms);
       }
       timestamp_ms += kFrameIntervalMs;
-      fake_clock_.AdvanceTimeMicros(
-          kFrameIntervalMs * rtc::kNumMicrosecsPerMillisec);
+      fake_clock_.AdvanceTimeMicros(kFrameIntervalMs *
+                                    rtc::kNumMicrosecsPerMillisec);
     }
     // ...and then try to adapt again.
     video_stream_encoder_->TriggerCpuOveruse();
@@ -2969,9 +2966,8 @@
         int width,
         int height,
         const VideoEncoderConfig& encoder_config) override {
-      std::vector<VideoStream> streams =
-          test::CreateVideoStreams(width - width % 4, height - height % 4,
-                                   encoder_config);
+      std::vector<VideoStream> streams = test::CreateVideoStreams(
+          width - width % 4, height - height % 4, encoder_config);
       for (VideoStream& stream : streams) {
         stream.num_temporal_layers = num_temporal_layers_;
         stream.max_framerate = framerate_;
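
Note: WaitUntilTaskQueueIsIdle() above is the post-and-wait idiom that
clang-format collapsed to one line: post a task whose only job is to signal, and
once it runs, everything queued before it has completed. A standalone sketch with
std:: primitives, since rtc::Event and the encoder queue are not needed to show
the shape:

  #include <condition_variable>
  #include <cstdio>
  #include <mutex>
  #include <thread>

  int main() {
    std::mutex m;
    std::condition_variable cv;
    bool idle = false;

    // The worker thread stands in for the encoder queue: it runs queued
    // work in order, then executes the posted "signal" task.
    std::thread queue([&] {
      std::printf("previously queued work runs first\n");
      {
        std::lock_guard<std::mutex> lock(m);
        idle = true;  // the event.Set() of the posted task
      }
      cv.notify_one();
    });

    // The event.Wait() side: returns only after everything posted earlier
    // has executed.
    std::unique_lock<std::mutex> lock(m);
    cv.wait(lock, [&] { return idle; });
    std::printf("queue is idle\n");

    lock.unlock();
    queue.join();
  }
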