Format all Java in WebRTC.
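
Mechanical reformat of the Java sources, presumably produced with
clang-format (e.g. via depot_tools' "git cl format"; the exact command is
an assumption, not recorded here). No functional changes intended.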

BUG=webrtc:6419
TBR=henrika@webrtc.org

Review-Url: https://codereview.webrtc.org/2377003002
Cr-Commit-Position: refs/heads/master@{#14432}
diff --git a/webrtc/androidjunit/src/org/webrtc/CameraEnumerationTest.java b/webrtc/androidjunit/src/org/webrtc/CameraEnumerationTest.java
index 5ec2ac4..c9396fc 100644
--- a/webrtc/androidjunit/src/org/webrtc/CameraEnumerationTest.java
+++ b/webrtc/androidjunit/src/org/webrtc/CameraEnumerationTest.java
@@ -31,24 +31,20 @@
   @Test
   public void testGetClosestSupportedFramerateRange() {
     assertEquals(new FramerateRange(10000, 30000),
-        getClosestSupportedFramerateRange(
-            Arrays.asList(new FramerateRange(10000, 30000),
-                          new FramerateRange(30000, 30000)),
-            30 /* requestedFps */));
+        getClosestSupportedFramerateRange(Arrays.asList(new FramerateRange(10000, 30000),
+                                              new FramerateRange(30000, 30000)),
+                     30 /* requestedFps */));
 
     assertEquals(new FramerateRange(10000, 20000),
         getClosestSupportedFramerateRange(
-            Arrays.asList(new FramerateRange(0, 30000),
-                          new FramerateRange(10000, 20000),
-                          new FramerateRange(14000, 16000),
-                          new FramerateRange(15000, 15000)),
-            15 /* requestedFps */));
+                     Arrays.asList(new FramerateRange(0, 30000), new FramerateRange(10000, 20000),
+                         new FramerateRange(14000, 16000), new FramerateRange(15000, 15000)),
+                     15 /* requestedFps */));
 
     assertEquals(new FramerateRange(10000, 20000),
         getClosestSupportedFramerateRange(
-            Arrays.asList(new FramerateRange(15000, 15000),
-                          new FramerateRange(10000, 20000),
-                          new FramerateRange(10000, 30000)),
-            10 /* requestedFps */));
+                     Arrays.asList(new FramerateRange(15000, 15000),
+                         new FramerateRange(10000, 20000), new FramerateRange(10000, 30000)),
+                     10 /* requestedFps */));
   }
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
index 47b4641..9b53ce4 100644
--- a/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
+++ b/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -33,8 +33,7 @@
     }
   }
 
-  private static native long nativeAddSink(
-      String dirPath, int maxFileSize, int severity);
+  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
   private static native void nativeDeleteSink(long nativeSink);
   private static native byte[] nativeGetLogData(String dirPath);
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera1Capturer.java b/webrtc/api/android/java/src/org/webrtc/Camera1Capturer.java
index 05c3c29..fba5711 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera1Capturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera1Capturer.java
@@ -19,20 +19,20 @@
 public class Camera1Capturer extends CameraCapturer {
   private final boolean captureToTexture;
 
-  public Camera1Capturer(String cameraName, CameraEventsHandler eventsHandler,
-      boolean captureToTexture) {
+  public Camera1Capturer(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
 
     this.captureToTexture = captureToTexture;
   }
 
   @Override
-  protected void createCameraSession(
-      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate) {
-    Camera1Session.create(
-      createSessionCallback, events, captureToTexture, applicationContext, surfaceTextureHelper,
-      Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
+        framerate);
   }
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java b/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java
index d96595e..518ca6c 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java
@@ -70,8 +70,8 @@
   }
 
   @Override
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
     return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
   }
 
@@ -135,7 +135,7 @@
 
     final long endTimeMs = SystemClock.elapsedRealtime();
     Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
-        + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+            + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
     return formatList;
   }
 
@@ -181,7 +181,6 @@
 
     String facing =
         (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
-    return "Camera " + index + ", Facing " + facing
-        + ", Orientation " + info.orientation;
+    return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
   }
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera1Session.java b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java
index 9081999..11b8b78 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera1Session.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera1Session.java
@@ -34,7 +34,7 @@
   private static final Histogram camera1StopTimeMsHistogram =
       Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
 
-  private static enum SessionState { RUNNING, STOPPED };
+  private static enum SessionState { RUNNING, STOPPED }
 
   private final Handler cameraThreadHandler;
   private final Events events;
@@ -54,11 +54,10 @@
   private SessionState state;
   private boolean firstFrameReported = false;
 
-  public static void create(
-      final CreateSessionCallback callback, final Events events,
+  public static void create(final CreateSessionCallback callback, final Events events,
       final boolean captureToTexture, final Context applicationContext,
-      final SurfaceTextureHelper surfaceTextureHelper,
-      final int cameraId, final int width, final int height, final int framerate) {
+      final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
+      final int height, final int framerate) {
     final long constructionTimeNs = System.nanoTime();
     Logging.d(TAG, "Open camera " + cameraId);
     events.onCameraOpening();
@@ -83,8 +82,8 @@
     android.hardware.Camera.getCameraInfo(cameraId, info);
 
     final android.hardware.Camera.Parameters parameters = camera.getParameters();
-    final CaptureFormat captureFormat = findClosestCaptureFormat(
-        parameters, width, height, framerate);
+    final CaptureFormat captureFormat =
+        findClosestCaptureFormat(parameters, width, height, framerate);
     final Size pictureSize = findClosestPictureSize(parameters, width, height);
 
     updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
@@ -101,10 +100,9 @@
     // Calculate orientation manually and send it as CVO instead.
     camera.setDisplayOrientation(0 /* degrees */);
 
-    callback.onDone(new Camera1Session(
-        events, captureToTexture, applicationContext, surfaceTextureHelper,
-        cameraId, width, height, framerate,
-        camera, info, captureFormat, constructionTimeNs));
+    callback.onDone(
+        new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
+            cameraId, width, height, framerate, camera, info, captureFormat, constructionTimeNs));
   }
 
   private static void updateCameraParameters(android.hardware.Camera camera,
@@ -136,27 +134,22 @@
     Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
 
     final CaptureFormat.FramerateRange fpsRange =
-        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
-            supportedFramerates, framerate);
+        CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
 
     final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
-        width, height);
+        Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
 
     return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
   }
 
-  private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
-    int width, int height) {
+  private static Size findClosestPictureSize(
+      android.hardware.Camera.Parameters parameters, int width, int height) {
     return CameraEnumerationAndroid.getClosestSupportedSize(
-        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
-        width, height);
+        Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
   }
 
-  private Camera1Session(
-      Events events, boolean captureToTexture,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      int cameraId, int width, int height, int framerate,
+  private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, int cameraId, int width, int height, int framerate,
       android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
       CaptureFormat captureFormat, long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
@@ -186,8 +179,7 @@
       final long stopStartTime = System.nanoTime();
       state = SessionState.STOPPED;
       stopInternal();
-      final int stopTimeMs =
-          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
       camera1StopTimeMsHistogram.addSample(stopTimeMs);
     }
   }
@@ -312,9 +304,8 @@
   private int getDeviceOrientation() {
     int orientation = 0;
 
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
-    switch(wm.getDefaultDisplay().getRotation()) {
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
       case Surface.ROTATION_90:
         orientation = 90;
         break;
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java b/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java
index 78d1f04..7718f5a 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java
@@ -27,14 +27,11 @@
   }
 
   @Override
-  protected void createCameraSession(
-      CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate) {
-    Camera2Session.create(
-        createSessionCallback, events,
-        applicationContext, cameraManager,
-        surfaceTextureHelper,
-        cameraName, width, height, framerate);
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+        surfaceTextureHelper, cameraName, width, height, framerate);
   }
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java b/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java
index 3ce85fe..8171377 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java
@@ -63,22 +63,20 @@
 
   @Override
   public boolean isFrontFacing(String deviceName) {
-    CameraCharacteristics characteristics
-        = getCameraCharacteristics(deviceName);
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
 
     return characteristics != null
         && characteristics.get(CameraCharacteristics.LENS_FACING)
-            == CameraMetadata.LENS_FACING_FRONT;
+        == CameraMetadata.LENS_FACING_FRONT;
   }
 
   @Override
   public boolean isBackFacing(String deviceName) {
-    CameraCharacteristics characteristics
-        = getCameraCharacteristics(deviceName);
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
 
     return characteristics != null
         && characteristics.get(CameraCharacteristics.LENS_FACING)
-            == CameraMetadata.LENS_FACING_BACK;
+        == CameraMetadata.LENS_FACING_BACK;
   }
 
   @Override
@@ -87,8 +85,8 @@
   }
 
   @Override
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
     return new Camera2Capturer(context, deviceName, eventsHandler);
   }
 
@@ -132,9 +130,9 @@
           return false;
         }
       }
-    // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
-    // catch statement with an Exception from a newer API, even if the code is never executed.
-    // https://code.google.com/p/android/issues/detail?id=209129
+      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+      // catch statement with an Exception from a newer API, even if the code is never executed.
+      // https://code.google.com/p/android/issues/detail?id=209129
     } catch (/* CameraAccessException */ AndroidException e) {
       Logging.e(TAG, "Camera access exception: " + e);
       return false;
@@ -149,10 +147,9 @@
     return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
   }
 
-  static List<Size> getSupportedSizes(
-      CameraCharacteristics cameraCharacteristics) {
+  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
     final StreamConfigurationMap streamMap =
-          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     final int supportLevel =
         cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
 
@@ -161,8 +158,8 @@
 
     // Video may be stretched pre LMR1 on legacy implementations.
     // Filter out formats that have different aspect ratio than the sensor array.
-    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1 &&
-        supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
       final Rect activeArraySize =
           cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
       final ArrayList<Size> filteredSizes = new ArrayList<Size>();
@@ -184,8 +181,7 @@
         (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
   }
 
-  static List<CaptureFormat> getSupportedFormats(
-      CameraManager cameraManager, String cameraId) {
+  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
     synchronized (cachedSupportedFormats) {
       if (cachedSupportedFormats.containsKey(cameraId)) {
         return cachedSupportedFormats.get(cameraId);
@@ -220,8 +216,8 @@
       for (Size size : sizes) {
         long minFrameDurationNs = 0;
         try {
-          minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
-              new android.util.Size(size.width, size.height));
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+              SurfaceTexture.class, new android.util.Size(size.width, size.height));
         } catch (Exception e) {
           // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
         }
@@ -235,7 +231,7 @@
       cachedSupportedFormats.put(cameraId, formatList);
       final long endTimeMs = SystemClock.elapsedRealtime();
       Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
-          + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
       return formatList;
     }
   }
@@ -255,8 +251,7 @@
     final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
     for (Range<Integer> range : arrayRanges) {
       ranges.add(new CaptureFormat.FramerateRange(
-          range.getLower() * unitFactor,
-          range.getUpper() * unitFactor));
+          range.getLower() * unitFactor, range.getUpper() * unitFactor));
     }
     return ranges;
   }
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Session.java b/webrtc/api/android/java/src/org/webrtc/Camera2Session.java
index 590e271..6ff73c5 100644
--- a/webrtc/api/android/java/src/org/webrtc/Camera2Session.java
+++ b/webrtc/api/android/java/src/org/webrtc/Camera2Session.java
@@ -42,7 +42,7 @@
   private static final Histogram camera2StopTimeMsHistogram =
       Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
 
-  private static enum SessionState { RUNNING, STOPPED };
+  private static enum SessionState { RUNNING, STOPPED }
 
   private final Handler cameraThreadHandler;
   private final CreateSessionCallback callback;
@@ -159,11 +159,11 @@
         final CaptureRequest.Builder captureRequestBuilder =
             cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
         // Set auto exposure fps range.
-        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
-            captureFormat.framerate.min / fpsUnitFactor,
-            captureFormat.framerate.max / fpsUnitFactor));
-        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
-            CaptureRequest.CONTROL_AE_MODE_ON);
+        captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+            new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
+                                      captureFormat.framerate.max / fpsUnitFactor));
+        captureRequestBuilder.set(
+            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
         captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
         chooseStabilizationMode(captureRequestBuilder);
         chooseFocusMode(captureRequestBuilder);
@@ -205,8 +205,8 @@
               }
 
               // Undo camera orientation - we report it as rotation instead.
-              transformMatrix = RendererCommon.rotateTextureMatrix(
-                  transformMatrix, -cameraOrientation);
+              transformMatrix =
+                  RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
 
               events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
                   captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
@@ -250,12 +250,12 @@
     }
 
     private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
-      final int[] availableFocusModes = cameraCharacteristics.get(
-          CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+      final int[] availableFocusModes =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
       for (int mode : availableFocusModes) {
         if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
-          captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
-              CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
+          captureRequestBuilder.set(
+              CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
           Logging.d(TAG, "Using continuous video auto-focus.");
           return;
         }
@@ -272,23 +272,17 @@
     }
   }
 
-  public static void create(
-      CreateSessionCallback callback, Events events,
+  public static void create(CreateSessionCallback callback, Events events,
       Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper,
-      String cameraId, int width, int height, int framerate) {
-    new Camera2Session(
-        callback, events,
-        applicationContext, cameraManager,
-        surfaceTextureHelper,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+      int framerate) {
+    new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
         cameraId, width, height, framerate);
   }
 
-  private Camera2Session(
-      CreateSessionCallback callback, Events events,
-      Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper,
-      String cameraId, int width, int height, int framerate) {
+  private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
+      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+      int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
 
     constructionTimeNs = System.nanoTime();
@@ -341,11 +335,9 @@
     }
 
     final CaptureFormat.FramerateRange bestFpsRange =
-        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
-            framerateRanges, framerate);
+        CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
 
-    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
-        sizes, width, height);
+    final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
 
     captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
     Logging.d(TAG, "Using capture format: " + captureFormat);
@@ -372,8 +364,7 @@
       final long stopStartTime = System.nanoTime();
       state = SessionState.STOPPED;
       stopInternal();
-      final int stopTimeMs =
-          (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
+      final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
       camera2StopTimeMsHistogram.addSample(stopTimeMs);
     }
   }
@@ -417,9 +408,8 @@
   private int getDeviceOrientation() {
     int orientation = 0;
 
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
-    switch(wm.getDefaultDisplay().getRotation()) {
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
       case Surface.ROTATION_90:
         orientation = 90;
         break;
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraCapturer.java b/webrtc/api/android/java/src/org/webrtc/CameraCapturer.java
index 293022f..c94538b 100644
--- a/webrtc/api/android/java/src/org/webrtc/CameraCapturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/CameraCapturer.java
@@ -19,8 +19,8 @@
 @SuppressWarnings("deprecation")
 public abstract class CameraCapturer implements CameraVideoCapturer {
   enum SwitchState {
-    IDLE,        // No switch requested.
-    PENDING,     // Waiting for previous capture session to open.
+    IDLE, // No switch requested.
+    PENDING, // Waiting for previous capture session to open.
     IN_PROGRESS, // Waiting for new switched capture session to start.
   }
 
@@ -50,8 +50,7 @@
 
             if (switchState == SwitchState.IN_PROGRESS) {
               if (switchEventsHandler != null) {
-                switchEventsHandler.onCameraSwitchDone(
-                    cameraEnumerator.isFrontFacing(cameraName));
+                switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
                 switchEventsHandler = null;
               }
               switchState = SwitchState.IDLE;
@@ -133,8 +132,7 @@
 
     @Override
     public void onByteBufferFrameCaptured(
-        CameraSession session, byte[] data, int width, int height, int rotation,
-        long timestamp) {
+        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
       checkIsOnCameraThread();
       synchronized (stateLock) {
         if (session != currentSession) {
@@ -151,9 +149,8 @@
     }
 
     @Override
-    public void onTextureFrameCaptured(
-        CameraSession session, int width, int height, int oesTextureId, float[] transformMatrix,
-        int rotation, long timestamp) {
+    public void onTextureFrameCaptured(CameraSession session, int width, int height,
+        int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
       checkIsOnCameraThread();
       synchronized (stateLock) {
         if (session != currentSession) {
@@ -187,18 +184,18 @@
   private SurfaceTextureHelper surfaceHelper;
 
   private final Object stateLock = new Object();
-  private boolean sessionOpening;                  /* guarded by stateLock */
-  private CameraSession currentSession;            /* guarded by stateLock */
-  private String cameraName;                       /* guarded by stateLock */
-  private int width;                               /* guarded by stateLock */
-  private int height;                              /* guarded by stateLock */
-  private int framerate;                           /* guarded by stateLock */
-  private int openAttemptsRemaining;               /* guarded by stateLock */
+  private boolean sessionOpening; /* guarded by stateLock */
+  private CameraSession currentSession; /* guarded by stateLock */
+  private String cameraName; /* guarded by stateLock */
+  private int width; /* guarded by stateLock */
+  private int height; /* guarded by stateLock */
+  private int framerate; /* guarded by stateLock */
+  private int openAttemptsRemaining; /* guarded by stateLock */
   private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
   private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
   // Valid from onDone call until stopCapture, otherwise null.
-  private CameraStatistics cameraStatistics;       /* guarded by stateLock */
-  private boolean firstFrameObserved;              /* guarded by stateLock */
+  private CameraStatistics cameraStatistics; /* guarded by stateLock */
+  private boolean firstFrameObserved; /* guarded by stateLock */
 
   public CameraCapturer(
       String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
@@ -268,9 +265,8 @@
     cameraThreadHandler.postDelayed(new Runnable() {
       @Override
       public void run() {
-        createCameraSession(
-            createSessionCallback, cameraSessionEventsHandler, applicationContext, surfaceHelper,
-            cameraName, width, height, framerate);
+        createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
+            surfaceHelper, cameraName, width, height, framerate);
       }
     }, delayMs);
   }
@@ -285,7 +281,6 @@
         ThreadUtils.waitUninterruptibly(stateLock);
       }
 
-
       if (currentSession != null) {
         Logging.d(TAG, "Stop capture: Nulling session");
         cameraStatistics.release();
@@ -428,6 +423,6 @@
 
   abstract protected void createCameraSession(
       CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      String cameraName, int width, int height, int framerate);
+      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+      int width, int height, int framerate);
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java
index b3c5062..0295cf7 100644
--- a/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java
+++ b/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java
@@ -123,7 +123,6 @@
     return new Camera1Enumerator().getDeviceNames();
   }
 
-
   /**
    * @deprecated
    * Please use Camera1Enumerator.getDeviceNames().length instead.
@@ -177,8 +176,8 @@
   // lower bound, to allow the framerate to fluctuate based on lighting conditions.
   public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
       List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
-    return Collections.min(supportedFramerates,
-        new ClosestComparator<CaptureFormat.FramerateRange>() {
+    return Collections.min(
+        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
           // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
           // from requested.
           private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
@@ -192,32 +191,29 @@
 
           // Use one weight for small |value| less than |threshold|, and another weight above.
           private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
-            return (value < threshold)
-                ? value * lowWeight
-                : threshold * lowWeight + (value - threshold) * highWeight;
+            return (value < threshold) ? value * lowWeight
+                                       : threshold * lowWeight + (value - threshold) * highWeight;
           }
 
           @Override
           int diff(CaptureFormat.FramerateRange range) {
-            final int minFpsError = progressivePenalty(range.min,
-                MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+            final int minFpsError = progressivePenalty(
+                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
             final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
                 MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
             return minFpsError + maxFpsError;
           }
-     });
+        });
   }
 
   public static Size getClosestSupportedSize(
-      List<Size> supportedSizes, final int requestedWidth,
-      final int requestedHeight) {
-    return Collections.min(supportedSizes,
-        new ClosestComparator<Size>() {
-          @Override
-          int diff(Size size) {
-            return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
-          }
-     });
+      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+      @Override
+      int diff(Size size) {
+        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+      }
+    });
   }
 
   private static String getNameOfDevice(int facing) {
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java b/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java
index ac7556d..dc954b6 100644
--- a/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java
+++ b/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java
@@ -20,6 +20,6 @@
   public boolean isBackFacing(String deviceName);
   public List<CaptureFormat> getSupportedFormats(String deviceName);
 
-  public CameraVideoCapturer createCapturer(String deviceName,
-      CameraVideoCapturer.CameraEventsHandler eventsHandler);
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraSession.java b/webrtc/api/android/java/src/org/webrtc/CameraSession.java
index bad623f..6e64927 100644
--- a/webrtc/api/android/java/src/org/webrtc/CameraSession.java
+++ b/webrtc/api/android/java/src/org/webrtc/CameraSession.java
@@ -22,10 +22,10 @@
     void onCameraOpening();
     void onCameraError(CameraSession session, String error);
     void onCameraClosed(CameraSession session);
-    void onByteBufferFrameCaptured(CameraSession session, byte[] data, int width, int height,
-      int rotation, long timestamp);
+    void onByteBufferFrameCaptured(
+        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
     void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
-      float[] transformMatrix, int rotation, long timestamp);
+        float[] transformMatrix, int rotation, long timestamp);
   }
 
   /**
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java
index a539356..1a66fcc 100644
--- a/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java
@@ -75,7 +75,7 @@
       @Override
       public void run() {
         final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
-        Logging.d(TAG, "Camera fps: " + cameraFps +".");
+        Logging.d(TAG, "Camera fps: " + cameraFps + ".");
         if (frameCount == 0) {
           ++freezePeriodCount;
           if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
diff --git a/webrtc/api/android/java/src/org/webrtc/DataChannel.java b/webrtc/api/android/java/src/org/webrtc/DataChannel.java
index 909d26f..1820822 100644
--- a/webrtc/api/android/java/src/org/webrtc/DataChannel.java
+++ b/webrtc/api/android/java/src/org/webrtc/DataChannel.java
@@ -29,9 +29,8 @@
     public Init() {}
 
     // Called only by native code.
-    private Init(
-        boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
-        String protocol, boolean negotiated, int id) {
+    private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
+        boolean negotiated, int id) {
       this.ordered = ordered;
       this.maxRetransmitTimeMs = maxRetransmitTimeMs;
       this.maxRetransmits = maxRetransmits;
@@ -73,7 +72,7 @@
   }
 
   /** Keep in sync with DataChannelInterface::DataState. */
-  public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+  public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
 
   private final long nativeDataChannel;
   private long nativeObserver;
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase.java b/webrtc/api/android/java/src/org/webrtc/EglBase.java
index 05dd806..2c0d1e9 100644
--- a/webrtc/api/android/java/src/org/webrtc/EglBase.java
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase.java
@@ -15,15 +15,13 @@
 
 import javax.microedition.khronos.egl.EGL10;
 
-
 /**
  * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
  * and an EGLSurface.
  */
 public abstract class EglBase {
   // EGL wrapper for an actual EGLContext.
-  public static class Context {
-  }
+  public static class Context {}
 
   // According to the documentation, EGL can be used from multiple threads at the same time if each
   // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
@@ -39,6 +37,7 @@
   // Android-specific extension.
   private static final int EGL_RECORDABLE_ANDROID = 0x3142;
 
+  // clang-format off
   public static final int[] CONFIG_PLAIN = {
     EGL10.EGL_RED_SIZE, 8,
     EGL10.EGL_GREEN_SIZE, 8,
@@ -79,14 +78,15 @@
     EGL_RECORDABLE_ANDROID, 1,
     EGL10.EGL_NONE
   };
+  // clang-format on
 
   // Create a new context with the specified config attributes, sharing data with sharedContext.
   // |sharedContext| can be null.
   public static EglBase create(Context sharedContext, int[] configAttributes) {
     return (EglBase14.isEGL14Supported()
-        && (sharedContext == null || sharedContext instanceof EglBase14.Context))
-            ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
-            : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+               && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+        ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+        : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
   }
 
   public static EglBase create() {
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase10.java b/webrtc/api/android/java/src/org/webrtc/EglBase10.java
index 5ca429d..bd99bd4 100644
--- a/webrtc/api/android/java/src/org/webrtc/EglBase10.java
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase10.java
@@ -159,9 +159,8 @@
     int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
     eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
     if (eglSurface == EGL10.EGL_NO_SURFACE) {
-      throw new RuntimeException(
-          "Failed to create pixel buffer surface with size " + width + "x" + height
-          + ": 0x" + Integer.toHexString(egl.eglGetError()));
+      throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+          + height + ": 0x" + Integer.toHexString(egl.eglGetError()));
     }
   }
 
@@ -235,7 +234,7 @@
   public void detachCurrent() {
     synchronized (EglBase.lock) {
       if (!egl.eglMakeCurrent(
-          eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+              eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
         throw new RuntimeException(
             "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
       }
@@ -272,8 +271,7 @@
   private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
     EGLConfig[] configs = new EGLConfig[1];
     int[] numConfigs = new int[1];
-    if (!egl.eglChooseConfig(
-        eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+    if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
       throw new RuntimeException(
           "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
     }
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase14.java b/webrtc/api/android/java/src/org/webrtc/EglBase14.java
index b1e1772..c87852d 100644
--- a/webrtc/api/android/java/src/org/webrtc/EglBase14.java
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase14.java
@@ -37,8 +37,8 @@
   // EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
   // time stamp on a surface is supported from 18 so we require 18.
   public static boolean isEGL14Supported() {
-    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
-        + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION + ". isEGL14Supported: "
+            + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
     return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
   }
 
@@ -101,9 +101,8 @@
     int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
     eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
     if (eglSurface == EGL14.EGL_NO_SURFACE) {
-      throw new RuntimeException(
-          "Failed to create pixel buffer surface with size " + width + "x" + height
-          + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
+      throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+          + height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
     }
   }
 
@@ -178,7 +177,7 @@
   public void detachCurrent() {
     synchronized (EglBase.lock) {
       if (!EGL14.eglMakeCurrent(
-          eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+              eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
         throw new RuntimeException(
             "eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
       }
@@ -202,7 +201,8 @@
       throw new RuntimeException("No EGLSurface - can't swap buffers");
     }
     synchronized (EglBase.lock) {
-      // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+      // See
+      // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
       EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
       EGL14.eglSwapBuffers(eglDisplay, eglSurface);
     }
@@ -228,7 +228,7 @@
     EGLConfig[] configs = new EGLConfig[1];
     int[] numConfigs = new int[1];
     if (!EGL14.eglChooseConfig(
-        eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+            eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
       throw new RuntimeException(
           "eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
     }
diff --git a/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java b/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java
index 0595e2b..c81e6e8 100644
--- a/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java
+++ b/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java
@@ -25,6 +25,7 @@
  * manually to free the resources held by this object.
  */
 public class GlRectDrawer implements RendererCommon.GlDrawer {
+  // clang-format off
   // Simple vertex shader, used for both YUV and OES.
   private static final String VERTEX_SHADER_STRING =
         "varying vec2 interp_tc;\n"
@@ -76,25 +77,24 @@
       + "void main() {\n"
       + "  gl_FragColor = texture2D(oes_tex, interp_tc);\n"
       + "}\n";
+  // clang-format on
 
   // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
   // top-right.
-  private static final FloatBuffer FULL_RECTANGLE_BUF =
-      GlUtil.createFloatBuffer(new float[] {
-            -1.0f, -1.0f,  // Bottom left.
-             1.0f, -1.0f,  // Bottom right.
-            -1.0f,  1.0f,  // Top left.
-             1.0f,  1.0f,  // Top right.
-          });
+  private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
+      -1.0f, -1.0f, // Bottom left.
+      1.0f, -1.0f, // Bottom right.
+      -1.0f, 1.0f, // Top left.
+      1.0f, 1.0f, // Top right.
+  });
 
   // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
-  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
-      GlUtil.createFloatBuffer(new float[] {
-            0.0f, 0.0f,  // Bottom left.
-            1.0f, 0.0f,  // Bottom right.
-            0.0f, 1.0f,  // Top left.
-            1.0f, 1.0f   // Top right.
-          });
+  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
+      0.0f, 0.0f, // Bottom left.
+      1.0f, 0.0f, // Bottom right.
+      0.0f, 1.0f, // Top left.
+      1.0f, 1.0f // Top right.
+  });
 
   private static class Shader {
     public final GlShader glShader;
diff --git a/webrtc/api/android/java/src/org/webrtc/GlShader.java b/webrtc/api/android/java/src/org/webrtc/GlShader.java
index be10635..15dfc45 100644
--- a/webrtc/api/android/java/src/org/webrtc/GlShader.java
+++ b/webrtc/api/android/java/src/org/webrtc/GlShader.java
@@ -25,13 +25,11 @@
     }
     GLES20.glShaderSource(shader, source);
     GLES20.glCompileShader(shader);
-    int[] compileStatus = new int[] {
-        GLES20.GL_FALSE
-    };
+    int[] compileStatus = new int[] {GLES20.GL_FALSE};
     GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
     if (compileStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
-          GLES20.glGetShaderInfoLog(shader));
+      Logging.e(
+          TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
       throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
     }
     GlUtil.checkNoGLES2Error("compileShader");
@@ -50,13 +48,10 @@
     GLES20.glAttachShader(program, vertexShader);
     GLES20.glAttachShader(program, fragmentShader);
     GLES20.glLinkProgram(program);
-    int[] linkStatus = new int[] {
-      GLES20.GL_FALSE
-    };
+    int[] linkStatus = new int[] {GLES20.GL_FALSE};
     GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
     if (linkStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not link program: " +
-          GLES20.glGetProgramInfoLog(program));
+      Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
       throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
     }
     // According to the documentation of glLinkProgram():
diff --git a/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java b/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java
index a456010..f1b13be 100644
--- a/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java
+++ b/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java
@@ -52,8 +52,8 @@
     GlUtil.checkNoGLES2Error("Generate framebuffer");
 
     // Attach the texture to the framebuffer as color attachment.
-    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
-        GLES20.GL_TEXTURE_2D, textureId, 0);
+    GLES20.glFramebufferTexture2D(
+        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
     GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
 
     // Restore normal framebuffer.
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 053780b..44572cb 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -42,11 +42,7 @@
   private static final long MAX_DECODE_TIME_MS = 200;
 
   // Tracks webrtc::VideoCodecType.
-  public enum VideoCodecType {
-    VIDEO_CODEC_VP8,
-    VIDEO_CODEC_VP9,
-    VIDEO_CODEC_H264
-  }
+  public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
 
   // Timeout for input buffer dequeue.
   private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
@@ -70,14 +66,13 @@
   private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
   private static final String H264_MIME_TYPE = "video/avc";
   // List of supported HW VP8 decoders.
-  private static final String[] supportedVp8HwCodecPrefixes =
-    {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+  private static final String[] supportedVp8HwCodecPrefixes = {
+      "OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
   // List of supported HW VP9 decoders.
-  private static final String[] supportedVp9HwCodecPrefixes =
-    {"OMX.qcom.", "OMX.Exynos." };
+  private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
   // List of supported HW H.264 decoders.
-  private static final String[] supportedH264HwCodecPrefixes =
-    {"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." };
+  private static final String[] supportedH264HwCodecPrefixes = {
+      "OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
 
   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
   // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
@@ -87,13 +82,11 @@
   private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
   // Allowable color formats supported by codec - in order of preference.
   private static final List<Integer> supportedColorList = Arrays.asList(
-    CodecCapabilities.COLOR_FormatYUV420Planar,
-    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
-    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
-    COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
-    COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
-    COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
-    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
+      CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+      CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+      COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
+      COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
+      COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
 
   private int colorFormat;
   private int width;
@@ -108,8 +101,8 @@
   private TextureListener textureListener;
   private int droppedFrames;
   private Surface surface = null;
-  private final Queue<DecodedOutputBuffer>
-      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+  private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
+      new LinkedList<DecodedOutputBuffer>();
 
   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
@@ -142,18 +135,18 @@
 
   // Functions to query if HW decoding is supported.
   public static boolean isVp8HwSupported() {
-    return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
-        (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
+    return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
+        && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
   }
 
   public static boolean isVp9HwSupported() {
-    return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
-        (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
+    return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
+        && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
   }
 
   public static boolean isH264HwSupported() {
-    return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
-        (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
+    return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
+        && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
   }
 
   public static void printStackTrace() {
@@ -175,11 +168,10 @@
       this.colorFormat = colorFormat;
     }
     public final String codecName; // OpenMax component name for VP8 codec.
-    public final int colorFormat;  // Color format supported by codec.
+    public final int colorFormat; // Color format supported by codec.
   }
 
-  private static DecoderProperties findDecoder(
-      String mime, String[] supportedCodecPrefixes) {
+  private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
       return null; // MediaCodec.setParameters is missing.
     }
@@ -189,7 +181,7 @@
       try {
         info = MediaCodecList.getCodecInfoAt(i);
       } catch (IllegalArgumentException e) {
-        Logging.e(TAG,  "Cannot retrieve decoder codec info", e);
+        Logging.e(TAG, "Cannot retrieve decoder codec info", e);
       }
       if (info == null || info.isEncoder()) {
         continue;
@@ -202,7 +194,7 @@
         }
       }
       if (name == null) {
-        continue;  // No HW support in this codec; try the next one.
+        continue; // No HW support in this codec; try the next one.
       }
       Logging.d(TAG, "Found candidate decoder " + name);
 
@@ -223,7 +215,7 @@
       try {
         capabilities = info.getCapabilitiesForType(mime);
       } catch (IllegalArgumentException e) {
-        Logging.e(TAG,  "Cannot retrieve decoder capabilities", e);
+        Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
         continue;
       }
       for (int colorFormat : capabilities.colorFormats) {
@@ -233,29 +225,27 @@
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
             // Found supported HW decoder.
-            Logging.d(TAG, "Found target decoder " + name +
-                ". Color: 0x" + Integer.toHexString(codecColorFormat));
+            Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
+                    + Integer.toHexString(codecColorFormat));
             return new DecoderProperties(name, codecColorFormat);
           }
         }
       }
     }
     Logging.d(TAG, "No HW decoder found for mime " + mime);
-    return null;  // No HW decoder.
+    return null; // No HW decoder.
   }
 
   private void checkOnMediaCodecThread() throws IllegalStateException {
     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
-      throw new IllegalStateException(
-          "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
-          " but is now called on " + Thread.currentThread());
+      throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
+          + mediaCodecThread + " but is now called on " + Thread.currentThread());
     }
   }
 
   // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
   private boolean initDecode(
-      VideoCodecType type, int width, int height,
-      SurfaceTextureHelper surfaceTextureHelper) {
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("initDecode: Forgot to release()?");
     }
@@ -280,9 +270,8 @@
       throw new RuntimeException("Cannot find HW decoder for " + type);
     }
 
-    Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
-        ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
-        ". Use Surface: " + useSurface);
+    Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
+            + Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
 
     runningInstance = this; // Decoder is now running and can be queried for stack traces.
     mediaCodecThread = Thread.currentThread();
@@ -317,8 +306,8 @@
       hasDecodedFirstFrame = false;
       dequeuedSurfaceOutputBuffers.clear();
       droppedFrames = 0;
-      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
-          ". Output buffers: " + outputBuffers.length);
+      Logging.d(TAG,
+          "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
       return true;
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initDecode failed", e);
@@ -406,12 +395,11 @@
     try {
       inputBuffers[inputBufferIndex].position(0);
       inputBuffers[inputBufferIndex].limit(size);
-      decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
-          ntpTimeStamp));
+      decodeStartTimeMs.add(
+          new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
       mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0);
       return true;
-    }
-    catch (IllegalStateException e) {
+    } catch (IllegalStateException e) {
       Logging.e(TAG, "decode failed", e);
       return false;
     }
@@ -511,8 +499,7 @@
 
     public void addBufferToRender(DecodedOutputBuffer buffer) {
       if (bufferToRender != null) {
-        Logging.e(TAG,
-            "Unexpected addBufferToRender() called while waiting for a texture.");
+        Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
         throw new IllegalStateException("Waiting for a texture.");
       }
       bufferToRender = buffer;
@@ -530,8 +517,8 @@
         int oesTextureId, float[] transformMatrix, long timestampNs) {
       synchronized (newFrameLock) {
         if (renderedBuffer != null) {
-          Logging.e(TAG,
-              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          Logging.e(
+              TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
           throw new IllegalStateException("Already holding a texture.");
         }
         // |timestampNs| is always zero on some Android versions.
@@ -550,7 +537,7 @@
         if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
           try {
             newFrameLock.wait(timeoutMs);
-          } catch(InterruptedException e) {
+          } catch (InterruptedException e) {
             // Restore the interrupted status by reinterrupting the thread.
             Thread.currentThread().interrupt();
           }
@@ -588,8 +575,8 @@
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(
-          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+      final int result =
+          mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
       switch (result) {
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
           outputBuffers = mediaCodec.getOutputBuffers();
@@ -604,8 +591,8 @@
           int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
           int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
           if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
-            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
-                height + ". New " + new_width + "*" + new_height);
+            throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
+                + ". New " + new_width + "*" + new_height);
           }
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);
@@ -635,19 +622,14 @@
           long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
           if (decodeTimeMs > MAX_DECODE_TIME_MS) {
             Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
-                + ". Q size: " + decodeStartTimeMs.size()
-                + ". Might be caused by resuming H264 decoding after a pause.");
+                    + ". Q size: " + decodeStartTimeMs.size()
+                    + ". Might be caused by resuming H264 decoding after a pause.");
             decodeTimeMs = MAX_DECODE_TIME_MS;
           }
-          return new DecodedOutputBuffer(result,
-              info.offset,
-              info.size,
-              TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
-              timeStamps.timeStampMs,
-              timeStamps.ntpTimeStampMs,
-              decodeTimeMs,
-              SystemClock.elapsedRealtime());
-        }
+          return new DecodedOutputBuffer(result, info.offset, info.size,
+              TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
+              timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
+      }
     }
   }
 
@@ -675,8 +657,8 @@
     }
 
     if ((dequeuedSurfaceOutputBuffers.size()
-         >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
-         || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
+                >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+            || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
       ++droppedFrames;
       // Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
       // The oldest frame is owned by |textureListener| and can't be dropped since
@@ -686,18 +668,17 @@
         // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
         // return the one and only texture even if it does not render.
         Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
-            + droppedFrame.presentationTimeStampMs +
-            ". Total number of dropped frames: " + droppedFrames);
+                + droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
+                + droppedFrames);
       } else {
-        Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
-            ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
-            ". Total number of dropped frames: " + droppedFrames);
+        Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
+                + ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
+                + ". Total number of dropped frames: " + droppedFrames);
       }
 
       mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
-      return new DecodedTextureBuffer(0, null,
-          droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
-          droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+      return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
+          droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
           SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
     }
     return null;
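
For readers skimming the decoder hunks: checkOnMediaCodecThread() above enforces that every MediaCodec call stays on the thread that configured the codec. A minimal standalone sketch of that single-thread affinity pattern follows; the class name and message wording are illustrative, only the thread-identity check mirrors the code above.

final class ThreadAffinityGuard {
  // Remember the thread that constructed the guarded object.
  private final Thread owner = Thread.currentThread();

  void check() {
    if (owner.getId() != Thread.currentThread().getId()) {
      throw new IllegalStateException(
          "Expected calls on " + owner + " but was called on " + Thread.currentThread());
    }
  }
}
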
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 923ea91..258ef96 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -42,14 +42,10 @@
   private static final String TAG = "MediaCodecVideoEncoder";
 
   // Tracks webrtc::VideoCodecType.
-  public enum VideoCodecType {
-    VIDEO_CODEC_VP8,
-    VIDEO_CODEC_VP9,
-    VIDEO_CODEC_H264
-  }
+  public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
 
   private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
-  private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
+  private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
   private static final int BITRATE_ADJUSTMENT_FPS = 30;
   private static final int MAXIMUM_INITIAL_FPS = 30;
   private static final double BITRATE_CORRECTION_SEC = 3.0;
@@ -115,56 +111,45 @@
       "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
   private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
       "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
-  private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] {
-    qcomVp8HwProperties, exynosVp8HwProperties
-  };
+  private static final MediaCodecProperties[] vp8HwList =
+      new MediaCodecProperties[] {qcomVp8HwProperties, exynosVp8HwProperties};
 
   // List of supported HW VP9 encoders.
   private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
       "OMX.qcom.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
   private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
       "OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.NO_ADJUSTMENT);
-  private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] {
-    qcomVp9HwProperties, exynosVp9HwProperties
-  };
+  private static final MediaCodecProperties[] vp9HwList =
+      new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
 
   // List of supported HW H.264 encoders.
   private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
       "OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
   private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
       "OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
-  private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] {
-    qcomH264HwProperties, exynosH264HwProperties
-  };
+  private static final MediaCodecProperties[] h264HwList =
+      new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
 
   // List of devices with poor H.264 encoder quality.
-  private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
-    // HW H.264 encoder on below devices has poor bitrate control - actual
-    // bitrates deviates a lot from the target value.
-    "SAMSUNG-SGH-I337",
-    "Nexus 7",
-    "Nexus 4"
-  };
+  // The HW H.264 encoder on the devices below has poor bitrate control - the
+  // actual bitrate deviates a lot from the target value.
+  private static final String[] H264_HW_EXCEPTION_MODELS =
+      new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
 
   // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
   // in OMX_Video.h
   private static final int VIDEO_ControlRateConstant = 2;
   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
   // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
-  private static final int
-    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+  private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
   // Allowable color formats supported by codec - in order of preference.
-  private static final int[] supportedColorList = {
-    CodecCapabilities.COLOR_FormatYUV420Planar,
-    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
-    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
-    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
-  };
-  private static final int[] supportedSurfaceColorList = {
-    CodecCapabilities.COLOR_FormatSurface
-  };
+  private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
+      CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+      CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+      COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
+  private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
   private VideoCodecType type;
-  private int colorFormat;  // Used by native code.
+  private int colorFormat; // Used by native code.
 
   // Variables used for dynamic bitrate adjustment.
   private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
@@ -209,33 +194,33 @@
 
   // Functions to query if HW encoding is supported.
   public static boolean isVp8HwSupported() {
-    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
-        (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
+    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
+        && (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
   }
 
   public static boolean isVp9HwSupported() {
-    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
-        (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
+    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
+        && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
   }
 
   public static boolean isH264HwSupported() {
-    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
-        (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
+    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
+        && (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
   }
 
   public static boolean isVp8HwSupportedUsingTextures() {
-    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
-        (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
+    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
+        && (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
   }
 
   public static boolean isVp9HwSupportedUsingTextures() {
-    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
-        (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
+    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
+        && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
   }
 
   public static boolean isH264HwSupportedUsingTextures() {
-    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
-        (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
+    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
+        && (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
   }
 
   // Helper struct for findHwEncoder() below.
@@ -247,7 +232,7 @@
       this.bitrateAdjustmentType = bitrateAdjustmentType;
     }
     public final String codecName; // OpenMax component name for HW codec.
-    public final int colorFormat;  // Color format supported by codec.
+    public final int colorFormat; // Color format supported by codec.
     public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type
   }
 
@@ -273,7 +258,7 @@
       try {
         info = MediaCodecList.getCodecInfoAt(i);
       } catch (IllegalArgumentException e) {
-        Logging.e(TAG,  "Cannot retrieve encoder codec info", e);
+        Logging.e(TAG, "Cannot retrieve encoder codec info", e);
       }
       if (info == null || !info.isEncoder()) {
         continue;
@@ -286,7 +271,7 @@
         }
       }
       if (name == null) {
-        continue;  // No HW support in this codec; try the next one.
+        continue; // No HW support in this codec; try the next one.
       }
       Logging.v(TAG, "Found candidate encoder " + name);
 
@@ -296,14 +281,14 @@
       for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
         if (name.startsWith(codecProperties.codecPrefix)) {
           if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
-            Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " +
-                Build.VERSION.SDK_INT);
+            Logging.w(
+                TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
             continue;
           }
           if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
             bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
-            Logging.w(TAG, "Codec " + name
-                + " requires bitrate adjustment: " + bitrateAdjustmentType);
+            Logging.w(
+                TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
           }
           supportedCodec = true;
           break;
@@ -318,7 +303,7 @@
       try {
         capabilities = info.getCapabilitiesForType(mime);
       } catch (IllegalArgumentException e) {
-        Logging.e(TAG,  "Cannot retrieve encoder capabilities", e);
+        Logging.e(TAG, "Cannot retrieve encoder capabilities", e);
         continue;
       }
       for (int colorFormat : capabilities.colorFormats) {
@@ -329,22 +314,21 @@
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
             // Found supported HW encoder.
-            Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name
-                + ". Color: 0x" + Integer.toHexString(codecColorFormat)
-                + ". Bitrate adjustment: " + bitrateAdjustmentType);
+            Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
+                    + Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
+                    + bitrateAdjustmentType);
             return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
           }
         }
       }
     }
-    return null;  // No HW encoder.
+    return null; // No HW encoder.
   }
 
   private void checkOnMediaCodecThread() {
     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
-      throw new RuntimeException(
-          "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
-          " but is now called on " + Thread.currentThread());
+      throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
+          + " but is now called on " + Thread.currentThread());
     }
   }
 
@@ -373,8 +357,8 @@
   boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
       EglBase14.Context sharedContext) {
     final boolean useSurface = sharedContext != null;
-    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
-        ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + ". @ " + kbps
+            + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
 
     this.width = width;
     this.height = height;
@@ -408,12 +392,11 @@
     bitrateAdjustmentType = properties.bitrateAdjustmentType;
     if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT) {
       fps = BITRATE_ADJUSTMENT_FPS;
-    } else  {
+    } else {
       fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
     }
-    Logging.d(TAG, "Color format: " + colorFormat +
-        ". Bitrate adjustment: " + bitrateAdjustmentType +
-        ". Initial fps: " + fps);
+    Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
+            + ". Initial fps: " + fps);
     targetBitrateBps = 1000 * kbps;
     targetFps = fps;
     bitrateAccumulatorMax = targetBitrateBps / 8.0;
@@ -436,8 +419,7 @@
         Logging.e(TAG, "Can not create media encoder");
         return false;
       }
-      mediaCodec.configure(
-          format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
 
       if (useSurface) {
         eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
@@ -457,15 +439,14 @@
     return true;
   }
 
-  ByteBuffer[]  getInputBuffers() {
+  ByteBuffer[] getInputBuffers() {
     ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
     Logging.d(TAG, "Input buffers: " + inputBuffers.length);
     return inputBuffers;
   }
 
   boolean encodeBuffer(
-      boolean isKeyframe, int inputBuffer, int size,
-      long presentationTimestampUs) {
+      boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
     checkOnMediaCodecThread();
     try {
       if (isKeyframe) {
@@ -478,11 +459,9 @@
         b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
         mediaCodec.setParameters(b);
       }
-      mediaCodec.queueInputBuffer(
-          inputBuffer, 0, size, presentationTimestampUs, 0);
+      mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
       return true;
-    }
-    catch (IllegalStateException e) {
+    } catch (IllegalStateException e) {
       Logging.e(TAG, "encodeBuffer failed", e);
       return false;
     }
@@ -505,8 +484,7 @@
       drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
       eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
       return true;
-    }
-    catch (RuntimeException e) {
+    } catch (RuntimeException e) {
       Logging.e(TAG, "encodeTexture failed", e);
       return false;
     }
@@ -580,13 +558,13 @@
     // Adjust actual encoder bitrate based on bitrate adjustment type.
     if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
       codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
-      Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrateBps / 1000)
-          + " kbps. Fps: " + targetFps);
+      Logging.v(TAG,
+          "setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
     } else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
-      Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps
-          + ". ExpScale: " + bitrateAdjustmentScaleExp);
+      Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
+              + bitrateAdjustmentScaleExp);
       if (bitrateAdjustmentScaleExp != 0) {
-        codecBitrateBps = (int)(codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
+        codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
       }
     } else {
       Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
@@ -618,8 +596,7 @@
   // Helper struct for dequeueOutputBuffer() below.
   static class OutputBufferInfo {
     public OutputBufferInfo(
-        int index, ByteBuffer buffer,
-        boolean isKeyFrame, long presentationTimestampUs) {
+        int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
       this.index = index;
       this.buffer = buffer;
       this.isKeyFrame = isKeyFrame;
@@ -641,11 +618,9 @@
       int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
       // Check if this is config frame and save configuration data.
       if (result >= 0) {
-        boolean isConfigFrame =
-            (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+        boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
         if (isConfigFrame) {
-          Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
-              ". Size: " + info.size);
+          Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
           configData = ByteBuffer.allocateDirect(info.size);
           outputBuffers[result].position(info.offset);
           outputBuffers[result].limit(info.offset + info.size);
@@ -666,27 +641,23 @@
         reportEncodedFrame(info.size);
 
         // Check key frame flag.
-        boolean isKeyFrame =
-            (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+        boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
         if (isKeyFrame) {
           Logging.d(TAG, "Sync frame generated");
         }
         if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
-          Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
-              " to output buffer with offset " + info.offset + ", size " +
-              info.size);
+          Logging.d(TAG, "Appending config frame of size " + configData.capacity()
+                  + " to output buffer with offset " + info.offset + ", size " + info.size);
           // For H.264 key frames, append the SPS and PPS NALs at the start
-          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
-              configData.capacity() + info.size);
+          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
           configData.rewind();
           keyFrameBuffer.put(configData);
           keyFrameBuffer.put(outputBuffer);
           keyFrameBuffer.position(0);
-          return new OutputBufferInfo(result, keyFrameBuffer,
-              isKeyFrame, info.presentationTimeUs);
+          return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
         } else {
-          return new OutputBufferInfo(result, outputBuffer.slice(),
-              isKeyFrame, info.presentationTimeUs);
+          return new OutputBufferInfo(
+              result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
         }
       } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
         outputBuffers = mediaCodec.getOutputBuffers();
@@ -705,7 +676,7 @@
 
   private double getBitrateScale(int bitrateAdjustmentScaleExp) {
     return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
-        (double)bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
+        (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
   }
 
   private void reportEncodedFrame(int size) {
@@ -727,9 +698,8 @@
     // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
     // from the target value.
     if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
-      Logging.d(TAG, "Acc: " + (int)bitrateAccumulator
-          + ". Max: " + (int)bitrateAccumulatorMax
-          + ". ExpScale: " + bitrateAdjustmentScaleExp);
+      Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
+              + ". ExpScale: " + bitrateAdjustmentScaleExp);
       boolean bitrateAdjustmentScaleChanged = false;
       if (bitrateAccumulator > bitrateAccumulatorMax) {
         // Encoder generates too high bitrate - need to reduce the scale.
@@ -745,8 +715,8 @@
       if (bitrateAdjustmentScaleChanged) {
         bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
         bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
-        Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp
-            + ". Value: " + getBitrateScale(bitrateAdjustmentScaleExp));
+        Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
+                + getBitrateScale(bitrateAdjustmentScaleExp));
         setRates(targetBitrateBps / 1000, targetFps);
       }
       bitrateObservationTimeMs = 0;
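
For context on the setRates() and reportEncodedFrame() hunks above: bitrateAdjustmentScaleExp is an integer step counter, clamped to +/-BITRATE_CORRECTION_STEPS, that getBitrateScale() turns into an exponential bitrate multiplier. A small sketch of that mapping; the constant values below are assumptions for illustration, only the formula is taken from the diff.

public class BitrateScaleDemo {
  // Assumed values; the diff shows the formula but not these constants.
  private static final double BITRATE_CORRECTION_MAX_SCALE = 2.0;
  private static final int BITRATE_CORRECTION_STEPS = 20;

  // Same shape as MediaCodecVideoEncoder.getBitrateScale() above.
  static double getBitrateScale(int bitrateAdjustmentScaleExp) {
    return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
        (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
  }

  public static void main(String[] args) {
    System.out.println(getBitrateScale(0)); // 1.0: encoder bitrate left unchanged.
    System.out.println(getBitrateScale(BITRATE_CORRECTION_STEPS)); // 2.0: doubled.
    System.out.println(getBitrateScale(-BITRATE_CORRECTION_STEPS)); // 0.5: halved.
  }
}
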
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java b/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java
index 1cab682..cfe709e 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java
@@ -48,7 +48,7 @@
       if (other == null || getClass() != other.getClass()) {
         return false;
       }
-      KeyValuePair that = (KeyValuePair)other;
+      KeyValuePair that = (KeyValuePair) other;
       return key.equals(that.key) && value.equals(that.value);
     }
 
@@ -78,7 +78,7 @@
   }
 
   public String toString() {
-    return "mandatory: " + stringifyKeyValuePairList(mandatory) +
-        ", optional: " + stringifyKeyValuePairList(optional);
+    return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+        + stringifyKeyValuePairList(optional);
   }
 }
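
Since the MediaConstraints hunks above only touch equals() and toString(), a brief usage sketch for orientation; the constraint keys are common examples, not values mandated by this file.

static MediaConstraints buildOfferConstraints() {
  MediaConstraints constraints = new MediaConstraints();
  // mandatory and optional are plain KeyValuePair lists, as toString() above suggests.
  constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
  constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  return constraints;
}
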
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaSource.java b/webrtc/api/android/java/src/org/webrtc/MediaSource.java
index 070a95d..9f6dec6 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaSource.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaSource.java
@@ -8,17 +8,14 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 package org.webrtc;
 
 /** Java wrapper for a C++ MediaSourceInterface. */
 public class MediaSource {
   /** Tracks MediaSourceInterface.SourceState */
-  public enum State {
-    INITIALIZING, LIVE, ENDED, MUTED
-  }
+  public enum State { INITIALIZING, LIVE, ENDED, MUTED }
 
-  final long nativeSource;  // Package-protected for PeerConnectionFactory.
+  final long nativeSource; // Package-protected for PeerConnectionFactory.
 
   public MediaSource(long nativeSource) {
     this.nativeSource = nativeSource;
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaStream.java b/webrtc/api/android/java/src/org/webrtc/MediaStream.java
index 2128b73..3c7349d 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaStream.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaStream.java
@@ -89,21 +89,16 @@
   }
 
   public String toString() {
-    return "[" + label() + ":A=" + audioTracks.size() +
-        ":V=" + videoTracks.size() + "]";
+    return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
   }
 
-  private static native boolean nativeAddAudioTrack(
-      long nativeStream, long nativeAudioTrack);
+  private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
 
-  private static native boolean nativeAddVideoTrack(
-      long nativeStream, long nativeVideoTrack);
+  private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
 
-  private static native boolean nativeRemoveAudioTrack(
-      long nativeStream, long nativeAudioTrack);
+  private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
 
-  private static native boolean nativeRemoveVideoTrack(
-      long nativeStream, long nativeVideoTrack);
+  private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
 
   private static native String nativeLabel(long nativeStream);
 
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java b/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java
index 49b2210..31a3736 100644
--- a/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java
+++ b/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java
@@ -51,8 +51,7 @@
 
   private static native boolean nativeEnabled(long nativeTrack);
 
-  private static native boolean nativeSetEnabled(
-      long nativeTrack, boolean enabled);
+  private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
 
   private static native State nativeState(long nativeTrack);
 
diff --git a/webrtc/api/android/java/src/org/webrtc/Metrics.java b/webrtc/api/android/java/src/org/webrtc/Metrics.java
index 69c81ee..7f0f5cb 100644
--- a/webrtc/api/android/java/src/org/webrtc/Metrics.java
+++ b/webrtc/api/android/java/src/org/webrtc/Metrics.java
@@ -36,7 +36,7 @@
     System.loadLibrary("jingle_peerconnection_so");
   }
   public final Map<String, HistogramInfo> map =
-       new HashMap<String, HistogramInfo>();  // <name, HistogramInfo>
+      new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
 
   /**
    * Class holding histogram information.
@@ -46,7 +46,7 @@
     public final int max;
     public final int bucketCount;
     public final Map<Integer, Integer> samples =
-        new HashMap<Integer, Integer>();  // <value, # of events>
+        new HashMap<Integer, Integer>(); // <value, # of events>
 
     public HistogramInfo(int min, int max, int bucketCount) {
       this.min = min;
diff --git a/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java b/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java
index 01b9428..26fefc0 100644
--- a/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java
+++ b/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java
@@ -140,25 +140,23 @@
       return;
     }
     if (autoDetector == null) {
-      autoDetector = new NetworkMonitorAutoDetect(
-        new NetworkMonitorAutoDetect.Observer() {
+      autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
 
-          @Override
-          public void onConnectionTypeChanged(ConnectionType newConnectionType) {
-            updateCurrentConnectionType(newConnectionType);
-          }
+        @Override
+        public void onConnectionTypeChanged(ConnectionType newConnectionType) {
+          updateCurrentConnectionType(newConnectionType);
+        }
 
-          @Override
-          public void onNetworkConnect(NetworkInformation networkInfo) {
-            notifyObserversOfNetworkConnect(networkInfo);
-          }
+        @Override
+        public void onNetworkConnect(NetworkInformation networkInfo) {
+          notifyObserversOfNetworkConnect(networkInfo);
+        }
 
-          @Override
-          public void onNetworkDisconnect(long networkHandle) {
-            notifyObserversOfNetworkDisconnect(networkHandle);
-          }
-        },
-        applicationContext);
+        @Override
+        public void onNetworkDisconnect(long networkHandle) {
+          notifyObserversOfNetworkDisconnect(networkHandle);
+        }
+      }, applicationContext);
       final NetworkMonitorAutoDetect.NetworkState networkState =
           autoDetector.getCurrentNetworkState();
       updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
@@ -241,8 +239,8 @@
   private native void nativeNotifyConnectionTypeChanged(long nativePtr);
   private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
   private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
-  private native void nativeNotifyOfActiveNetworkList(long nativePtr,
-                                                      NetworkInformation[] networkInfos);
+  private native void nativeNotifyOfActiveNetworkList(
+      long nativePtr, NetworkInformation[] networkInfos);
 
   // For testing only.
   static void resetInstanceForTests(Context context) {
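
The NetworkMonitor hunk above re-indents an anonymous NetworkMonitorAutoDetect.Observer. An equivalent named implementation, shown only to make the three callbacks easier to read; the logging bodies are illustrative.

class LoggingNetworkObserver implements NetworkMonitorAutoDetect.Observer {
  private static final String TAG = "LoggingNetworkObserver";

  @Override
  public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
    Logging.d(TAG, "Connection type changed: " + newConnectionType);
  }

  @Override
  public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
    Logging.d(TAG, "Network connected: " + networkInfo.name);
  }

  @Override
  public void onNetworkDisconnect(long networkHandle) {
    Logging.d(TAG, "Network disconnected, handle: " + networkHandle);
  }
}
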
diff --git a/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java
index c437b44..270fca0 100644
--- a/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java
+++ b/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java
@@ -58,19 +58,19 @@
 
   public static class IPAddress {
     public final byte[] address;
-    public IPAddress (byte[] address) {
+    public IPAddress(byte[] address) {
       this.address = address;
     }
   }
 
   /** Java version of NetworkMonitor.NetworkInformation */
-  public static class NetworkInformation{
+  public static class NetworkInformation {
     public final String name;
     public final ConnectionType type;
     public final long handle;
     public final IPAddress[] ipAddresses;
-    public NetworkInformation(String name, ConnectionType type, long handle,
-                              IPAddress[] addresses) {
+    public NetworkInformation(
+        String name, ConnectionType type, long handle, IPAddress[] addresses) {
       this.name = name;
       this.type = type;
       this.handle = handle;
@@ -112,7 +112,6 @@
    */
   @SuppressLint("NewApi")
   private class SimpleNetworkCallback extends NetworkCallback {
-
     @Override
     public void onAvailable(Network network) {
       Logging.d(TAG, "Network becomes available: " + network.toString());
@@ -120,8 +119,7 @@
     }
 
     @Override
-    public void onCapabilitiesChanged(
-        Network network, NetworkCapabilities networkCapabilities) {
+    public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
       // A capabilities change may indicate the ConnectionType has changed,
       // so forward the new NetworkInformation along to the observer.
       Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
@@ -140,8 +138,8 @@
     public void onLosing(Network network, int maxMsToLive) {
       // Called when the network is about to be lost within maxMsToLive milliseconds.
       // We may use this signal later.
-      Logging.d(TAG,
-                "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
+      Logging.d(
+          TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
     }
 
     @Override
@@ -303,15 +301,13 @@
       if (connectionType == ConnectionType.CONNECTION_UNKNOWN
           || connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
         Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
-                  + " because it has type " + networkState.getNetworkType()
-                  + " and subtype " + networkState.getNetworkSubType());
+                + " because it has type " + networkState.getNetworkType() + " and subtype "
+                + networkState.getNetworkSubType());
       }
 
-      NetworkInformation networkInformation = new NetworkInformation(
-          linkProperties.getInterfaceName(),
-          connectionType,
-          networkToNetId(network),
-          getIPAddresses(linkProperties));
+      NetworkInformation networkInformation =
+          new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
+              networkToNetId(network), getIPAddresses(linkProperties));
       return networkInformation;
     }
 
@@ -324,8 +320,7 @@
       if (connectivityManager == null) {
         return false;
       }
-      final NetworkCapabilities capabilities =
-          connectivityManager.getNetworkCapabilities(network);
+      final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
       return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
     }
 
@@ -369,7 +364,6 @@
     }
   }
 
-
   /** Queries the WifiManager for SSID of the current Wifi connection. */
   static class WifiManagerDelegate {
     private final Context context;
@@ -384,8 +378,8 @@
     }
 
     String getWifiSSID() {
-      final Intent intent = context.registerReceiver(null,
-          new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+      final Intent intent = context.registerReceiver(
+          null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
       if (intent != null) {
         final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
         if (wifiInfo != null) {
@@ -397,7 +391,6 @@
       }
       return "";
     }
-
   }
 
   static final long INVALID_NET_ID = -1;
@@ -507,7 +500,8 @@
    * Registers a BroadcastReceiver in the given context.
    */
   private void registerReceiver() {
-    if (isRegistered) return;
+    if (isRegistered)
+      return;
 
     isRegistered = true;
     context.registerReceiver(this, intentFilter);
@@ -517,7 +511,8 @@
    * Unregisters the BroadcastReceiver in the given context.
    */
   private void unregisterReceiver() {
-    if (!isRegistered) return;
+    if (!isRegistered)
+      return;
 
     isRegistered = false;
     context.unregisterReceiver(this);
@@ -581,7 +576,8 @@
   }
 
   private String getWifiSSID(NetworkState networkState) {
-    if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
+    if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
+      return "";
     return wifiManagerDelegate.getWifiSSID();
   }
 
@@ -597,7 +593,8 @@
   private void connectionTypeChanged(NetworkState networkState) {
     ConnectionType newConnectionType = getConnectionType(networkState);
     String newWifiSSID = getWifiSSID(networkState);
-    if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
+    if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
+      return;
 
     connectionType = newConnectionType;
     wifiSSID = newWifiSSID;
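
connectionTypeChanged() above fires only when the connection type or Wifi SSID actually changed. A minimal sketch of that change-detection guard in isolation; the class and method names are hypothetical, the comparison mirrors the early return in the diff.

final class ConnectionChangeDetector {
  private NetworkMonitorAutoDetect.ConnectionType connectionType;
  private String wifiSSID = "";

  // Returns true only when observers need to be notified.
  boolean update(NetworkMonitorAutoDetect.ConnectionType newType, String newWifiSSID) {
    if (newType == connectionType && newWifiSSID.equals(wifiSSID)) {
      return false; // Nothing changed; skip notification.
    }
    connectionType = newType;
    wifiSSID = newWifiSSID;
    return true;
  }
}
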
diff --git a/webrtc/api/android/java/src/org/webrtc/PeerConnection.java b/webrtc/api/android/java/src/org/webrtc/PeerConnection.java
index 3f7952e..359b52e 100644
--- a/webrtc/api/android/java/src/org/webrtc/PeerConnection.java
+++ b/webrtc/api/android/java/src/org/webrtc/PeerConnection.java
@@ -8,7 +8,6 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 package org.webrtc;
 
 import java.util.Collections;
@@ -27,19 +26,28 @@
   }
 
   /** Tracks PeerConnectionInterface::IceGatheringState */
-  public enum IceGatheringState { NEW, GATHERING, COMPLETE };
-
+  public enum IceGatheringState { NEW, GATHERING, COMPLETE }
 
   /** Tracks PeerConnectionInterface::IceConnectionState */
   public enum IceConnectionState {
-    NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
-  };
+    NEW,
+    CHECKING,
+    CONNECTED,
+    COMPLETED,
+    FAILED,
+    DISCONNECTED,
+    CLOSED
+  }
 
   /** Tracks PeerConnectionInterface::SignalingState */
   public enum SignalingState {
-    STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
-    HAVE_REMOTE_PRANSWER, CLOSED
-  };
+    STABLE,
+    HAVE_LOCAL_OFFER,
+    HAVE_LOCAL_PRANSWER,
+    HAVE_REMOTE_OFFER,
+    HAVE_REMOTE_PRANSWER,
+    CLOSED
+  }
 
   /** Java version of PeerConnectionObserver. */
   public static interface Observer {
@@ -97,39 +105,25 @@
   }
 
   /** Java version of PeerConnectionInterface.IceTransportsType */
-  public enum IceTransportsType {
-    NONE, RELAY, NOHOST, ALL
-  };
+  public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
 
   /** Java version of PeerConnectionInterface.BundlePolicy */
-  public enum BundlePolicy {
-    BALANCED, MAXBUNDLE, MAXCOMPAT
-  };
+  public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
 
   /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
-  public enum RtcpMuxPolicy {
-    NEGOTIATE, REQUIRE
-  };
+  public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
 
   /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
-  public enum TcpCandidatePolicy {
-    ENABLED, DISABLED
-  };
+  public enum TcpCandidatePolicy { ENABLED, DISABLED }
 
   /** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
-  public enum CandidateNetworkPolicy {
-    ALL, LOW_COST
-  };
+  public enum CandidateNetworkPolicy { ALL, LOW_COST }
 
   /** Java version of rtc::KeyType */
-  public enum KeyType {
-    RSA, ECDSA
-  }
+  public enum KeyType { RSA, ECDSA }
 
   /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
-  public enum ContinualGatheringPolicy {
-    GATHER_ONCE, GATHER_CONTINUALLY
-  }
+  public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
 
   /** Java version of PeerConnectionInterface.RTCConfiguration */
   public static class RTCConfiguration {
@@ -187,26 +181,20 @@
 
   public native SessionDescription getRemoteDescription();
 
-  public native DataChannel createDataChannel(
-      String label, DataChannel.Init init);
+  public native DataChannel createDataChannel(String label, DataChannel.Init init);
 
-  public native void createOffer(
-      SdpObserver observer, MediaConstraints constraints);
+  public native void createOffer(SdpObserver observer, MediaConstraints constraints);
 
-  public native void createAnswer(
-      SdpObserver observer, MediaConstraints constraints);
+  public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
 
-  public native void setLocalDescription(
-      SdpObserver observer, SessionDescription sdp);
+  public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
 
-  public native void setRemoteDescription(
-      SdpObserver observer, SessionDescription sdp);
+  public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
 
   public native boolean setConfiguration(RTCConfiguration config);
 
   public boolean addIceCandidate(IceCandidate candidate) {
-    return nativeAddIceCandidate(
-        candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+    return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
   }
 
   public boolean removeIceCandidates(final IceCandidate[] candidates) {
@@ -314,8 +302,7 @@
 
   private native void nativeRemoveLocalStream(long nativeStream);
 
-  private native boolean nativeGetStats(
-      StatsObserver observer, long nativeTrack);
+  private native boolean nativeGetStats(StatsObserver observer, long nativeTrack);
 
   private native RtpSender nativeCreateSender(String kind, String stream_id);
 
@@ -323,9 +310,7 @@
 
   private native List<RtpReceiver> nativeGetReceivers();
 
-  private native boolean nativeStartRtcEventLog(
-      int file_descriptor, int max_size_bytes);
+  private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
 
   private native void nativeStopRtcEventLog();
-
 }
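
Given the enum blocks reformatted above, a short consumer sketch; the helper and its policy are illustrative, the constants come from IceConnectionState in the diff.

// Media typically flows in the CONNECTED and COMPLETED states.
static boolean isIceUsable(PeerConnection.IceConnectionState state) {
  switch (state) {
    case CONNECTED:
    case COMPLETED:
      return true;
    default:
      return false;
  }
}
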
diff --git a/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
index e1ee0c0..8bbc33b 100644
--- a/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
@@ -8,7 +8,6 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 package org.webrtc;
 
 import java.util.List;
@@ -52,9 +51,8 @@
   // |renderEGLContext| can be provided to support HW video decoding to
   // texture and will be used to create a shared EGL context on the video
   // decoding thread.
-  public static native boolean initializeAndroidGlobals(
-      Object context, boolean initializeAudio, boolean initializeVideo,
-      boolean videoHwAcceleration);
+  public static native boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
+      boolean initializeVideo, boolean videoHwAcceleration);
 
   // Field trial initialization. Must be called before PeerConnectionFactory
   // is created.
@@ -81,51 +79,44 @@
     }
   }
 
-  public PeerConnection createPeerConnection(
-      PeerConnection.RTCConfiguration rtcConfig,
-      MediaConstraints constraints,
-      PeerConnection.Observer observer) {
+  public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
+      MediaConstraints constraints, PeerConnection.Observer observer) {
     long nativeObserver = nativeCreateObserver(observer);
     if (nativeObserver == 0) {
       return null;
     }
-    long nativePeerConnection = nativeCreatePeerConnection(
-        nativeFactory, rtcConfig, constraints, nativeObserver);
+    long nativePeerConnection =
+        nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
     if (nativePeerConnection == 0) {
       return null;
     }
     return new PeerConnection(nativePeerConnection, nativeObserver);
   }
 
-  public PeerConnection createPeerConnection(
-      List<PeerConnection.IceServer> iceServers,
-      MediaConstraints constraints,
-      PeerConnection.Observer observer) {
-    PeerConnection.RTCConfiguration rtcConfig =
-        new PeerConnection.RTCConfiguration(iceServers);
+  public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
+      MediaConstraints constraints, PeerConnection.Observer observer) {
+    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
     return createPeerConnection(rtcConfig, constraints, observer);
   }
 
   public MediaStream createLocalMediaStream(String label) {
-    return new MediaStream(
-        nativeCreateLocalMediaStream(nativeFactory, label));
+    return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
   }
 
   public VideoSource createVideoSource(VideoCapturer capturer) {
     final EglBase.Context eglContext =
         localEglbase == null ? null : localEglbase.getEglBaseContext();
-    long nativeAndroidVideoTrackSource = nativeCreateVideoSource(
-        nativeFactory, eglContext, capturer.isScreencast());
-    VideoCapturer.CapturerObserver capturerObserver
-        = new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
-    nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
-         capturerObserver);
+    long nativeAndroidVideoTrackSource =
+        nativeCreateVideoSource(nativeFactory, eglContext, capturer.isScreencast());
+    VideoCapturer.CapturerObserver capturerObserver =
+        new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
+    nativeInitializeVideoCapturer(
+        nativeFactory, capturer, nativeAndroidVideoTrackSource, capturerObserver);
     return new VideoSource(nativeAndroidVideoTrackSource);
   }
 
   public VideoTrack createVideoTrack(String id, VideoSource source) {
-    return new VideoTrack(nativeCreateVideoTrack(
-        nativeFactory, id, source.nativeSource));
+    return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
   }
 
   public AudioSource createAudioSource(MediaConstraints constraints) {
@@ -133,8 +124,7 @@
   }
 
   public AudioTrack createAudioTrack(String id, AudioSource source) {
-    return new AudioTrack(nativeCreateAudioTrack(
-        nativeFactory, id, source.nativeSource));
+    return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
   }
 
   // Starts recording an AEC dump. Ownership of the file is transferred to the
@@ -161,8 +151,8 @@
    *                          renderer.
    * @param remoteEglContext  Must be the same as used by any remote video renderer.
    */
-  public void setVideoHwAccelerationOptions(EglBase.Context localEglContext,
-      EglBase.Context remoteEglContext) {
+  public void setVideoHwAccelerationOptions(
+      EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
     if (localEglbase != null) {
       Logging.w(TAG, "Egl context already set.");
       localEglbase.release();
@@ -173,8 +163,8 @@
     }
     localEglbase = EglBase.create(localEglContext);
     remoteEglbase = EglBase.create(remoteEglContext);
-    nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(),
-        remoteEglbase.getEglBaseContext());
+    nativeSetVideoHwAccelerationOptions(
+        nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
   }
 
   public void dispose() {
@@ -227,22 +217,19 @@
 
   private static native long nativeCreatePeerConnectionFactory(Options options);
 
-  private static native long nativeCreateObserver(
-      PeerConnection.Observer observer);
+  private static native long nativeCreateObserver(PeerConnection.Observer observer);
 
-  private static native long nativeCreatePeerConnection(
-      long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
-      MediaConstraints constraints, long nativeObserver);
+  private static native long nativeCreatePeerConnection(long nativeFactory,
+      PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
 
-  private static native long nativeCreateLocalMediaStream(
-      long nativeFactory, String label);
+  private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
 
   private static native long nativeCreateVideoSource(
       long nativeFactory, EglBase.Context eglContext, boolean is_screencast);
 
-  private static native void nativeInitializeVideoCapturer(
-    long native_factory, VideoCapturer j_video_capturer, long native_source,
-    VideoCapturer.CapturerObserver j_frame_observer);
+  private static native void nativeInitializeVideoCapturer(long native_factory,
+      VideoCapturer j_video_capturer, long native_source,
+      VideoCapturer.CapturerObserver j_frame_observer);
 
   private static native long nativeCreateVideoTrack(
       long nativeFactory, String id, long nativeVideoSource);
@@ -258,8 +245,7 @@
 
   private static native void nativeStopAecDump(long nativeFactory);
 
-  @Deprecated
-  public native void nativeSetOptions(long nativeFactory, Options options);
+  @Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
 
   private static native void nativeSetVideoHwAccelerationOptions(
       long nativeFactory, Object localEGLContext, Object remoteEGLContext);
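
To make the createVideoSource()/createVideoTrack() hunk above concrete, a short wiring sketch; the track id is a placeholder and the capturer is assumed to be supplied by the application.

static VideoTrack createLocalVideoTrack(PeerConnectionFactory factory, VideoCapturer capturer) {
  // createVideoSource() wraps the capturer in an AndroidVideoTrackSource, as shown above.
  VideoSource source = factory.createVideoSource(capturer);
  return factory.createVideoTrack("video0", source); // "video0" is a placeholder id.
}
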
diff --git a/webrtc/api/android/java/src/org/webrtc/RendererCommon.java b/webrtc/api/android/java/src/org/webrtc/RendererCommon.java
index 55547eb..3cec5b0 100644
--- a/webrtc/api/android/java/src/org/webrtc/RendererCommon.java
+++ b/webrtc/api/android/java/src/org/webrtc/RendererCommon.java
@@ -42,8 +42,8 @@
      */
     void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
         int viewportX, int viewportY, int viewportWidth, int viewportHeight);
-    void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
-        int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+    void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
+        int viewportY, int viewportWidth, int viewportHeight);
     void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
         int viewportX, int viewportY, int viewportWidth, int viewportHeight);
 
@@ -116,6 +116,7 @@
   // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
   // This limits excessive cropping when adjusting display size.
   private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
+  // clang-format off
   public static final float[] identityMatrix() {
     return new float[] {
         1, 0, 0, 0,
@@ -140,6 +141,7 @@
          0, 0, 1, 0,
          1, 0, 0, 1};
   }
+  // clang-format on
 
   /**
    * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
@@ -189,8 +191,8 @@
   /**
    * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
    */
-  public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
-      int maxDisplayWidth, int maxDisplayHeight) {
+  public static Point getDisplaySize(
+      ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
     return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
         maxDisplayWidth, maxDisplayHeight);
   }
@@ -230,17 +232,17 @@
    * Calculate display size based on minimum fraction of the video that must remain visible,
    * video aspect ratio, and maximum display size.
    */
-  private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
-      int maxDisplayWidth, int maxDisplayHeight) {
+  private static Point getDisplaySize(
+      float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
     // If there is no constraint on the amount of cropping, fill the allowed display area.
     if (minVisibleFraction == 0 || videoAspectRatio == 0) {
       return new Point(maxDisplayWidth, maxDisplayHeight);
     }
     // Each dimension is constrained by the max display size and how much we are allowed to crop.
-    final int width = Math.min(maxDisplayWidth,
-        Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
-    final int height = Math.min(maxDisplayHeight,
-        Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+    final int width = Math.min(
+        maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+    final int height = Math.min(
+        maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
     return new Point(width, height);
   }
 }
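
A worked example of the getDisplaySize() arithmetic reformatted above, using the BALANCED_VISIBLE_FRACTION of 0.5625 defined in this file; the formula is copied from the diff, the sample numbers are illustrative.

import android.graphics.Point;

public class DisplaySizeDemo {
  // Copy of the private RendererCommon.getDisplaySize() overload above.
  static Point getDisplaySize(
      float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
      return new Point(maxDisplayWidth, maxDisplayHeight);
    }
    final int width = Math.min(
        maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
    final int height = Math.min(
        maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
    return new Point(width, height);
  }

  public static void main(String[] args) {
    // 16:9 video in a portrait 720x1280 view: both dimensions cap at 720, so at
    // most 1 - 0.5625 of the frame width may be cropped away.
    System.out.println(getDisplaySize(0.5625f, 16f / 9f, 720, 1280)); // Point(720, 720)
  }
}
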
diff --git a/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java b/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java
index 1c4eef3..7df030d 100644
--- a/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java
+++ b/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java
@@ -48,8 +48,8 @@
   // Will be released in dispose().
   private static native long nativeGetTrack(long nativeRtpReceiver);
 
-  private static native boolean nativeSetParameters(long nativeRtpReceiver,
-                                                    RtpParameters parameters);
+  private static native boolean nativeSetParameters(
+      long nativeRtpReceiver, RtpParameters parameters);
 
   private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
 
diff --git a/webrtc/api/android/java/src/org/webrtc/RtpSender.java b/webrtc/api/android/java/src/org/webrtc/RtpSender.java
index 2c094ac..12df4e2 100644
--- a/webrtc/api/android/java/src/org/webrtc/RtpSender.java
+++ b/webrtc/api/android/java/src/org/webrtc/RtpSender.java
@@ -30,9 +30,8 @@
   // not appropriate when the track is owned by, for example, another RtpSender
   // or a MediaStream.
   public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
-    if (!nativeSetTrack(nativeRtpSender,
-                        (track == null) ? 0 : track.nativeTrack)) {
-        return false;
+    if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
+      return false;
     }
     if (cachedTrack != null && ownsTrack) {
       cachedTrack.dispose();
@@ -65,20 +64,17 @@
     free(nativeRtpSender);
   }
 
-  private static native boolean nativeSetTrack(long nativeRtpSender,
-                                               long nativeTrack);
+  private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
 
   // This should increment the reference count of the track.
   // Will be released in dispose() or setTrack().
   private static native long nativeGetTrack(long nativeRtpSender);
 
-  private static native boolean nativeSetParameters(long nativeRtpSender,
-                                                    RtpParameters parameters);
+  private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
 
   private static native RtpParameters nativeGetParameters(long nativeRtpSender);
 
   private static native String nativeId(long nativeRtpSender);
 
   private static native void free(long nativeRtpSender);
-}
-;
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java b/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java
index 9b86fe3..08d34dd 100644
--- a/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java
+++ b/webrtc/api/android/java/src/org/webrtc/ScreenCapturerAndroid.java
@@ -35,11 +35,10 @@
  * frames. At any time, at most one frame is being processed.
  */
 @TargetApi(21)
-public class ScreenCapturerAndroid implements
-    VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
-
-  private static final int DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
-      | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+public class ScreenCapturerAndroid
+    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private static final int DISPLAY_FLAGS =
+      DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
   // DPI for VirtualDisplay, does not seem to matter for us.
   private static final int VIRTUAL_DISPLAY_DPI = 400;
 
@@ -65,8 +64,7 @@
   * @param mediaProjectionCallback MediaProjection callback to implement application-specific
   *     logic in events such as when the user revokes a previously granted capture permission.
   */
-  public ScreenCapturerAndroid(
-      Intent mediaProjectionPermissionResultData,
+  public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
       MediaProjection.Callback mediaProjectionCallback) {
     this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
     this.mediaProjectionCallback = mediaProjectionCallback;
@@ -79,10 +77,8 @@
   }
 
   @Override
-  public synchronized void initialize(
-      final SurfaceTextureHelper surfaceTextureHelper,
-      final Context applicationContext,
-      final VideoCapturer.CapturerObserver capturerObserver) {
+  public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
+      final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
     checkNotDisposed();
 
     if (capturerObserver == null) {
@@ -95,13 +91,13 @@
     }
     this.surfaceTextureHelper = surfaceTextureHelper;
 
-    mediaProjectionManager = (MediaProjectionManager)
-        applicationContext.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
+    mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
+        Context.MEDIA_PROJECTION_SERVICE);
   }
 
   @Override
-  public synchronized void startCapture(final int width, final int height,
-       final int ignoredFramerate) {
+  public synchronized void startCapture(
+      final int width, final int height, final int ignoredFramerate) {
     checkNotDisposed();
 
     this.width = width;
@@ -143,7 +139,6 @@
     });
   }
 
-
   @Override
   public synchronized void dispose() {
     isDisposed = true;
@@ -184,9 +179,8 @@
 
   private void createVirtualDisplay() {
     surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
-    virtualDisplay = mediaProjection.createVirtualDisplay(
-        "WebRTC_ScreenCapture", width, height, VIRTUAL_DISPLAY_DPI,
-        DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
+    virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
+        VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
         null /* callback */, null /* callback handler */);
   }
 
@@ -194,8 +188,8 @@
   @Override
   public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
     numCapturedFrames++;
-    capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix,
-        0 /* rotation */, timestampNs);
+    capturerObserver.onTextureFrameCaptured(
+        width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
   }
 
   @Override
@@ -207,4 +201,3 @@
     return numCapturedFrames;
   }
 }
-
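
For context, the permission handshake the constructor javadoc above assumes looks roughly like this (a sketch; the Activity, request code, and callback body are illustrative, not WebRTC API):

  import android.app.Activity;
  import android.content.Context;
  import android.content.Intent;
  import android.media.projection.MediaProjection;
  import android.media.projection.MediaProjectionManager;
  import org.webrtc.ScreenCapturerAndroid;

  public class CaptureActivity extends Activity {
    private static final int SCREEN_CAPTURE_REQUEST_CODE = 1;
    private ScreenCapturerAndroid screenCapturer;

    void requestScreenCapture() {
      MediaProjectionManager manager =
          (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
      startActivityForResult(manager.createScreenCaptureIntent(), SCREEN_CAPTURE_REQUEST_CODE);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
      if (requestCode != SCREEN_CAPTURE_REQUEST_CODE || resultCode != RESULT_OK) {
        return;
      }
      // |data| is the mediaProjectionPermissionResultData the constructor expects.
      screenCapturer = new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
        @Override
        public void onStop() {
          // The user revoked the previously granted capture permission.
        }
      });
    }
  }
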
diff --git a/webrtc/api/android/java/src/org/webrtc/SessionDescription.java b/webrtc/api/android/java/src/org/webrtc/SessionDescription.java
index f6dd361..3236fe8 100644
--- a/webrtc/api/android/java/src/org/webrtc/SessionDescription.java
+++ b/webrtc/api/android/java/src/org/webrtc/SessionDescription.java
@@ -8,7 +8,6 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 package org.webrtc;
 
 /**
@@ -19,7 +18,9 @@
 public class SessionDescription {
   /** Java-land enum version of SessionDescriptionInterface's type() string. */
   public static enum Type {
-    OFFER, PRANSWER, ANSWER;
+    OFFER,
+    PRANSWER,
+    ANSWER;
 
     public String canonicalForm() {
       return name().toLowerCase();
diff --git a/webrtc/api/android/java/src/org/webrtc/StatsReport.java b/webrtc/api/android/java/src/org/webrtc/StatsReport.java
index 0082b6e..a72ea5d 100644
--- a/webrtc/api/android/java/src/org/webrtc/StatsReport.java
+++ b/webrtc/api/android/java/src/org/webrtc/StatsReport.java
@@ -12,7 +12,6 @@
 
 /** Java version of webrtc::StatsReport. */
 public class StatsReport {
-
   /** Java version of webrtc::StatsReport::Value. */
   public static class Value {
     public final String name;
@@ -45,8 +44,13 @@
 
   public String toString() {
     StringBuilder builder = new StringBuilder();
-    builder.append("id: ").append(id).append(", type: ").append(type)
-        .append(", timestamp: ").append(timestamp).append(", values: ");
+    builder.append("id: ")
+        .append(id)
+        .append(", type: ")
+        .append(type)
+        .append(", timestamp: ")
+        .append(timestamp)
+        .append(", values: ");
     for (int i = 0; i < values.length; ++i) {
       builder.append(values[i].toString()).append(", ");
     }
diff --git a/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
index a38b0e4..c9f3b55 100644
--- a/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
+++ b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
@@ -83,69 +83,69 @@
 
     // Vertex coordinates in Normalized Device Coordinates, i.e.
     // (-1, -1) is bottom-left and (1, 1) is top-right.
-    private static final FloatBuffer DEVICE_RECTANGLE =
-        GlUtil.createFloatBuffer(new float[] {
-              -1.0f, -1.0f,  // Bottom left.
-               1.0f, -1.0f,  // Bottom right.
-              -1.0f,  1.0f,  // Top left.
-               1.0f,  1.0f,  // Top right.
-            });
+    private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
+        -1.0f, -1.0f, // Bottom left.
+        1.0f, -1.0f, // Bottom right.
+        -1.0f, 1.0f, // Top left.
+        1.0f, 1.0f, // Top right.
+    });
 
     // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
-    private static final FloatBuffer TEXTURE_RECTANGLE =
-        GlUtil.createFloatBuffer(new float[] {
-              0.0f, 0.0f,  // Bottom left.
-              1.0f, 0.0f,  // Bottom right.
-              0.0f, 1.0f,  // Top left.
-              1.0f, 1.0f   // Top right.
-            });
+    private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
+        0.0f, 0.0f, // Bottom left.
+        1.0f, 0.0f, // Bottom right.
+        0.0f, 1.0f, // Top left.
+        1.0f, 1.0f // Top right.
+    });
 
+    // clang-format off
     private static final String VERTEX_SHADER =
-        "varying vec2 interp_tc;\n"
-      + "attribute vec4 in_pos;\n"
-      + "attribute vec4 in_tc;\n"
-      + "\n"
-      + "uniform mat4 texMatrix;\n"
-      + "\n"
-      + "void main() {\n"
-      + "    gl_Position = in_pos;\n"
-      + "    interp_tc = (texMatrix * in_tc).xy;\n"
-      + "}\n";
+          "varying vec2 interp_tc;\n"
+        + "attribute vec4 in_pos;\n"
+        + "attribute vec4 in_tc;\n"
+        + "\n"
+        + "uniform mat4 texMatrix;\n"
+        + "\n"
+        + "void main() {\n"
+        + "    gl_Position = in_pos;\n"
+        + "    interp_tc = (texMatrix * in_tc).xy;\n"
+        + "}\n";
 
     private static final String FRAGMENT_SHADER =
-        "#extension GL_OES_EGL_image_external : require\n"
-      + "precision mediump float;\n"
-      + "varying vec2 interp_tc;\n"
-      + "\n"
-      + "uniform samplerExternalOES oesTex;\n"
-      // Difference in texture coordinate corresponding to one
-      // sub-pixel in the x direction.
-      + "uniform vec2 xUnit;\n"
-      // Color conversion coefficients, including constant term
-      + "uniform vec4 coeffs;\n"
-      + "\n"
-      + "void main() {\n"
-      // Since the alpha read from the texture is always 1, this could
-      // be written as a mat4 x vec4 multiply. However, that seems to
-      // give a worse framerate, possibly because the additional
-      // multiplies by 1.0 consume resources. TODO(nisse): Could also
-      // try to do it as a vec3 x mat3x4, followed by an add in of a
-      // constant vector.
-      + "  gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
-      + "      texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
-      + "  gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
-      + "      texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
-      + "  gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
-      + "      texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
-      + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
-      + "      texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
-      + "}\n";
+          "#extension GL_OES_EGL_image_external : require\n"
+        + "precision mediump float;\n"
+        + "varying vec2 interp_tc;\n"
+        + "\n"
+        + "uniform samplerExternalOES oesTex;\n"
+        // Difference in texture coordinate corresponding to one
+        // sub-pixel in the x direction.
+        + "uniform vec2 xUnit;\n"
+        // Color conversion coefficients, including constant term
+        + "uniform vec4 coeffs;\n"
+        + "\n"
+        + "void main() {\n"
+        // Since the alpha read from the texture is always 1, this could
+        // be written as a mat4 x vec4 multiply. However, that seems to
+        // give a worse framerate, possibly because the additional
+        // multiplies by 1.0 consume resources. TODO(nisse): Could also
+        // try to do it as a vec3 x mat3x4, followed by an add in of a
+        // constant vector.
+        + "  gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+        + "      texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+        + "  gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+        + "      texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+        + "  gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+        + "      texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+        + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+        + "      texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+        + "}\n";
+    // clang-format on
 
     private int texMatrixLoc;
     private int xUnitLoc;
-    private int coeffsLoc;;
+    private int coeffsLoc;
 
-    YuvConverter (EglBase.Context sharedContext) {
+    YuvConverter(EglBase.Context sharedContext) {
       eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
       eglBase.createDummyPbufferSurface();
       eglBase.makeCurrent();
@@ -165,11 +165,10 @@
       eglBase.detachCurrent();
     }
 
-    synchronized void convert(ByteBuffer buf,
-        int width, int height, int stride, int textureId, float [] transformMatrix) {
+    synchronized void convert(
+        ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
       if (released) {
-        throw new IllegalStateException(
-            "YuvConverter.convert called on released object");
+        throw new IllegalStateException("YuvConverter.convert called on released object");
       }
 
       // We draw into a buffer laid out like
@@ -202,17 +201,15 @@
       // has to be a multiple of 8 pixels.
 
       if (stride % 8 != 0) {
-        throw new IllegalArgumentException(
-            "Invalid stride, must be a multiple of 8");
+        throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
       }
-      if (stride < width){
-        throw new IllegalArgumentException(
-            "Invalid stride, must >= width");
+      if (stride < width) {
+        throw new IllegalArgumentException("Invalid stride, must be >= width");
       }
 
-      int y_width = (width+3) / 4;
-      int uv_width = (width+7) / 8;
-      int uv_height = (height+1)/2;
+      int y_width = (width + 3) / 4;
+      int uv_width = (width + 7) / 8;
+      int uv_height = (height + 1) / 2;
       int total_height = height + uv_height;
       int size = stride * total_height;
 
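
The size arithmetic above packs an I420 frame into one RGBA readback; a minimal sketch of the required capacity (illustrative helper, not a WebRTC API):

  class YuvBufferSizeExample {
    static int requiredCapacity(int width, int height, int stride) {
      if (stride % 8 != 0) {
        throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
      }
      if (stride < width) {
        throw new IllegalArgumentException("Invalid stride, must be >= width");
      }
      int uvHeight = (height + 1) / 2;     // U and V rows, half height, rounded up.
      int totalHeight = height + uvHeight; // Y plane on top, chroma rows below it.
      return stride * totalHeight;         // Read back as stride/4 RGBA pixels per row.
    }
  }
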
@@ -222,18 +219,16 @@
       // Produce a frame buffer starting at top-left corner, not
       // bottom-left.
       transformMatrix =
-          RendererCommon.multiplyMatrices(transformMatrix,
-              RendererCommon.verticalFlipMatrix());
+          RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
 
       // Create a new pbuffer surface with the correct size if needed.
       if (eglBase.hasSurface()) {
-        if (eglBase.surfaceWidth() != stride/4 ||
-            eglBase.surfaceHeight() != total_height){
+        if (eglBase.surfaceWidth() != stride / 4 || eglBase.surfaceHeight() != total_height) {
           eglBase.releaseSurface();
-          eglBase.createPbufferSurface(stride/4, total_height);
+          eglBase.createPbufferSurface(stride / 4, total_height);
         }
       } else {
-        eglBase.createPbufferSurface(stride/4, total_height);
+        eglBase.createPbufferSurface(stride / 4, total_height);
       }
 
       eglBase.makeCurrent();
@@ -245,9 +240,7 @@
       // Draw Y
       GLES20.glViewport(0, 0, y_width, height);
       // Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
-      GLES20.glUniform2f(xUnitLoc,
-          transformMatrix[0] / width,
-          transformMatrix[1] / width);
+      GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
       // Y'UV444 to RGB888, see
       // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
       // We use the ITU-R coefficients for U and V.
@@ -257,19 +250,18 @@
       // Draw U
       GLES20.glViewport(0, height, uv_width, uv_height);
       // Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
-      GLES20.glUniform2f(xUnitLoc,
-          2.0f * transformMatrix[0] / width,
-          2.0f * transformMatrix[1] / width);
+      GLES20.glUniform2f(
+          xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
       GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
       GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
 
       // Draw V
-      GLES20.glViewport(stride/8, height, uv_width, uv_height);
+      GLES20.glViewport(stride / 8, height, uv_width, uv_height);
       GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
       GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
 
-      GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
-          GLES20.GL_UNSIGNED_BYTE, buf);
+      GLES20.glReadPixels(
+          0, 0, stride / 4, total_height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
 
       GlUtil.checkNoGLES2Error("YuvConverter.convert");
 
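
A CPU reference for the per-channel dot products the shader performs (a sketch; the U and V coefficients match the glUniform4f calls above, while the Y row assumes the standard ITU-R BT.601 luma weights, which this hunk does not show):

  class YuvCoefficientsExample {
    static float[] rgbToYuv(float r, float g, float b) {
      float y = 0.299f * r + 0.587f * g + 0.114f * b;         // Assumed BT.601 luma row.
      float u = -0.169f * r - 0.331f * g + 0.499f * b + 0.5f; // Matches the U coefficients.
      float v = 0.499f * r - 0.418f * g - 0.0813f * b + 0.5f; // Matches the V coefficients.
      return new float[] {y, u, v};
    }
  }
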
@@ -351,7 +343,7 @@
     if (yuvConverter != null)
       return yuvConverter;
 
-    synchronized(this) {
+    synchronized (this) {
       if (yuvConverter == null)
         yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
       return yuvConverter;
@@ -409,7 +401,8 @@
    */
   public void returnTextureFrame() {
     handler.post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         isTextureInUse = false;
         if (isQuitting) {
           release();
@@ -442,8 +435,8 @@
     });
   }
 
-  public void textureToYUV(ByteBuffer buf,
-      int width, int height, int stride, int textureId, float [] transformMatrix) {
+  public void textureToYUV(
+      ByteBuffer buf, int width, int height, int stride, int textureId, float[] transformMatrix) {
     if (textureId != oesTextureId)
       throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
 
diff --git a/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java b/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java
index eec6add..eaaf24b 100644
--- a/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java
+++ b/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java
@@ -33,8 +33,8 @@
  * Interaction with the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
  * Interaction with the layout framework in onMeasure and onSizeChanged.
  */
-public class SurfaceViewRenderer extends SurfaceView
-    implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+public class SurfaceViewRenderer
+    extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
   private static final String TAG = "SurfaceViewRenderer";
 
   // Dedicated render thread.
@@ -103,13 +103,15 @@
 
   // Runnable for posting frames to render thread.
   private final Runnable renderFrameRunnable = new Runnable() {
-    @Override public void run() {
+    @Override
+    public void run() {
       renderFrameOnRenderThread();
     }
   };
   // Runnable for clearing Surface to black.
   private final Runnable makeBlackRunnable = new Runnable() {
-    @Override public void run() {
+    @Override
+    public void run() {
       makeBlack();
     }
   };
@@ -134,8 +136,7 @@
    * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
    * reinitialize the renderer after a previous init()/release() cycle.
    */
-  public void init(
-      EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+  public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
     init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
   }
 
@@ -145,9 +146,9 @@
    * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
    * init()/release() cycle.
    */
-  public void init(
-      final EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
-      final int[] configAttributes, RendererCommon.GlDrawer drawer) {
+  public void init(final EglBase.Context sharedContext,
+      RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
     synchronized (handlerLock) {
       if (renderThreadHandler != null) {
         throw new IllegalStateException(getResourceName() + "Already initialized");
@@ -210,7 +211,8 @@
       // when the EGL context is lost. It might be dangerous to delete them manually in
       // Activity.onDestroy().
       renderThreadHandler.postAtFrontOfQueue(new Runnable() {
-        @Override public void run() {
+        @Override
+        public void run() {
           drawer.release();
           drawer = null;
           if (yuvTextures != null) {
@@ -289,8 +291,7 @@
     }
     synchronized (handlerLock) {
       if (renderThreadHandler == null) {
-        Logging.d(TAG, getResourceName()
-            + "Dropping frame - Not initialized or already released.");
+        Logging.d(TAG, getResourceName() + "Dropping frame - Not initialized or already released.");
         VideoRenderer.renderFrameDone(frame);
         return;
       }
@@ -335,8 +336,8 @@
         return;
       }
       desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
-      isNewSize = (desiredLayoutSize.x != getMeasuredWidth()
-          || desiredLayoutSize.y != getMeasuredHeight());
+      isNewSize =
+          (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight());
       setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
     }
     if (isNewSize) {
@@ -498,17 +499,17 @@
       // Make sure YUV textures are allocated.
       if (yuvTextures == null) {
         yuvTextures = new int[3];
-        for (int i = 0; i < 3; i++)  {
+        for (int i = 0; i < 3; i++) {
           yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
         }
       }
       yuvUploader.uploadYuvData(
           yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
-      drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-          0, 0, surfaceSize.x, surfaceSize.y);
+      drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
+          surfaceSize.x, surfaceSize.y);
     } else {
-      drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-          0, 0, surfaceSize.x, surfaceSize.y);
+      drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(), 0, 0,
+          surfaceSize.x, surfaceSize.y);
     }
 
     eglBase.swapBuffers();
@@ -547,8 +548,8 @@
     synchronized (layoutLock) {
       if (frameWidth != frame.width || frameHeight != frame.height
           || frameRotation != frame.rotationDegree) {
-        Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
-            + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+        Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to " + frame.width
+                + "x" + frame.height + " with rotation " + frame.rotationDegree);
         if (rendererEvents != null) {
           rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
         }
@@ -556,7 +557,8 @@
         frameHeight = frame.height;
         frameRotation = frame.rotationDegree;
         post(new Runnable() {
-          @Override public void run() {
+          @Override
+          public void run() {
             requestLayout();
           }
         });
@@ -566,14 +568,14 @@
 
   private void logStatistics() {
     synchronized (statisticsLock) {
-      Logging.d(TAG, getResourceName() + "Frames received: "
-          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      Logging.d(TAG, getResourceName() + "Frames received: " + framesReceived + ". Dropped: "
+              + framesDropped + ". Rendered: " + framesRendered);
       if (framesReceived > 0 && framesRendered > 0) {
         final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
-        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
-            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6)
+                + " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
         Logging.d(TAG, getResourceName() + "Average render time: "
-            + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+                + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
       }
     }
   }
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
index 706c43d..b8cceaf 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
@@ -24,14 +24,13 @@
     void onCapturerStopped();
 
     // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
-    void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
-        long timeStamp);
+    void onByteBufferFrameCaptured(
+        byte[] data, int width, int height, int rotation, long timeStamp);
 
     // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
     // owned by VideoCapturer.
-    void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp);
+    void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
+        int rotation, long timestamp);
   }
 
   // An implementation of CapturerObserver that forwards all calls from
@@ -55,25 +54,23 @@
     }
 
     @Override
-    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
-        int rotation, long timeStamp) {
-      nativeOnByteBufferFrameCaptured(nativeSource, data, data.length, width, height, rotation,
-          timeStamp);
+    public void onByteBufferFrameCaptured(
+        byte[] data, int width, int height, int rotation, long timeStamp) {
+      nativeOnByteBufferFrameCaptured(
+          nativeSource, data, data.length, width, height, rotation, timeStamp);
     }
 
     @Override
-    public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timestamp) {
-      nativeOnTextureFrameCaptured(nativeSource, width, height, oesTextureId, transformMatrix,
-          rotation, timestamp);
+    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+        float[] transformMatrix, int rotation, long timestamp) {
+      nativeOnTextureFrameCaptured(
+          nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
     }
 
-    private native void nativeCapturerStarted(long nativeSource,
-        boolean success);
+    private native void nativeCapturerStarted(long nativeSource, boolean success);
     private native void nativeCapturerStopped(long nativeSource);
-    private native void nativeOnByteBufferFrameCaptured(long nativeSource,
-        byte[] data, int length, int width, int height, int rotation, long timeStamp);
+    private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
+        int width, int height, int rotation, long timeStamp);
     private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
         int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
   }
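
The observer contract above is easiest to read as a stub. A minimal sketch (the boolean onCapturerStarted signature is inferred from nativeCapturerStarted above and is an assumption; the other methods appear in this hunk):

  class NoOpCapturerObserver implements VideoCapturer.CapturerObserver {
    @Override
    public void onCapturerStarted(boolean success) {}

    @Override
    public void onCapturerStopped() {}

    @Override
    public void onByteBufferFrameCaptured(
        byte[] data, int width, int height, int rotation, long timeStamp) {}

    @Override
    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestamp) {}
  }
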
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
index ec447ab..a019ff6 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
@@ -40,14 +40,13 @@
 // the camera has been stopped.
 // TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
 @SuppressWarnings("deprecation")
-public class VideoCapturerAndroid implements
-    CameraVideoCapturer,
-    android.hardware.Camera.PreviewCallback,
-    SurfaceTextureHelper.OnTextureFrameAvailableListener {
+public class VideoCapturerAndroid
+    implements CameraVideoCapturer, android.hardware.Camera.PreviewCallback,
+               SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private static final String TAG = "VideoCapturerAndroid";
   private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
 
-  private android.hardware.Camera camera;  // Only non-null while capturing.
+  private android.hardware.Camera camera; // Only non-null while capturing.
   private final AtomicBoolean isCameraRunning = new AtomicBoolean();
   // Use maybePostOnCameraThread() instead of posting directly to the handler - this way all
  // callbacks with a specified token can be removed at once.
@@ -83,30 +82,29 @@
   // Camera error callback.
   private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
       new android.hardware.Camera.ErrorCallback() {
-    @Override
-    public void onError(int error, android.hardware.Camera camera) {
-      String errorMessage;
-      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
-        errorMessage = "Camera server died!";
-      } else {
-        errorMessage = "Camera error: " + error;
-      }
-      Logging.e(TAG, errorMessage);
-      if (eventsHandler != null) {
-        eventsHandler.onCameraError(errorMessage);
-      }
-    }
-  };
+        @Override
+        public void onError(int error, android.hardware.Camera camera) {
+          String errorMessage;
+          if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
+            errorMessage = "Camera server died!";
+          } else {
+            errorMessage = "Camera error: " + error;
+          }
+          Logging.e(TAG, errorMessage);
+          if (eventsHandler != null) {
+            eventsHandler.onCameraError(errorMessage);
+          }
+        }
+      };
 
-  public static VideoCapturerAndroid create(String name,
-      CameraEventsHandler eventsHandler) {
+  public static VideoCapturerAndroid create(String name, CameraEventsHandler eventsHandler) {
     return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
   }
 
   // Use ctor directly instead.
   @Deprecated
-  public static VideoCapturerAndroid create(String name,
-      CameraEventsHandler eventsHandler, boolean captureToTexture) {
+  public static VideoCapturerAndroid create(
+      String name, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     try {
       return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
     } catch (RuntimeException e) {
@@ -176,7 +174,8 @@
   @Override
   public void changeCaptureFormat(final int width, final int height, final int framerate) {
     maybePostOnCameraThread(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         startPreviewOnCameraThread(width, height, framerate);
       }
     });
@@ -195,8 +194,8 @@
     return isCapturingToTexture;
   }
 
-  public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
-      boolean captureToTexture) {
+  public VideoCapturerAndroid(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
     if (android.hardware.Camera.getNumberOfCameras() == 0) {
       throw new RuntimeException("No cameras available");
     }
@@ -225,7 +224,7 @@
   private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
     return cameraThreadHandler != null && isCameraRunning.get()
         && cameraThreadHandler.postAtTime(
-            runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
+               runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
   }
 
   @Override
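
The token passed to postAtTime() above is what makes bulk cancellation possible; a sketch of the idiom (illustrative class, not from this CL; VideoCapturerAndroid itself uses "this" as the token):

  class TokenPostingExample {
    private final Object token = new Object();

    boolean postDelayed(android.os.Handler handler, int delayMs, Runnable runnable) {
      return handler.postAtTime(
          runnable, token, android.os.SystemClock.uptimeMillis() + delayMs);
    }

    // Removes every callback posted with |token| in one call.
    void cancelAll(android.os.Handler handler) {
      handler.removeCallbacksAndMessages(token);
    }
  }
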
@@ -332,8 +331,8 @@
 
       camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
 
-      Logging.d(TAG, "Camera orientation: " + info.orientation +
-          " .Device orientation: " + getDeviceOrientation());
+      Logging.d(TAG, "Camera orientation: " + info.orientation + ". Device orientation: "
+              + getDeviceOrientation());
       camera.setErrorCallback(cameraErrorCallback);
       startPreviewOnCameraThread(width, height, framerate);
       frameObserver.onCapturerStarted(true);
@@ -343,7 +342,7 @@
 
       // Start camera observer.
       cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
-    } catch (IOException|RuntimeException e) {
+    } catch (IOException | RuntimeException e) {
       Logging.e(TAG, "startCapture failed", e);
       // Make sure the camera is released.
       stopCaptureOnCameraThread(true /* stopHandler */);
@@ -351,7 +350,7 @@
       if (eventsHandler != null) {
         eventsHandler.onCameraError("Camera can not be started.");
       }
-     }
+    }
   }
 
   // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
@@ -392,8 +391,7 @@
     }
 
     // Update camera parameters.
-    Logging.d(TAG, "isVideoStabilizationSupported: " +
-        parameters.isVideoStabilizationSupported());
+    Logging.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
     if (parameters.isVideoStabilizationSupported()) {
       parameters.setVideoStabilization(true);
     }
@@ -453,7 +451,8 @@
     Logging.d(TAG, "stopCapture");
     final CountDownLatch barrier = new CountDownLatch(1);
     final boolean didPost = maybePostOnCameraThread(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         stopCaptureOnCameraThread(true /* stopHandler */);
         barrier.countDown();
       }
@@ -535,9 +534,8 @@
   private int getDeviceOrientation() {
     int orientation = 0;
 
-    WindowManager wm = (WindowManager) applicationContext.getSystemService(
-        Context.WINDOW_SERVICE);
-    switch(wm.getDefaultDisplay().getRotation()) {
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
       case Surface.ROTATION_90:
         orientation = 90;
         break;
@@ -579,8 +577,7 @@
       throw new RuntimeException("Unexpected camera in callback!");
     }
 
-    final long captureTimeNs =
-        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
 
     if (eventsHandler != null && !firstFrameReported) {
       eventsHandler.onFirstFrameAvailable();
@@ -588,14 +585,13 @@
     }
 
     cameraStatistics.addFrame();
-    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
-        getFrameOrientation(), captureTimeNs);
+    frameObserver.onByteBufferFrameCaptured(
+        data, captureFormat.width, captureFormat.height, getFrameOrientation(), captureTimeNs);
     camera.addCallbackBuffer(data);
   }
 
   @Override
-  public void onTextureFrameAvailable(
-      int oesTextureId, float[] transformMatrix, long timestampNs) {
+  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
     checkIsOnCameraThread();
     if (!isCameraRunning.get()) {
       Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java b/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java
index 819b77d..8ee4d27 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java
@@ -61,11 +61,13 @@
       // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
       // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
       // matrix.
+      // clang-format off
       samplingMatrix = new float[] {
           1,  0, 0, 0,
           0, -1, 0, 0,
           0,  0, 1, 0,
           0,  1, 0, 1};
+      // clang-format on
     }
 
     /**
@@ -97,14 +99,13 @@
 
     @Override
     public String toString() {
-      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
-          ":" + yuvStrides[2];
+      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] + ":" + yuvStrides[2];
     }
   }
 
   // Helper native function to do a video frame plane copying.
-  public static native void nativeCopyPlane(ByteBuffer src, int width,
-      int height, int srcStride, ByteBuffer dst, int dstStride);
+  public static native void nativeCopyPlane(
+      ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
 
   /** The real meat of VideoSinkInterface. */
   public static interface Callbacks {
@@ -115,17 +116,17 @@
     public void renderFrame(I420Frame frame);
   }
 
-   /**
-    * This must be called after every renderFrame() to release the frame.
-    */
-   public static void renderFrameDone(I420Frame frame) {
-     frame.yuvPlanes = null;
-     frame.textureId = 0;
-     if (frame.nativeFramePointer != 0) {
-       releaseNativeFrame(frame.nativeFramePointer);
-       frame.nativeFramePointer = 0;
-     }
-   }
+  /**
+   * This must be called after every renderFrame() to release the frame.
+   */
+  public static void renderFrameDone(I420Frame frame) {
+    frame.yuvPlanes = null;
+    frame.textureId = 0;
+    if (frame.nativeFramePointer != 0) {
+      releaseNativeFrame(frame.nativeFramePointer);
+      frame.nativeFramePointer = 0;
+    }
+  }
 
   long nativeVideoRenderer;
 
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java b/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java
index 5aa16ab..82c995d 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java
@@ -78,7 +78,7 @@
     // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
     // currently leaking resources to avoid a rare crash in release() where the EGLContext has
     // become invalid beforehand.
-    private int[] yuvTextures = { 0, 0, 0 };
+    private int[] yuvTextures = {0, 0, 0};
     private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
     private final RendererCommon.GlDrawer drawer;
     // Resources for making a deep copy of incoming OES texture frame.
@@ -90,7 +90,8 @@
     private I420Frame pendingFrame;
     private final Object pendingFrameLock = new Object();
     // Type of video frame used for recent frame rendering.
-    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
+    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
+
     private RendererType rendererType;
     private RendererCommon.ScalingType scalingType;
     private boolean mirror;
@@ -136,9 +137,7 @@
     // it rendered up right.
     private int rotationDegree;
 
-    private YuvImageRenderer(
-        GLSurfaceView surface, int id,
-        int x, int y, int width, int height,
+    private YuvImageRenderer(GLSurfaceView surface, int id, int x, int y, int width, int height,
         RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
       Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
       this.surface = surface;
@@ -167,11 +166,11 @@
     }
 
     private void createTextures() {
-      Logging.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread:" +
-          Thread.currentThread().getId());
+      Logging.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread: "
+              + Thread.currentThread().getId());
 
       // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
-      for (int i = 0; i < 3; i++)  {
+      for (int i = 0; i < 3; i++) {
         yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
       }
       // Generate texture and framebuffer for offscreen texture copy.
@@ -179,30 +178,29 @@
     }
 
     private void updateLayoutMatrix() {
-      synchronized(updateLayoutLock) {
+      synchronized (updateLayoutLock) {
         if (!updateLayoutProperties) {
           return;
         }
         // Initialize to the maximum allowed area. Round to integer coordinates inward, toward the
         // layout bounding box (ceil left/top and floor right/bottom), so the constraints hold.
-        displayLayout.set(
-            (screenWidth * layoutInPercentage.left + 99) / 100,
+        displayLayout.set((screenWidth * layoutInPercentage.left + 99) / 100,
             (screenHeight * layoutInPercentage.top + 99) / 100,
             (screenWidth * layoutInPercentage.right) / 100,
             (screenHeight * layoutInPercentage.bottom) / 100);
-        Logging.d(TAG, "ID: "  + id + ". AdjustTextureCoords. Allowed display size: "
-            + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
-            + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
+        Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+                + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+                + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
         final float videoAspectRatio = (rotationDegree % 180 == 0)
             ? (float) videoWidth / videoHeight
             : (float) videoHeight / videoWidth;
         // Adjust display size based on |scalingType|.
-        final Point displaySize = RendererCommon.getDisplaySize(scalingType,
-            videoAspectRatio, displayLayout.width(), displayLayout.height());
+        final Point displaySize = RendererCommon.getDisplaySize(
+            scalingType, videoAspectRatio, displayLayout.width(), displayLayout.height());
         displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
-                            (displayLayout.height() - displaySize.y) / 2);
-        Logging.d(TAG, "  Adjusted display size: " + displayLayout.width() + " x "
-            + displayLayout.height());
+            (displayLayout.height() - displaySize.y) / 2);
+        Logging.d(TAG,
+            "  Adjusted display size: " + displayLayout.width() + " x " + displayLayout.height());
         layoutMatrix = RendererCommon.getLayoutMatrix(
             mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
         updateLayoutProperties = false;
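
The +99 before the division above is an integer ceiling; a worked example of the inward rounding (illustrative values):

  class LayoutRoundingExample {
    public static void main(String[] args) {
      int screenWidth = 999;
      int left = (screenWidth * 33 + 99) / 100; // ceil(329.67)  -> 330
      int right = (screenWidth * 67) / 100;     // floor(669.33) -> 669
      // The 330..669 span stays strictly inside the 33%..67% band.
      System.out.println(left + ".." + right);
    }
  }
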
@@ -242,14 +240,13 @@
             GlUtil.checkNoGLES2Error("glBindFramebuffer");
 
             // Copy the OES texture content. This will also normalize the sampling matrix.
-            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
-                textureCopy.getWidth(), textureCopy.getHeight(),
-                 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
-             rotatedSamplingMatrix = RendererCommon.identityMatrix();
+            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix, textureCopy.getWidth(),
+                textureCopy.getHeight(), 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
+            rotatedSamplingMatrix = RendererCommon.identityMatrix();
 
-             // Restore normal framebuffer.
-             GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
-             GLES20.glFinish();
+            // Restore normal framebuffer.
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+            GLES20.glFinish();
           }
           copyTimeNs += (System.nanoTime() - now);
           VideoRenderer.renderFrameDone(pendingFrame);
@@ -263,8 +260,8 @@
       // OpenGL defaults to lower left origin - flip viewport position vertically.
       final int viewportY = screenHeight - displayLayout.bottom;
       if (rendererType == RendererType.RENDERER_YUV) {
-        drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
-            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+        drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight, displayLayout.left,
+            viewportY, displayLayout.width(), displayLayout.height());
       } else {
         drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
             displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
@@ -281,25 +278,23 @@
 
     private void logStatistics() {
       long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
-      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
-          ". Frames received: " + framesReceived +
-          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType + ". Frames received: "
+              + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
       if (framesReceived > 0 && framesRendered > 0) {
-        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
-            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
-        Logging.d(TAG, "Draw time: " +
-            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
-            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
+        Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) + " ms. FPS: "
+                + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered))
+                + " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us");
       }
     }
 
     public void setScreenSize(final int screenWidth, final int screenHeight) {
-      synchronized(updateLayoutLock) {
+      synchronized (updateLayoutLock) {
         if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
           return;
         }
-        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
-            screenWidth + " x " + screenHeight);
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + screenWidth + " x "
+                + screenHeight);
         this.screenWidth = screenWidth;
         this.screenHeight = screenHeight;
         updateLayoutProperties = true;
@@ -310,14 +305,13 @@
         RendererCommon.ScalingType scalingType, boolean mirror) {
       final Rect layoutInPercentage =
           new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
-      synchronized(updateLayoutLock) {
+      synchronized (updateLayoutLock) {
         if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
             && mirror == this.mirror) {
           return;
         }
-        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
-            ") " +  width + " x " + height + ". Scaling: " + scalingType +
-            ". Mirror: " + mirror);
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + ") "
+                + width + " x " + height + ". Scaling: " + scalingType + ". Mirror: " + mirror);
         this.layoutInPercentage.set(layoutInPercentage);
         this.scalingType = scalingType;
         this.mirror = mirror;
@@ -331,14 +325,14 @@
         return;
       }
       if (rendererEvents != null) {
-        Logging.d(TAG, "ID: " + id +
-            ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
+        Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to " + videoWidth + " x "
+                + videoHeight);
         rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
       }
 
       synchronized (updateLayoutLock) {
-        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
-            videoWidth + " x " + videoHeight + " rotation " + rotation);
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + videoWidth + " x "
+                + videoHeight + " rotation " + rotation);
 
         this.videoWidth = videoWidth;
         this.videoHeight = videoHeight;
@@ -366,11 +360,10 @@
       synchronized (pendingFrameLock) {
         // Check input frame parameters.
         if (frame.yuvFrame) {
-          if (frame.yuvStrides[0] < frame.width ||
-              frame.yuvStrides[1] < frame.width / 2 ||
-              frame.yuvStrides[2] < frame.width / 2) {
-            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
-                frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+          if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2
+              || frame.yuvStrides[2] < frame.width / 2) {
+            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1]
+                    + ", " + frame.yuvStrides[2]);
             VideoRenderer.renderFrameDone(frame);
             return;
           }
@@ -394,8 +387,7 @@
   }
 
   /** Passes GLSurfaceView to video renderer. */
-  public static synchronized void setView(GLSurfaceView surface,
-      Runnable eglContextReadyCallback) {
+  public static synchronized void setView(GLSurfaceView surface, Runnable eglContextReadyCallback) {
     Logging.d(TAG, "VideoRendererGui.setView");
     instance = new VideoRendererGui(surface);
     eglContextReady = eglContextReadyCallback;
@@ -407,7 +399,7 @@
 
   /** Releases GLSurfaceView video renderer. */
   public static synchronized void dispose() {
-    if (instance == null){
+    if (instance == null) {
       return;
     }
     Logging.d(TAG, "VideoRendererGui.dispose");
@@ -431,14 +423,12 @@
    */
   public static VideoRenderer createGui(int x, int y, int width, int height,
       RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
-    YuvImageRenderer javaGuiRenderer = create(
-        x, y, width, height, scalingType, mirror);
+    YuvImageRenderer javaGuiRenderer = create(x, y, width, height, scalingType, mirror);
     return new VideoRenderer(javaGuiRenderer);
   }
 
   public static VideoRenderer.Callbacks createGuiRenderer(
-      int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+      int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     return create(x, y, width, height, scalingType, mirror);
   }
 
@@ -447,8 +437,8 @@
    * resolution (width, height). All parameters are in percentage of
    * screen resolution.
    */
-  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+  public static synchronized YuvImageRenderer create(
+      int x, int y, int width, int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
   }
 
@@ -460,19 +450,16 @@
   public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
       RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
     // Check display region parameters.
-    if (x < 0 || x > 100 || y < 0 || y > 100 ||
-        width < 0 || width > 100 || height < 0 || height > 100 ||
-        x + width > 100 || y + height > 100) {
+    if (x < 0 || x > 100 || y < 0 || y > 100 || width < 0 || width > 100 || height < 0
+        || height > 100 || x + width > 100 || y + height > 100) {
       throw new RuntimeException("Incorrect window parameters.");
     }
 
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to create yuv renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to create yuv renderer before setting GLSurfaceView");
     }
-    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
-        instance.surface, instance.yuvImageRenderers.size(),
-        x, y, width, height, scalingType, mirror, drawer);
+    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(instance.surface,
+        instance.yuvImageRenderers.size(), x, y, width, height, scalingType, mirror, drawer);
     synchronized (instance.yuvImageRenderers) {
       if (instance.onSurfaceCreatedCalled) {
         // onSurfaceCreated has already been called for VideoRendererGui -
@@ -483,8 +470,7 @@
           @Override
           public void run() {
             yuvImageRenderer.createTextures();
-            yuvImageRenderer.setScreenSize(
-                instance.screenWidth, instance.screenHeight);
+            yuvImageRenderer.setScreenSize(instance.screenWidth, instance.screenHeight);
             countDownLatch.countDown();
           }
         });
@@ -501,13 +487,11 @@
     return yuvImageRenderer;
   }
 
-  public static synchronized void update(
-      VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
-      RendererCommon.ScalingType scalingType, boolean mirror) {
+  public static synchronized void update(VideoRenderer.Callbacks renderer, int x, int y, int width,
+      int height, RendererCommon.ScalingType scalingType, boolean mirror) {
     Logging.d(TAG, "VideoRendererGui.update");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to update yuv renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to update yuv renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -522,8 +506,7 @@
       VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
     Logging.d(TAG, "VideoRendererGui.setRendererEvents");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to set renderer events before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to set renderer events before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -537,8 +520,7 @@
   public static synchronized void remove(VideoRenderer.Callbacks renderer) {
     Logging.d(TAG, "VideoRendererGui.remove");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to remove renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to remove renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       final int index = instance.yuvImageRenderers.indexOf(renderer);
@@ -553,8 +535,7 @@
   public static synchronized void reset(VideoRenderer.Callbacks renderer) {
     Logging.d(TAG, "VideoRendererGui.reset");
     if (instance == null) {
-      throw new RuntimeException(
-          "Attempt to reset renderer before setting GLSurfaceView");
+      throw new RuntimeException("Attempt to reset renderer before setting GLSurfaceView");
     }
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
@@ -621,8 +602,7 @@
 
   @Override
   public void onSurfaceChanged(GL10 unused, int width, int height) {
-    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
-        width + " x " + height + "  ");
+    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " + width + " x " + height + "  ");
     screenWidth = width;
     screenHeight = height;
     synchronized (yuvImageRenderers) {
@@ -645,5 +625,4 @@
       }
     }
   }
-
 }
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoSource.java b/webrtc/api/android/java/src/org/webrtc/VideoSource.java
index 239940d..d9531cd 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoSource.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoSource.java
@@ -8,7 +8,6 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
 package org.webrtc;
 
 /**
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoTrack.java b/webrtc/api/android/java/src/org/webrtc/VideoTrack.java
index a4ec8ca..36504c8 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoTrack.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoTrack.java
@@ -14,8 +14,7 @@
 
 /** Java version of VideoTrackInterface. */
 public class VideoTrack extends MediaStreamTrack {
-  private final LinkedList<VideoRenderer> renderers =
-      new LinkedList<VideoRenderer>();
+  private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
 
   public VideoTrack(long nativeTrack) {
     super(nativeTrack);
@@ -43,9 +42,7 @@
 
   private static native void free(long nativeTrack);
 
-  private static native void nativeAddRenderer(
-      long nativeTrack, long nativeRenderer);
+  private static native void nativeAddRenderer(long nativeTrack, long nativeRenderer);
 
-  private static native void nativeRemoveRenderer(
-      long nativeTrack, long nativeRenderer);
+  private static native void nativeRemoveRenderer(long nativeTrack, long nativeRenderer);
 }
diff --git a/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java b/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
index 8edcd1e..6163f5a 100644
--- a/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
@@ -19,8 +19,7 @@
 public class Camera1CapturerUsingByteBufferTest extends InstrumentationTestCase {
   static final String TAG = "Camera1CapturerUsingByteBufferTest";
 
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public boolean isCapturingToTexture() {
       return false;
diff --git a/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java b/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
index 7a6c37a..ce282f9 100644
--- a/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
@@ -19,8 +19,7 @@
 public class Camera1CapturerUsingTextureTest extends InstrumentationTestCase {
   static final String TAG = "Camera1CapturerUsingTextureTest";
 
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public CameraEnumerator getCameraEnumerator() {
       return new Camera1Enumerator();
diff --git a/webrtc/api/androidtests/src/org/webrtc/Camera2CapturerTest.java b/webrtc/api/androidtests/src/org/webrtc/Camera2CapturerTest.java
index 0326e54..5f5d95e 100644
--- a/webrtc/api/androidtests/src/org/webrtc/Camera2CapturerTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/Camera2CapturerTest.java
@@ -143,8 +143,7 @@
     }
   }
 
-  private class TestObjectFactory
-      extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+  private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
     @Override
     public CameraEnumerator getCameraEnumerator() {
       return new Camera2Enumerator(getAppContext());
diff --git a/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
index cbab509..9d424bb 100644
--- a/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
+++ b/webrtc/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -114,8 +114,8 @@
     }
 
     @Override
-    public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
-        long timeStamp) {
+    public void onByteBufferFrameCaptured(
+        byte[] frame, int width, int height, int rotation, long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
         frameSize = frame.length;
@@ -126,9 +126,8 @@
       }
     }
     @Override
-    public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
-        long timeStamp) {
+    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+        float[] transformMatrix, int rotation, long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
         frameWidth = width;
@@ -182,8 +181,7 @@
     }
   }
 
-  static class CameraEvents implements
-      CameraVideoCapturer.CameraEventsHandler {
+  static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
     public boolean onCameraOpeningCalled;
     public boolean onFirstFrameAvailableCalled;
     public final Object onCameraFreezedLock = new Object();
@@ -275,8 +273,7 @@
     }
 
     public CameraVideoCapturer createCapturer(
-        String name,
-        CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+        String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
       return cameraEnumerator.createCapturer(name, eventsHandler);
     }
 
@@ -374,8 +371,8 @@
     instance.surfaceTextureHelper.dispose();
   }
 
-  private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer,
-      VideoRenderer.Callbacks rendererCallbacks) {
+  private VideoTrackWithRenderer createVideoTrackWithRenderer(
+      CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
     VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
     videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
     capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
@@ -410,9 +407,10 @@
   private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
     final CountDownLatch barrier = new CountDownLatch(1);
     capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
-        @Override public void run() {
-          barrier.countDown();
-        }
+      @Override
+      public void run() {
+        barrier.countDown();
+      }
     });
     barrier.await();
   }
@@ -446,7 +444,7 @@
     fail("Expected illegal argument exception when creating non-existing camera.");
   }
 
-  public void createCapturerAndRender() throws InterruptedException  {
+  public void createCapturerAndRender() throws InterruptedException {
     String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
     createCapturerAndRender(name);
   }
@@ -461,8 +459,8 @@
 
   public void switchCamera() throws InterruptedException {
     if (!testObjectFactory.haveTwoCameras()) {
-      Logging.w(TAG,
-          "Skipping test switch video capturer because the device doesn't have two cameras.");
+      Logging.w(
+          TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
       return;
     }
 
@@ -547,23 +545,23 @@
   public void startStopWithDifferentResolutions() throws InterruptedException {
     final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
 
-    for(int i = 0; i < 3 ; ++i) {
+    for (int i = 0; i < 3; ++i) {
       startCapture(capturerInstance, i);
       assertTrue(capturerInstance.observer.waitForCapturerToStart());
       capturerInstance.observer.waitForNextCapturedFrame();
 
       // Check the frame size. The actual width and height depend on how the capturer is mounted.
-      final boolean identicalResolution = (
-          capturerInstance.observer.frameWidth() == capturerInstance.format.width
-          &&  capturerInstance.observer.frameHeight() == capturerInstance.format.height);
-      final boolean flippedResolution = (
-          capturerInstance.observer.frameWidth() == capturerInstance.format.height
-          && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
+      final boolean identicalResolution =
+          (capturerInstance.observer.frameWidth() == capturerInstance.format.width
+              && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
+      final boolean flippedResolution =
+          (capturerInstance.observer.frameWidth() == capturerInstance.format.height
+              && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
       if (!identicalResolution && !flippedResolution) {
-        fail("Wrong resolution, got: "
-            + capturerInstance.observer.frameWidth() + "x" + capturerInstance.observer.frameHeight()
-            + " expected: "+ capturerInstance.format.width + "x" + capturerInstance.format.height
-            + " or " + capturerInstance.format.height + "x" + capturerInstance.format.width);
+        fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+            + capturerInstance.observer.frameHeight() + " expected: "
+            + capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+            + capturerInstance.format.height + "x" + capturerInstance.format.width);
       }
 
       if (testObjectFactory.isCapturingToTexture()) {
@@ -600,8 +598,7 @@
     disposeCapturer(capturerInstance);
   }
 
-  public void returnBufferLateEndToEnd()
-      throws InterruptedException {
+  public void returnBufferLateEndToEnd() throws InterruptedException {
     final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
@@ -666,7 +663,7 @@
       ++numberOfInspectedFrames;
 
       gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
-          &&  videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
+          && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
     } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
 
     disposeCapturer(capturerInstance);
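
The waitUntilIdle() hunk above also shows the recurring anonymous-class normalization: @Override moves onto its own line and the class body indents relative to the statement that opens it. A self-contained sketch of the resulting shape, assuming android.os.Handler and java.util.concurrent.CountDownLatch:

  // Blocks until |handler|'s thread has processed all previously posted
  // tasks, by posting a task that releases the latch (mirrors the
  // waitUntilIdle() hunk above).
  private void waitUntilIdle(Handler handler) throws InterruptedException {
    final CountDownLatch barrier = new CountDownLatch(1);
    handler.post(new Runnable() {
      @Override
      public void run() {
        barrier.countDown();
      }
    });
    barrier.await();
  }
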
diff --git a/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java b/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java
index 8625ccd..6dc3901 100644
--- a/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -51,9 +51,10 @@
         final int expectedB = expected.get() & 0xFF;
         if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
           fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
-              + "(" +  x + ", " + y + "). Expected color (R,G,B): "
+              + "(" + x + ", " + y + "). Expected color (R,G,B): "
               + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
-              + " but was: " + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+              + " but was: "
+              + "(" + actualR + ", " + actualG + ", " + actualB + ").");
         }
       }
     }
@@ -92,14 +93,14 @@
     final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
-    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
-        HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+        GLES20.GL_UNSIGNED_BYTE, rgbPlane);
     GlUtil.checkNoGLES2Error("glTexImage2D");
 
     // Draw the RGB frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
-        0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
+        0 /* viewportY */, WIDTH, HEIGHT);
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -131,7 +132,7 @@
 
     // Generate 3 texture ids for Y/U/V.
     final int yuvTextures[] = new int[3];
-    for (int i = 0; i < 3; i++)  {
+    for (int i = 0; i < 3; i++) {
       yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
     }
 
@@ -139,15 +140,15 @@
     for (int i = 0; i < 3; ++i) {
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
       GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
-      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH,
-          HEIGHT, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
+          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
       GlUtil.checkNoGLES2Error("glTexImage2D");
     }
 
     // Draw the YUV frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
-        0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
+        0 /* viewportY */, WIDTH, HEIGHT);
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -212,8 +213,7 @@
       private final int rgbTexture;
 
       public StubOesTextureProducer(
-          EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
-          int height) {
+          EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
         eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
         surfaceTexture.setDefaultBufferSize(width, height);
         eglBase.createSurface(surfaceTexture);
@@ -232,8 +232,8 @@
         // Upload RGB data to texture.
         GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
         GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
-        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
-            HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+            GLES20.GL_UNSIGNED_BYTE, rgbPlane);
         // Draw the RGB data onto the SurfaceTexture.
         drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
             0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
diff --git a/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
index 2666d01..0514bbc 100644
--- a/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -28,8 +28,7 @@
   @SmallTest
   public static void testInitializeUsingByteBuffer() {
     if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
-      Log.i(TAG,
-            "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
       return;
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
@@ -46,9 +45,8 @@
     }
     EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        eglBase.getEglBaseContext()));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
+        300, 30, eglBase.getEglBaseContext()));
     encoder.release();
     eglBase.release();
   }
@@ -61,13 +59,11 @@
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        null));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
     encoder.release();
     EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
-        eglBase.getEglBaseContext()));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480,
+        300, 30, eglBase.getEglBaseContext()));
     encoder.release();
     eglBase.release();
   }
@@ -137,11 +133,10 @@
 
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
 
-    assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
-        eglOesBase.getEglBaseContext()));
-    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
-        presentationTs));
+    assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width,
+        height, 300, 30, eglOesBase.getEglBaseContext()));
+    assertTrue(
+        encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
     GlUtil.checkNoGLES2Error("encodeTexture");
 
     // It should be OK to delete the texture after calling encodeTexture.
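
The /* viewportX */-style block comments seen throughout these hunks name otherwise opaque literal arguments; the formatter treats each comment as part of its argument, so the pair stays together when a call rewraps. For example, from the GlRectDrawerTest hunk above:

  drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
      0 /* viewportY */, WIDTH, HEIGHT);
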
diff --git a/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java b/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java
index 7fd6487..9e732c4 100644
--- a/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java
@@ -37,8 +37,7 @@
   /**
    * Listens for alerts fired by the NetworkMonitor when network status changes.
    */
-  private static class NetworkMonitorTestObserver
-      implements NetworkMonitor.NetworkObserver {
+  private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
     private boolean receivedNotification = false;
 
     @Override
@@ -118,7 +117,6 @@
   // A dummy NetworkMonitorAutoDetect.Observer.
   private static class TestNetworkMonitorAutoDetectObserver
       implements NetworkMonitorAutoDetect.Observer {
-
     @Override
     public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
 
@@ -138,7 +136,7 @@
 
   private static Handler getUiThreadHandler() {
     synchronized (lock) {
-      if (uiThreadHandler == null ) {
+      if (uiThreadHandler == null) {
         uiThreadHandler = new Handler(Looper.getMainLooper());
       }
       return uiThreadHandler;
@@ -165,8 +163,7 @@
   }
 
   private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
-    final NetworkMonitorAutoDetect.NetworkState networkState =
-        receiver.getCurrentNetworkState();
+    final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
     return receiver.getConnectionType(networkState);
   }
 
diff --git a/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java b/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java
index 92c03a2..a4cb309 100644
--- a/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java
@@ -45,8 +45,7 @@
 
   private static class ObserverExpectations implements PeerConnection.Observer,
                                                        VideoRenderer.Callbacks,
-                                                       DataChannel.Observer,
-                                                       StatsObserver {
+                                                       DataChannel.Observer, StatsObserver {
     private final String name;
     private int expectedIceCandidates = 0;
     private int expectedErrors = 0;
@@ -54,32 +53,24 @@
     private int expectedWidth = 0;
     private int expectedHeight = 0;
     private int expectedFramesDelivered = 0;
-    private LinkedList<SignalingState> expectedSignalingChanges =
-        new LinkedList<SignalingState>();
+    private LinkedList<SignalingState> expectedSignalingChanges = new LinkedList<SignalingState>();
     private LinkedList<IceConnectionState> expectedIceConnectionChanges =
         new LinkedList<IceConnectionState>();
     private LinkedList<IceGatheringState> expectedIceGatheringChanges =
         new LinkedList<IceGatheringState>();
-    private LinkedList<String> expectedAddStreamLabels =
-        new LinkedList<String>();
-    private LinkedList<String> expectedRemoveStreamLabels =
-        new LinkedList<String>();
-    private final LinkedList<IceCandidate> gotIceCandidates =
-        new LinkedList<IceCandidate>();
+    private LinkedList<String> expectedAddStreamLabels = new LinkedList<String>();
+    private LinkedList<String> expectedRemoveStreamLabels = new LinkedList<String>();
+    private final LinkedList<IceCandidate> gotIceCandidates = new LinkedList<IceCandidate>();
     private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
         new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
     private DataChannel dataChannel;
-    private LinkedList<DataChannel.Buffer> expectedBuffers =
-        new LinkedList<DataChannel.Buffer>();
+    private LinkedList<DataChannel.Buffer> expectedBuffers = new LinkedList<DataChannel.Buffer>();
     private LinkedList<DataChannel.State> expectedStateChanges =
         new LinkedList<DataChannel.State>();
-    private LinkedList<String> expectedRemoteDataChannelLabels =
-        new LinkedList<String>();
+    private LinkedList<String> expectedRemoteDataChannelLabels = new LinkedList<String>();
     private int expectedStatsCallbacks = 0;
-    private LinkedList<StatsReport[]> gotStatsReports =
-        new LinkedList<StatsReport[]>();
-    private final HashSet<MediaStream> gotRemoteStreams =
-        new HashSet<MediaStream>();
+    private LinkedList<StatsReport[]> gotStatsReports = new LinkedList<StatsReport[]>();
+    private final HashSet<MediaStream> gotRemoteStreams = new HashSet<MediaStream>();
 
     public ObserverExpectations(String name) {
       this.name = name;
@@ -140,14 +131,12 @@
       assertEquals(expectedSignalingChanges.removeFirst(), newState);
     }
 
-    public synchronized void expectIceConnectionChange(
-        IceConnectionState newState) {
+    public synchronized void expectIceConnectionChange(IceConnectionState newState) {
       expectedIceConnectionChanges.add(newState);
     }
 
     @Override
-    public synchronized void onIceConnectionChange(
-        IceConnectionState newState) {
+    public synchronized void onIceConnectionChange(IceConnectionState newState) {
       // TODO(bemasc): remove once delivery of ICECompleted is reliable
       // (https://code.google.com/p/webrtc/issues/detail?id=3021).
       if (newState.equals(IceConnectionState.COMPLETED)) {
@@ -167,8 +156,7 @@
       System.out.println(name + "Got an ice connection receiving change " + receiving);
     }
 
-    public synchronized void expectIceGatheringChange(
-        IceGatheringState newState) {
+    public synchronized void expectIceGatheringChange(IceGatheringState newState) {
       expectedIceGatheringChanges.add(newState);
     }
 
@@ -198,8 +186,7 @@
       assertEquals("audio", stream.audioTracks.get(0).kind());
       VideoRenderer renderer = createVideoRenderer(this);
       stream.videoTracks.get(0).addRenderer(renderer);
-      assertNull(renderers.put(
-          stream, new WeakReference<VideoRenderer>(renderer)));
+      assertNull(renderers.put(stream, new WeakReference<VideoRenderer>(renderer)));
       gotRemoteStreams.add(stream);
     }
 
@@ -224,8 +211,7 @@
 
     @Override
     public synchronized void onDataChannel(DataChannel remoteDataChannel) {
-      assertEquals(expectedRemoteDataChannelLabels.removeFirst(),
-                   remoteDataChannel.label());
+      assertEquals(expectedRemoteDataChannelLabels.removeFirst(), remoteDataChannel.label());
       setDataChannel(remoteDataChannel);
       assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
     }
@@ -239,10 +225,8 @@
       assertTrue(--expectedRenegotiations >= 0);
     }
 
-    public synchronized void expectMessage(ByteBuffer expectedBuffer,
-                                           boolean expectedBinary) {
-      expectedBuffers.add(
-          new DataChannel.Buffer(expectedBuffer, expectedBinary));
+    public synchronized void expectMessage(ByteBuffer expectedBuffer, boolean expectedBinary) {
+      expectedBuffers.add(new DataChannel.Buffer(expectedBuffer, expectedBinary));
     }
 
     @Override
@@ -288,7 +272,7 @@
     // empty if no such expectations exist.
     public synchronized TreeSet<String> unsatisfiedExpectations() {
       TreeSet<String> stillWaitingForExpectations = new TreeSet<String>();
-      if (expectedIceCandidates > 0) {  // See comment in onIceCandidate.
+      if (expectedIceCandidates > 0) { // See comment in onIceCandidate.
         stillWaitingForExpectations.add("expectedIceCandidates");
       }
       if (expectedErrors != 0) {
@@ -299,12 +283,12 @@
             "expectedSignalingChanges: " + expectedSignalingChanges.size());
       }
       if (expectedIceConnectionChanges.size() != 0) {
-        stillWaitingForExpectations.add("expectedIceConnectionChanges: " +
-                                        expectedIceConnectionChanges.size());
+        stillWaitingForExpectations.add(
+            "expectedIceConnectionChanges: " + expectedIceConnectionChanges.size());
       }
       if (expectedIceGatheringChanges.size() != 0) {
-        stillWaitingForExpectations.add("expectedIceGatheringChanges: " +
-                                        expectedIceGatheringChanges.size());
+        stillWaitingForExpectations.add(
+            "expectedIceGatheringChanges: " + expectedIceGatheringChanges.size());
       }
       if (expectedAddStreamLabels.size() != 0) {
         stillWaitingForExpectations.add(
@@ -315,24 +299,20 @@
             "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
       }
       if (expectedFramesDelivered > 0) {
-        stillWaitingForExpectations.add(
-            "expectedFramesDelivered: " + expectedFramesDelivered);
+        stillWaitingForExpectations.add("expectedFramesDelivered: " + expectedFramesDelivered);
       }
       if (!expectedBuffers.isEmpty()) {
-        stillWaitingForExpectations.add(
-            "expectedBuffers: " + expectedBuffers.size());
+        stillWaitingForExpectations.add("expectedBuffers: " + expectedBuffers.size());
       }
       if (!expectedStateChanges.isEmpty()) {
-        stillWaitingForExpectations.add(
-            "expectedStateChanges: " + expectedStateChanges.size());
+        stillWaitingForExpectations.add("expectedStateChanges: " + expectedStateChanges.size());
       }
       if (!expectedRemoteDataChannelLabels.isEmpty()) {
-        stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " +
-                                        expectedRemoteDataChannelLabels.size());
+        stillWaitingForExpectations.add(
+            "expectedRemoteDataChannelLabels: " + expectedRemoteDataChannelLabels.size());
       }
       if (expectedStatsCallbacks != 0) {
-        stillWaitingForExpectations.add(
-            "expectedStatsCallbacks: " + expectedStatsCallbacks);
+        stillWaitingForExpectations.add("expectedStatsCallbacks: " + expectedStatsCallbacks);
       }
       return stillWaitingForExpectations;
     }
@@ -352,11 +332,8 @@
       TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
       while (!stillWaitingForExpectations.isEmpty()) {
         if (!stillWaitingForExpectations.equals(prev)) {
-          System.out.println(
-              name + " still waiting at\n    " +
-              (new Throwable()).getStackTrace()[1] +
-              "\n    for: " +
-              Arrays.toString(stillWaitingForExpectations.toArray()));
+          System.out.println(name + " still waiting at\n    " + (new Throwable()).getStackTrace()[1]
+              + "\n    for: " + Arrays.toString(stillWaitingForExpectations.toArray()));
         }
         if (endTime < System.currentTimeMillis()) {
           System.out.println(name + " timed out waiting for: "
@@ -372,8 +349,8 @@
         stillWaitingForExpectations = unsatisfiedExpectations();
       }
       if (prev == null) {
-        System.out.println(name + " didn't need to wait at\n    " +
-                           (new Throwable()).getStackTrace()[1]);
+        System.out.println(
+            name + " didn't need to wait at\n    " + (new Throwable()).getStackTrace()[1]);
       }
       return true;
     }
@@ -464,21 +441,17 @@
 
   static int videoWindowsMapped = -1;
 
-  private static VideoRenderer createVideoRenderer(
-      VideoRenderer.Callbacks videoCallbacks) {
+  private static VideoRenderer createVideoRenderer(VideoRenderer.Callbacks videoCallbacks) {
     return new VideoRenderer(videoCallbacks);
   }
 
   // Return a weak reference to test that ownership is correctly held by
   // PeerConnection, not by test code.
-  private static WeakReference<MediaStream> addTracksToPC(
-      PeerConnectionFactory factory, PeerConnection pc,
-      VideoSource videoSource,
-      String streamLabel, String videoTrackId, String audioTrackId,
-      VideoRenderer.Callbacks videoCallbacks) {
+  private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+      PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+      String audioTrackId, VideoRenderer.Callbacks videoCallbacks) {
     MediaStream lMS = factory.createLocalMediaStream(streamLabel);
-    VideoTrack videoTrack =
-        factory.createVideoTrack(videoTrackId, videoSource);
+    VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
     assertNotNull(videoTrack);
     VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
     assertNotNull(videoRenderer);
@@ -487,8 +460,8 @@
     // Just for fun, let's remove and re-add the track.
     lMS.removeTrack(videoTrack);
     lMS.addTrack(videoTrack);
-    lMS.addTrack(factory.createAudioTrack(
-        audioTrackId, factory.createAudioSource(new MediaConstraints())));
+    lMS.addTrack(
+        factory.createAudioTrack(audioTrackId, factory.createAudioSource(new MediaConstraints())));
     pc.addStream(lMS);
     return new WeakReference<MediaStream>(lMS);
   }
@@ -497,7 +470,7 @@
   // Call initializeThreadCheck before a test and finalizeThreadCheck after
   // a test.
   void initializeThreadCheck() {
-    System.gc();  // Encourage any GC-related threads to start up.
+    System.gc(); // Encourage any GC-related threads to start up.
     threadsBeforeTest = allThreads();
   }
 
@@ -544,25 +517,20 @@
     //     Logging.Severity.LS_SENSITIVE);
 
     MediaConstraints pcConstraints = new MediaConstraints();
-    pcConstraints.mandatory.add(
-        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
 
-    LinkedList<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    iceServers.add(new PeerConnection.IceServer(
-        "stun:stun.l.google.com:19302"));
-    iceServers.add(new PeerConnection.IceServer(
-        "turn:fake.example.com", "fakeUsername", "fakePassword"));
-    ObserverExpectations offeringExpectations =
-        new ObserverExpectations("PCTest:offerer");
-    PeerConnection offeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, offeringExpectations);
+    LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
+    iceServers.add(
+        new PeerConnection.IceServer("turn:fake.example.com", "fakeUsername", "fakePassword"));
+    ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+    PeerConnection offeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
     assertNotNull(offeringPC);
 
-    ObserverExpectations answeringExpectations =
-        new ObserverExpectations("PCTest:answerer");
-    PeerConnection answeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, answeringExpectations);
+    ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+    PeerConnection answeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
     assertNotNull(answeringPC);
 
     // We want to use the same camera for offerer & answerer, so create it here
@@ -574,14 +542,12 @@
     videoCapturer.startCapture(640, 480, 30);
 
     offeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> oLMS = addTracksToPC(
-        factory, offeringPC, videoSource, "offeredMediaStream",
-        "offeredVideoTrack", "offeredAudioTrack",
-        new ExpectedResolutionSetter(answeringExpectations));
+    WeakReference<MediaStream> oLMS =
+        addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+            "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));
 
     offeringExpectations.expectRenegotiationNeeded();
-    DataChannel offeringDC = offeringPC.createDataChannel(
-        "offeringDC", new DataChannel.Init());
+    DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
     assertEquals("offeringDC", offeringDC.label());
 
     offeringExpectations.setDataChannel(offeringDC);
@@ -593,22 +559,19 @@
     assertFalse(offerSdp.description.isEmpty());
 
     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringExpectations.expectAddStream("offeredMediaStream");
     // SCTP DataChannels are announced via OPEN messages over the established
     // connection (not via SDP), so answeringExpectations can only register
     // its expectation for the channel during ICE, below.
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
     assertTrue(sdpLatch.await());
     assertNull(sdpLatch.getSdp());
 
     answeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> aLMS = addTracksToPC(
-        factory, answeringPC, videoSource, "answeredMediaStream",
-        "answeredVideoTrack", "answeredAudioTrack",
+    WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+        "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
         new ExpectedResolutionSetter(offeringExpectations));
 
     sdpLatch = new SdpObserverLatch();
@@ -639,19 +602,15 @@
     offeringExpectations.expectSignalingChange(SignalingState.STABLE);
     offeringExpectations.expectAddStream("answeredMediaStream");
 
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
     // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
     // (https://code.google.com/p/webrtc/issues/detail?id=3021).
     //
     // offeringExpectations.expectIceConnectionChange(
     //     IceConnectionState.COMPLETED);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
 
     offeringPC.setRemoteDescription(sdpLatch, answerSdp);
     assertTrue(sdpLatch.await());
@@ -667,7 +626,6 @@
     assertEquals(answeringPC.getSenders().size(), 2);
     assertEquals(answeringPC.getReceivers().size(), 2);
 
-
     // Wait for at least some frames to be delivered at each end (number
     // chosen arbitrarily).
     offeringExpectations.expectFramesDelivered(10);
@@ -693,10 +651,8 @@
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
 
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
 
     // Set a bitrate limit for the outgoing video stream for the offerer.
     RtpSender videoSender = null;
@@ -721,8 +677,8 @@
     // Test send & receive UTF-8 text.
     answeringExpectations.expectMessage(
         ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
-    DataChannel.Buffer buffer = new DataChannel.Buffer(
-        ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+    DataChannel.Buffer buffer =
+        new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
     assertTrue(offeringExpectations.dataChannel.send(buffer));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
 
@@ -735,8 +691,7 @@
     expectedBinaryMessage.flip();
     offeringExpectations.expectMessage(expectedBinaryMessage, true);
     assertTrue(answeringExpectations.dataChannel.send(
-        new DataChannel.Buffer(
-            ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }), true)));
+        new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
 
     offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
@@ -768,24 +723,19 @@
     PeerConnectionFactory factory = new PeerConnectionFactory(options);
 
     MediaConstraints pcConstraints = new MediaConstraints();
-    pcConstraints.mandatory.add(
-        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+    pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
 
-    LinkedList<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    iceServers.add(new PeerConnection.IceServer(
-        "stun:stun.l.google.com:19302"));
+    LinkedList<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
 
-    ObserverExpectations offeringExpectations =
-        new ObserverExpectations("PCTest:offerer");
-    PeerConnection offeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, offeringExpectations);
+    ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+    PeerConnection offeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, offeringExpectations);
     assertNotNull(offeringPC);
 
-    ObserverExpectations answeringExpectations =
-        new ObserverExpectations("PCTest:answerer");
-    PeerConnection answeringPC = factory.createPeerConnection(
-        iceServers, pcConstraints, answeringExpectations);
+    ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+    PeerConnection answeringPC =
+        factory.createPeerConnection(iceServers, pcConstraints, answeringExpectations);
     assertNotNull(answeringPC);
 
     // We want to use the same camera for offerer & answerer, so create it here
@@ -798,10 +748,9 @@
 
     // Add offerer media stream.
     offeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> oLMS = addTracksToPC(
-        factory, offeringPC, videoSource, "offeredMediaStream",
-        "offeredVideoTrack", "offeredAudioTrack",
-        new ExpectedResolutionSetter(answeringExpectations));
+    WeakReference<MediaStream> oLMS =
+        addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+            "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));
 
     // Create offer.
     SdpObserverLatch sdpLatch = new SdpObserverLatch();
@@ -822,8 +771,7 @@
 
     // Set remote description for answerer.
     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringExpectations.expectAddStream("offeredMediaStream");
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
     assertTrue(sdpLatch.await());
@@ -831,9 +779,8 @@
 
     // Add answerer media stream.
     answeringExpectations.expectRenegotiationNeeded();
-    WeakReference<MediaStream> aLMS = addTracksToPC(
-        factory, answeringPC, videoSource, "answeredMediaStream",
-        "answeredVideoTrack", "answeredAudioTrack",
+    WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+        "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
         new ExpectedResolutionSetter(offeringExpectations));
 
     // Create answer.
@@ -858,19 +805,15 @@
     offeringExpectations.expectSignalingChange(SignalingState.STABLE);
     offeringExpectations.expectAddStream("answeredMediaStream");
 
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    offeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
     // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
     // (https://code.google.com/p/webrtc/issues/detail?id=3021).
     //
     // offeringExpectations.expectIceConnectionChange(
     //     IceConnectionState.COMPLETED);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CHECKING);
-    answeringExpectations.expectIceConnectionChange(
-        IceConnectionState.CONNECTED);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+    answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
 
     offeringPC.setRemoteDescription(sdpLatch, answerSdp);
     assertTrue(sdpLatch.await());
@@ -897,10 +840,8 @@
     assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
     assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
 
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
-    assertEquals(
-        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
 
     // Now do another negotiation, removing the video track from one peer.
     // This previously caused a crash on pc.dispose().
@@ -927,8 +868,7 @@
 
     // Set remote description for answerer.
     sdpLatch = new SdpObserverLatch();
-    answeringExpectations.expectSignalingChange(
-        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
     answeringPC.setRemoteDescription(sdpLatch, offerSdp);
     assertTrue(sdpLatch.await());
     assertNull(sdpLatch.getSdp());
@@ -1041,8 +981,7 @@
     assertTrue(info.samples.size() > 0);
   }
 
-  private static void shutdownPC(
-      PeerConnection pc, ObserverExpectations expectations) {
+  private static void shutdownPC(PeerConnection pc, ObserverExpectations expectations) {
     if (expectations.dataChannel != null) {
       expectations.dataChannel.unregisterObserver();
       expectations.dataChannel.dispose();
diff --git a/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java b/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java
index cff3a8c..3aaf746 100644
--- a/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java
@@ -95,11 +95,13 @@
     // Assert:
     // u' = u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         1, 0, 0, 0,
         0, 1, 0, 0,
         0, 0, 1, 0,
         0, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }
 
   @SmallTest
@@ -108,11 +110,13 @@
     // Assert:
     // u' = 1 - u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         -1, 0, 0, 0,
          0, 1, 0, 0,
          0, 0, 1, 0,
          1, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }
 
   @SmallTest
@@ -123,22 +127,26 @@
     // Assert:
     // u' = 0.25 + 0.5 u.
     // v' = v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
          0.5, 0, 0, 0,
            0, 1, 0, 0,
            0, 0, 1, 0,
         0.25, 0, 0, 1}, round(layoutMatrix));
+    // clang-format on
   }
 
   @SmallTest
   public static void testRotateTextureMatrixDefault() {
     // Test that rotation with 0 degrees returns an identical matrix.
+    // clang-format off
     final float[] matrix = new float[] {
         1, 2, 3, 4,
         5, 6, 7, 8,
         9, 0, 1, 2,
         3, 4, 5, 6
     };
+    // clang-format on
     final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
     MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
   }
@@ -149,11 +157,13 @@
     // Assert:
     // u' = 1 - v.
     // v' = u.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
          0, 1, 0, 0,
         -1, 0, 0, 0,
          0, 0, 1, 0,
          1, 0, 0, 1}, round(samplingMatrix));
+    // clang-format on
   }
 
   @SmallTest
@@ -162,10 +172,12 @@
     // Assert:
     // u' = 1 - u.
     // v' = 1 - v.
+    // clang-format off
     MoreAsserts.assertEquals(new double[] {
         -1,  0, 0, 0,
          0, -1, 0, 0,
          0,  0, 1, 0,
          1,  1, 0, 1}, round(samplingMatrix));
+    // clang-format on
   }
 }
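
The // clang-format off / // clang-format on pairs added above are the formatter's standard escape hatch: everything between the two comments is left exactly as written, which preserves the hand-aligned rows of the 4x4 matrix literals in these tests. In sketch form:

  // clang-format off
  final float[] identity = new float[] {
      1, 0, 0, 0,
      0, 1, 0, 0,
      0, 0, 1, 0,
      0, 0, 0, 1};
  // clang-format on
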
diff --git a/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
index fc66386..ee29da8 100644
--- a/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -110,8 +110,7 @@
 
     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
-    final EglBase eglOesBase =
-        EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+    final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
     eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
     assertEquals(eglOesBase.surfaceWidth(), width);
     assertEquals(eglOesBase.surfaceHeight(), height);
@@ -131,8 +130,8 @@
       // Wait for an OES texture to arrive and draw it onto the pixel buffer.
       listener.waitForNewFrame();
       eglBase.makeCurrent();
-      drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height,
-          0, 0, width, height);
+      drawer.drawOes(
+          listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
 
       surfaceTextureHelper.returnTextureFrame();
 
@@ -178,8 +177,7 @@
 
     // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
     // |surfaceTextureHelper| as the target EGLSurface.
-    final EglBase eglOesBase =
-        EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+    final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
     eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
     assertEquals(eglOesBase.surfaceWidth(), width);
     assertEquals(eglOesBase.surfaceHeight(), height);
@@ -203,8 +201,8 @@
     // Draw the pending texture frame onto the pixel buffer.
     eglBase.makeCurrent();
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawOes(listener.oesTextureId, listener.transformMatrix, width, height,
-        0, 0, width, height);
+    drawer.drawOes(
+        listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
     drawer.release();
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
@@ -231,8 +229,8 @@
   @MediumTest
   public static void testDispose() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -267,8 +265,8 @@
    */
   @SmallTest
   public static void testDisposeImmediately() {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     surfaceTextureHelper.dispose();
   }
 
@@ -279,8 +277,8 @@
   @MediumTest
   public static void testStopListening() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -315,8 +313,8 @@
    */
   @SmallTest
   public static void testStopListeningImmediately() throws InterruptedException {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
     surfaceTextureHelper.startListening(listener);
     surfaceTextureHelper.stopListening();
@@ -329,8 +327,8 @@
    */
   @SmallTest
   public static void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener = new MockTextureListener();
 
     final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
@@ -353,7 +351,8 @@
     // Wait until handler thread is idle to try to catch late startListening() call.
     final CountDownLatch barrier = new CountDownLatch(1);
     surfaceTextureHelper.getHandler().post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         barrier.countDown();
       }
     });
@@ -371,8 +370,8 @@
   @MediumTest
   public static void testRestartListeningWithNewListener() throws InterruptedException {
     // Create SurfaceTextureHelper and listener.
-    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
-        "SurfaceTextureHelper test" /* threadName */, null);
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
     final MockTextureListener listener1 = new MockTextureListener();
     surfaceTextureHelper.startListening(listener1);
     // Create EglBase with the SurfaceTexture as target EGLSurface.
@@ -467,17 +466,17 @@
       //    368 UUUUUUUU VVVVVVVV
       //    384 buffer end
       ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
-      surfaceTextureHelper.textureToYUV(buffer, width, height, width,
-          listener.oesTextureId, listener.transformMatrix);
+      surfaceTextureHelper.textureToYUV(
+          buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);
 
       surfaceTextureHelper.returnTextureFrame();
 
       // Allow off-by-one differences due to different rounding.
-      while (buffer.position() < width*height) {
+      while (buffer.position() < width * height) {
         assertClose(1, buffer.get() & 0xff, ref_y[i]);
       }
       while (buffer.hasRemaining()) {
-        if (buffer.position() % width < width/2)
+        if (buffer.position() % width < width / 2)
           assertClose(1, buffer.get() & 0xff, ref_u[i]);
         else
           assertClose(1, buffer.get() & 0xff, ref_v[i]);
diff --git a/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
index 60720ff..300cdf8 100644
--- a/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
+++ b/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -24,8 +24,7 @@
    * List with all possible scaling types.
    */
   private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
-      RendererCommon.ScalingType.SCALE_ASPECT_FIT,
-      RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+      RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
       RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
 
   /**
@@ -50,22 +49,19 @@
   /**
    * Assert onMeasure() with given parameters will result in expected measured size.
    */
-  private static void assertMeasuredSize(
-      SurfaceViewRenderer surfaceViewRenderer, RendererCommon.ScalingType scalingType,
-      String frameDimensions,
-      int expectedWidth, int expectedHeight,
-      int widthSpec, int heightSpec) {
+  private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
+      RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
+      int expectedHeight, int widthSpec, int heightSpec) {
     surfaceViewRenderer.setScalingType(scalingType);
     surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
     final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
     final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
     if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
-      fail("onMeasure("
-          + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec) + ")"
-          + " with scaling type " + scalingType
-          + " and frame: " + frameDimensions
-          + " expected measured size " + expectedWidth + "x" + expectedHeight
-          + ", but was " + measuredWidth + "x" + measuredHeight);
+      fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+          + ")"
+          + " with scaling type " + scalingType + " and frame: " + frameDimensions
+          + " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+          + measuredWidth + "x" + measuredHeight);
     }
   }
 
@@ -82,24 +78,22 @@
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
         final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            0, 0, zeroMeasureSize, zeroMeasureSize);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            1280, 720,
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+            zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
             MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
             MeasureSpec.makeMeasureSpec(720, measureSpecMode));
       }
     }
 
-   // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
+    // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
     surfaceViewRenderer.init((EglBase.Context) null, null);
     for (RendererCommon.ScalingType scalingType : scalingTypes) {
       for (int measureSpecMode : measureSpecModes) {
         final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            0, 0, zeroMeasureSize, zeroMeasureSize);
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            1280, 720,
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+            zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
             MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
             MeasureSpec.makeMeasureSpec(720, measureSpecMode));
       }
@@ -163,25 +157,23 @@
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
         for (int measureSpecMode : measureSpecModes) {
           final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
-          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-              0, 0, zeroMeasureSize, zeroMeasureSize);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
+              zeroMeasureSize, zeroMeasureSize);
         }
       }
 
       // Test perfect fit.
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
         for (int measureSpecMode : measureSpecModes) {
-          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-              rotatedWidth, rotatedHeight,
-              MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
+              rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
               MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
         }
       }
 
       // Force spec size with different aspect ratio than frame aspect ratio.
       for (RendererCommon.ScalingType scalingType : scalingTypes) {
-        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-            720, 1280,
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
             MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
             MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
       }
@@ -194,8 +186,8 @@
         for (RendererCommon.ScalingType scalingType : scalingTypes) {
           final Point expectedSize =
               RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
-          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-                  expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+              expectedSize.y, widthSpec, heightSpec);
         }
       }
       {
@@ -206,8 +198,8 @@
         for (RendererCommon.ScalingType scalingType : scalingTypes) {
           final Point expectedSize =
               RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
-          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-                  expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+              expectedSize.y, widthSpec, heightSpec);
         }
       }
       {
@@ -215,8 +207,8 @@
         final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
         final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
         for (RendererCommon.ScalingType scalingType : scalingTypes) {
-          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
-                  720, 1280, widthSpec, heightSpec);
+          assertMeasuredSize(
+              surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
         }
       }
     }
diff --git a/webrtc/base/java/src/org/webrtc/Logging.java b/webrtc/base/java/src/org/webrtc/Logging.java
index 7d073b3..736dcf0 100644
--- a/webrtc/base/java/src/org/webrtc/Logging.java
+++ b/webrtc/base/java/src/org/webrtc/Logging.java
@@ -57,12 +57,10 @@
     TraceLevel(int level) {
       this.level = level;
     }
-  };
+  }
 
   // Keep in sync with webrtc/base/logging.h:LoggingSeverity.
-  public enum Severity {
-    LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE
-  };
+  public enum Severity { LS_SENSITIVE, LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }
 
   public static void enableLogThreads() {
     if (!nativeLibLoaded) {
@@ -74,8 +72,8 @@
 
   public static void enableLogTimeStamps() {
     if (!nativeLibLoaded) {
-      fallbackLogger.log(Level.WARNING,
-                         "Cannot enable log timestamps because native lib not loaded.");
+      fallbackLogger.log(
+          Level.WARNING, "Cannot enable log timestamps because native lib not loaded.");
       return;
     }
     nativeEnableLogTimeStamps();
@@ -84,8 +82,7 @@
   // Enable tracing to |path| of messages of |levels|.
   // On Android, use "logcat:" for |path| to send output there.
   // Note: this function controls the output of the WEBRTC_TRACE() macros.
-  public static synchronized void enableTracing(
-      String path, EnumSet<TraceLevel> levels) {
+  public static synchronized void enableTracing(String path, EnumSet<TraceLevel> levels) {
     if (!nativeLibLoaded) {
       fallbackLogger.log(Level.WARNING, "Cannot enable tracing because native lib not loaded.");
       return;
@@ -178,8 +175,7 @@
     return sw.toString();
   }
 
-  private static native void nativeEnableTracing(
-      String path, int nativeLevels);
+  private static native void nativeEnableTracing(String path, int nativeLevels);
   private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
   private static native void nativeEnableLogThreads();
   private static native void nativeEnableLogTimeStamps();
diff --git a/webrtc/base/java/src/org/webrtc/ThreadUtils.java b/webrtc/base/java/src/org/webrtc/ThreadUtils.java
index b7ec8fb..4337fd0 100644
--- a/webrtc/base/java/src/org/webrtc/ThreadUtils.java
+++ b/webrtc/base/java/src/org/webrtc/ThreadUtils.java
@@ -42,9 +42,7 @@
    * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
    * to complete without getting interrupted..
    */
-  public interface BlockingOperation {
-    void run() throws InterruptedException;
-  }
+  public interface BlockingOperation { void run() throws InterruptedException; }
 
   /**
    * Utility method to make sure a blocking operation is executed to completion without getting
@@ -168,7 +166,8 @@
     final CaughtException caughtException = new CaughtException();
     final CountDownLatch barrier = new CountDownLatch(1);
     handler.post(new Runnable() {
-      @Override public void run() {
+      @Override
+      public void run() {
         try {
           result.value = callable.call();
         } catch (Exception e) {
@@ -182,9 +181,8 @@
     // stack trace for the waiting thread as well.
     if (caughtException.e != null) {
       final RuntimeException runtimeException = new RuntimeException(caughtException.e);
-      runtimeException.setStackTrace(concatStackTraces(
-          caughtException.e.getStackTrace(),
-          runtimeException.getStackTrace()));
+      runtimeException.setStackTrace(
+          concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
       throw runtimeException;
     }
     return result.value;
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
index d2a248b..460ada1 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -84,10 +84,8 @@
 
     // The proximity sensor should only be activated when there are exactly two
     // available audio devices.
-    if (audioDevices.size() == 2
-        && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
-        && audioDevices.contains(
-            AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
+    if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
+        && audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
       if (proximitySensor.sensorReportsNearState()) {
         // Sensor reports that a "handset is being held up to a person's ear",
         // or "something is covering the light sensor".
@@ -101,17 +99,14 @@
   }
 
   /** Construction */
-  static AppRTCAudioManager create(Context context,
-      Runnable deviceStateChangeListener) {
+  static AppRTCAudioManager create(Context context, Runnable deviceStateChangeListener) {
     return new AppRTCAudioManager(context, deviceStateChangeListener);
   }
 
-  private AppRTCAudioManager(Context context,
-      Runnable deviceStateChangeListener) {
+  private AppRTCAudioManager(Context context, Runnable deviceStateChangeListener) {
     apprtcContext = context;
     onStateChangeListener = deviceStateChangeListener;
-    audioManager = ((AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE));
+    audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
 
     SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
     useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
@@ -149,8 +144,8 @@
     savedIsMicrophoneMute = audioManager.isMicrophoneMute();
 
     // Request audio focus before making any device switch.
-    audioManager.requestAudioFocus(null, AudioManager.STREAM_VOICE_CALL,
-        AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
+    audioManager.requestAudioFocus(
+        null, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
 
     // Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
     // required to be in this mode when playout and/or recording starts for
@@ -250,13 +245,11 @@
         int state = intent.getIntExtra("state", STATE_UNPLUGGED);
         int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
         String name = intent.getStringExtra("name");
-        Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo()
-            + ": "
-            + "a=" + intent.getAction()
-            + ", s=" + (state == STATE_UNPLUGGED ? "unplugged" : "plugged")
-            + ", m=" + (microphone == HAS_MIC ? "mic" : "no mic")
-            + ", n=" + name
-            + ", sb=" + isInitialStickyBroadcast());
+        Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+                + "a=" + intent.getAction() + ", s="
+                + (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+                + (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+                + isInitialStickyBroadcast());
 
         boolean hasWiredHeadset = (state == STATE_PLUGGED);
         switch (state) {
@@ -304,8 +297,7 @@
 
   /** Gets the current earpiece state. */
   private boolean hasEarpiece() {
-    return apprtcContext.getPackageManager().hasSystemFeature(
-        PackageManager.FEATURE_TELEPHONY);
+    return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
   }
 
   /**
@@ -331,7 +323,7 @@
       // No wired headset, hence the audio-device list can contain speaker
       // phone (on a tablet), or speaker phone and earpiece (on mobile phone).
       audioDevices.add(AudioDevice.SPEAKER_PHONE);
-      if (hasEarpiece())  {
+      if (hasEarpiece()) {
         audioDevices.add(AudioDevice.EARPIECE);
       }
     }
@@ -347,8 +339,8 @@
 
   /** Called each time a new audio device has been added or removed. */
   private void onAudioManagerChangedState() {
-    Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices
-        + ", selected=" + selectedAudioDevice);
+    Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices + ", selected="
+            + selectedAudioDevice);
 
     // Enable the proximity sensor if there are two available audio devices
     // in the list. Given the current implementation, we know that the choice
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
index 0b16db8..72a449b 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
@@ -20,7 +20,6 @@
  * AppRTCClient is the interface representing an AppRTC client.
  */
 public interface AppRTCClient {
-
   /**
    * Struct holding the connection parameters of an AppRTC room.
    */
@@ -28,8 +27,7 @@
     public final String roomUrl;
     public final String roomId;
     public final boolean loopback;
-    public RoomConnectionParameters(
-        String roomUrl, String roomId, boolean loopback) {
+    public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
       this.roomUrl = roomUrl;
       this.roomId = roomId;
       this.loopback = loopback;
@@ -80,11 +78,9 @@
     public final SessionDescription offerSdp;
     public final List<IceCandidate> iceCandidates;
 
-    public SignalingParameters(
-        List<PeerConnection.IceServer> iceServers,
-        boolean initiator, String clientId,
-        String wssUrl, String wssPostUrl,
-        SessionDescription offerSdp, List<IceCandidate> iceCandidates) {
+    public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
+        String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
+        List<IceCandidate> iceCandidates) {
       this.iceServers = iceServers;
       this.initiator = initiator;
       this.clientId = clientId;
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
index ebbb066..57f0c78 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
@@ -45,16 +45,14 @@
   private boolean lastStateReportIsNear = false;
 
   /** Construction */
-  static AppRTCProximitySensor create(Context context,
-      Runnable sensorStateListener) {
+  static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
     return new AppRTCProximitySensor(context, sensorStateListener);
   }
 
   private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
     Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
     onSensorStateListener = sensorStateListener;
-    sensorManager = ((SensorManager) context.getSystemService(
-        Context.SENSOR_SERVICE));
+    sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
   }
 
   /**
@@ -68,8 +66,7 @@
       // Proximity sensor is not supported on this device.
       return false;
     }
-    sensorManager.registerListener(
-        this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
+    sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
     return true;
   }
 
@@ -120,8 +117,8 @@
     }
 
     Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
-        + "accuracy=" + event.accuracy
-        + ", timestamp=" + event.timestamp + ", distance=" + event.values[0]);
+            + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+            + event.values[0]);
   }
 
   /**
@@ -168,5 +165,4 @@
     }
     Log.d(TAG, info.toString());
   }
-
 }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
index 3763c9e..8fdcc39 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -42,68 +42,41 @@
  * Activity for peer connection call setup, call waiting
  * and call view.
  */
-public class CallActivity extends Activity
-    implements AppRTCClient.SignalingEvents,
-      PeerConnectionClient.PeerConnectionEvents,
-      CallFragment.OnCallEvents {
-
-  public static final String EXTRA_ROOMID =
-      "org.appspot.apprtc.ROOMID";
-  public static final String EXTRA_LOOPBACK =
-      "org.appspot.apprtc.LOOPBACK";
-  public static final String EXTRA_VIDEO_CALL =
-      "org.appspot.apprtc.VIDEO_CALL";
-  public static final String EXTRA_CAMERA2 =
-      "org.appspot.apprtc.CAMERA2";
-  public static final String EXTRA_VIDEO_WIDTH =
-      "org.appspot.apprtc.VIDEO_WIDTH";
-  public static final String EXTRA_VIDEO_HEIGHT =
-      "org.appspot.apprtc.VIDEO_HEIGHT";
-  public static final String EXTRA_VIDEO_FPS =
-      "org.appspot.apprtc.VIDEO_FPS";
+public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
+                                                      PeerConnectionClient.PeerConnectionEvents,
+                                                      CallFragment.OnCallEvents {
+  public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
+  public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
+  public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
+  public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
+  public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
+  public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
+  public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
   public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
       "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
-  public static final String EXTRA_VIDEO_BITRATE =
-      "org.appspot.apprtc.VIDEO_BITRATE";
-  public static final String EXTRA_VIDEOCODEC =
-      "org.appspot.apprtc.VIDEOCODEC";
-  public static final String EXTRA_HWCODEC_ENABLED =
-      "org.appspot.apprtc.HWCODEC";
-  public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
-      "org.appspot.apprtc.CAPTURETOTEXTURE";
-  public static final String EXTRA_AUDIO_BITRATE =
-      "org.appspot.apprtc.AUDIO_BITRATE";
-  public static final String EXTRA_AUDIOCODEC =
-      "org.appspot.apprtc.AUDIOCODEC";
+  public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
+  public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
+  public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
+  public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
+  public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
+  public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
   public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
       "org.appspot.apprtc.NOAUDIOPROCESSING";
-  public static final String EXTRA_AECDUMP_ENABLED =
-      "org.appspot.apprtc.AECDUMP";
-  public static final String EXTRA_OPENSLES_ENABLED =
-      "org.appspot.apprtc.OPENSLES";
-  public static final String EXTRA_DISABLE_BUILT_IN_AEC =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
-  public static final String EXTRA_DISABLE_BUILT_IN_AGC =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
-  public static final String EXTRA_DISABLE_BUILT_IN_NS =
-      "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
-    public static final String EXTRA_ENABLE_LEVEL_CONTROL =
-      "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
-  public static final String EXTRA_DISPLAY_HUD =
-      "org.appspot.apprtc.DISPLAY_HUD";
+  public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
+  public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
+  public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
+  public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
+  public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
+  public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
+  public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
   public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
-  public static final String EXTRA_CMDLINE =
-      "org.appspot.apprtc.CMDLINE";
-  public static final String EXTRA_RUNTIME =
-      "org.appspot.apprtc.RUNTIME";
+  public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
+  public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
   private static final String TAG = "CallRTCClient";
 
   // List of mandatory application permissions.
-  private static final String[] MANDATORY_PERMISSIONS = {
-    "android.permission.MODIFY_AUDIO_SETTINGS",
-    "android.permission.RECORD_AUDIO",
-    "android.permission.INTERNET"
-  };
+  private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
+      "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
 
   // Peer connection statistics callback period in ms.
   private static final int STAT_CALLBACK_PERIOD = 1000;
@@ -152,22 +125,16 @@
   @Override
   public void onCreate(Bundle savedInstanceState) {
     super.onCreate(savedInstanceState);
-    Thread.setDefaultUncaughtExceptionHandler(
-        new UnhandledExceptionHandler(this));
+    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
 
     // Set window styles for fullscreen-window size. Needs to be done before
     // adding content.
     requestWindowFeature(Window.FEATURE_NO_TITLE);
-    getWindow().addFlags(
-        LayoutParams.FLAG_FULLSCREEN
-        | LayoutParams.FLAG_KEEP_SCREEN_ON
-        | LayoutParams.FLAG_DISMISS_KEYGUARD
-        | LayoutParams.FLAG_SHOW_WHEN_LOCKED
+    getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
+        | LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
         | LayoutParams.FLAG_TURN_SCREEN_ON);
-    getWindow().getDecorView().setSystemUiVisibility(
-        View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
-        | View.SYSTEM_UI_FLAG_FULLSCREEN
-        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+    getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+        | View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
     setContentView(R.layout.activity_call);
 
     iceConnected = false;
@@ -232,30 +199,24 @@
     boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
     boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
 
-    boolean useCamera2 = Camera2Enumerator.isSupported(this)
-        && intent.getBooleanExtra(EXTRA_CAMERA2, true);
+    boolean useCamera2 =
+        Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);
 
-    peerConnectionParameters = new PeerConnectionParameters(
-        intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
-        loopback,
-        tracing,
-        useCamera2,
-        intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
-        intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
-        intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
-        intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
-        intent.getStringExtra(EXTRA_VIDEOCODEC),
-        intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
-        intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
-        intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
-        intent.getStringExtra(EXTRA_AUDIOCODEC),
-        intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
-        intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
-        intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
-        intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
-        intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
-        intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
-        intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false));
+    peerConnectionParameters =
+        new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
+            tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
+            intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
+            intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
+            intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+            intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
+            intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
+            intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
+            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
+            intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false));
     commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
     runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
 
@@ -268,8 +229,7 @@
       appRtcClient = new DirectRTCClient(this);
     }
     // Create connection parameters.
-    roomConnectionParameters = new RoomConnectionParameters(
-        roomUri.toString(), roomId, loopback);
+    roomConnectionParameters = new RoomConnectionParameters(roomUri.toString(), roomId, loopback);
 
     // Create CPU monitor
     cpuMonitor = new CpuMonitor(this);
@@ -419,21 +379,19 @@
     callStartedTimeMs = System.currentTimeMillis();
 
     // Start room connection.
-    logAndToast(getString(R.string.connecting_to,
-        roomConnectionParameters.roomUrl));
+    logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
     appRtcClient.connectToRoom(roomConnectionParameters);
 
     // Create and audio manager that will take care of audio routing,
     // audio modes, audio device enumeration etc.
     audioManager = AppRTCAudioManager.create(this, new Runnable() {
-        // This method will be called each time the audio state (number and
-        // type of devices) has been changed.
-        @Override
-        public void run() {
-          onAudioManagerChangedState();
-        }
+      // This method will be called each time the audio state (number and
+      // type of devices) has been changed.
+      @Override
+      public void run() {
+        onAudioManagerChangedState();
       }
-    );
+    });
     // Store existing audio settings and change audio mode to
     // MODE_IN_COMMUNICATION for best possible VoIP performance.
     Log.d(TAG, "Initializing the audio manager...");
@@ -499,13 +457,16 @@
           .setTitle(getText(R.string.channel_error_title))
           .setMessage(errorMessage)
           .setCancelable(false)
-          .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
-            @Override
-            public void onClick(DialogInterface dialog, int id) {
-              dialog.cancel();
-              disconnect();
-            }
-          }).create().show();
+          .setNeutralButton(R.string.ok,
+              new DialogInterface.OnClickListener() {
+                @Override
+                public void onClick(DialogInterface dialog, int id) {
+                  dialog.cancel();
+                  disconnect();
+                }
+              })
+          .create()
+          .show();
     }
   }
 
@@ -539,8 +500,8 @@
 
     signalingParameters = params;
     logAndToast("Creating peer connection, delay=" + delta + "ms");
-    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
-        localRender, remoteRender, signalingParameters);
+    peerConnectionClient.createPeerConnection(
+        rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);
 
     if (signalingParameters.initiator) {
       logAndToast("Creating OFFER...");
@@ -716,8 +677,7 @@
   }
 
   @Override
-  public void onPeerConnectionClosed() {
-  }
+  public void onPeerConnectionClosed() {}
 
   @Override
   public void onPeerConnectionStatsReady(final StatsReport[] reports) {
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
index f9e89d8..8482de0 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CallFragment.java
@@ -50,26 +50,18 @@
   }
 
   @Override
-  public View onCreateView(LayoutInflater inflater, ViewGroup container,
-      Bundle savedInstanceState) {
-    controlView =
-        inflater.inflate(R.layout.fragment_call, container, false);
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    controlView = inflater.inflate(R.layout.fragment_call, container, false);
 
     // Create UI controls.
-    contactView =
-        (TextView) controlView.findViewById(R.id.contact_name_call);
-    disconnectButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
-    cameraSwitchButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
-    videoScalingButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
-    toggleMuteButton =
-        (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
-    captureFormatText =
-        (TextView) controlView.findViewById(R.id.capture_format_text_call);
-    captureFormatSlider =
-        (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
+    contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
+    disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
+    cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
+    videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
+    toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
+    captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
+    captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
 
     // Add buttons click events.
     disconnectButton.setOnClickListener(new View.OnClickListener() {
@@ -90,12 +82,10 @@
       @Override
       public void onClick(View view) {
         if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
-          videoScalingButton.setBackgroundResource(
-              R.drawable.ic_action_full_screen);
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
           scalingType = ScalingType.SCALE_ASPECT_FIT;
         } else {
-          videoScalingButton.setBackgroundResource(
-              R.drawable.ic_action_return_from_full_screen);
+          videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
           scalingType = ScalingType.SCALE_ASPECT_FILL;
         }
         callEvents.onVideoScalingSwitch(scalingType);
@@ -144,5 +134,4 @@
     super.onAttach(activity);
     callEvents = (OnCallEvents) activity;
   }
-
 }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
index d231c41..5476ee9 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -24,13 +24,10 @@
  * Control capture format based on a seekbar listener.
  */
 public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
-  private final List<CaptureFormat> formats = Arrays.asList(
-      new CaptureFormat(1280, 720, 0, 30000),
-      new CaptureFormat(960, 540, 0, 30000),
-      new CaptureFormat(640, 480, 0, 30000),
-      new CaptureFormat(480, 360, 0, 30000),
-      new CaptureFormat(320, 240, 0, 30000),
-      new CaptureFormat(256, 144, 0, 30000));
+  private final List<CaptureFormat> formats =
+      Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
+          new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
+          new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
   // Prioritize framerate below this threshold and resolution above the threshold.
   private static final int FRAMERATE_THRESHOLD = 15;
   private TextView captureFormatText;
@@ -52,14 +49,14 @@
       int firstFps = calculateFramerate(targetBandwidth, first);
       int secondFps = calculateFramerate(targetBandwidth, second);
 
-     if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
-         || firstFps == secondFps) {
+      if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
+          || firstFps == secondFps) {
         // Compare resolution.
         return first.width * first.height - second.width * second.height;
-     } else {
+      } else {
         // Compare fps.
         return firstFps - secondFps;
-     }
+      }
     }
   };
 
@@ -76,8 +73,8 @@
     // Extract max bandwidth (in millipixels / second).
     long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
     for (CaptureFormat format : formats) {
-      maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
-          (long) format.width * format.height * format.framerate.max);
+      maxCaptureBandwidth =
+          Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
     }
 
     // Fraction between 0 and 1.
@@ -97,8 +94,7 @@
   }
 
   @Override
-  public void onStartTrackingTouch(SeekBar seekBar) {
-  }
+  public void onStartTrackingTouch(SeekBar seekBar) {}
 
   @Override
   public void onStopTrackingTouch(SeekBar seekBar) {
@@ -107,8 +103,8 @@
 
   // Return the highest frame rate possible based on bandwidth and format.
   private int calculateFramerate(double bandwidth, CaptureFormat format) {
-    return (int) Math.round(Math.min(format.framerate.max,
-        (int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
+    return (int) Math.round(
+        Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
+        / 1000.0);
   }
 }
-
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
index a55901b..5a84c44 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
@@ -118,17 +118,15 @@
     setContentView(R.layout.activity_connect);
 
     roomEditText = (EditText) findViewById(R.id.room_edittext);
-    roomEditText.setOnEditorActionListener(
-      new TextView.OnEditorActionListener() {
-        @Override
-        public boolean onEditorAction(
-            TextView textView, int i, KeyEvent keyEvent) {
-          if (i == EditorInfo.IME_ACTION_DONE) {
-            addFavoriteButton.performClick();
-            return true;
-          }
-          return false;
+    roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
+      @Override
+      public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
+        if (i == EditorInfo.IME_ACTION_DONE) {
+          addFavoriteButton.performClick();
+          return true;
         }
+        return false;
+      }
     });
     roomEditText.requestFocus();
 
@@ -143,12 +141,9 @@
 
     // If an implicit VIEW intent is launching the app, go directly to that URL.
     final Intent intent = getIntent();
-    if ("android.intent.action.VIEW".equals(intent.getAction())
-        && !commandLineRun) {
-      boolean loopback = intent.getBooleanExtra(
-          CallActivity.EXTRA_LOOPBACK, false);
-      int runTimeMs = intent.getIntExtra(
-          CallActivity.EXTRA_RUNTIME, 0);
+    if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
+      boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
+      int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
       String room = sharedPref.getString(keyprefRoom, "");
       connectToRoom(room, true, loopback, runTimeMs);
     }
@@ -230,8 +225,7 @@
         Log.e(TAG, "Failed to load room list: " + e.toString());
       }
     }
-    adapter = new ArrayAdapter<String>(
-        this, android.R.layout.simple_list_item_1, roomList);
+    adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
     roomListView.setAdapter(adapter);
     if (adapter.getCount() > 0) {
       roomListView.requestFocus();
@@ -240,8 +234,7 @@
   }
 
   @Override
-  protected void onActivityResult(
-      int requestCode, int resultCode, Intent data) {
+  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
     if (requestCode == CONNECTION_REQUEST && commandLineRun) {
       Log.d(TAG, "Return: " + resultCode);
       setResult(resultCode);
@@ -260,71 +253,63 @@
     }
 
     String roomUrl = sharedPref.getString(
-        keyprefRoomServerUrl,
-        getString(R.string.pref_room_server_url_default));
+        keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
 
     // Video call enabled flag.
-    boolean videoCallEnabled = sharedPref.getBoolean(keyprefVideoCallEnabled,
-        Boolean.valueOf(getString(R.string.pref_videocall_default)));
+    boolean videoCallEnabled = sharedPref.getBoolean(
+        keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));
 
     // Use Camera2 option.
-    boolean useCamera2 = sharedPref.getBoolean(keyprefCamera2,
-        Boolean.valueOf(getString(R.string.pref_camera2_default)));
+    boolean useCamera2 = sharedPref.getBoolean(
+        keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));
 
     // Get default codecs.
-    String videoCodec = sharedPref.getString(keyprefVideoCodec,
-        getString(R.string.pref_videocodec_default));
-    String audioCodec = sharedPref.getString(keyprefAudioCodec,
-        getString(R.string.pref_audiocodec_default));
+    String videoCodec =
+        sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
+    String audioCodec =
+        sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));
 
     // Check HW codec flag.
-    boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
-        Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
+    boolean hwCodec = sharedPref.getBoolean(
+        keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
 
     // Check Capture to texture.
     boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
         Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
 
     // Check Disable Audio Processing flag.
-    boolean noAudioProcessing = sharedPref.getBoolean(
-        keyprefNoAudioProcessingPipeline,
+    boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
         Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
 
     // Check Disable Audio Processing flag.
     boolean aecDump = sharedPref.getBoolean(
-        keyprefAecDump,
-        Boolean.valueOf(getString(R.string.pref_aecdump_default)));
+        keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));
 
     // Check OpenSL ES enabled flag.
     boolean useOpenSLES = sharedPref.getBoolean(
-        keyprefOpenSLES,
-        Boolean.valueOf(getString(R.string.pref_opensles_default)));
+        keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));
 
     // Check Disable built-in AEC flag.
-    boolean disableBuiltInAEC = sharedPref.getBoolean(
-        keyprefDisableBuiltInAec,
+    boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));
 
     // Check Disable built-in AGC flag.
-    boolean disableBuiltInAGC = sharedPref.getBoolean(
-        keyprefDisableBuiltInAgc,
+    boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));
 
     // Check Disable built-in NS flag.
-    boolean disableBuiltInNS = sharedPref.getBoolean(
-        keyprefDisableBuiltInNs,
+    boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
         Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));
 
     // Check Enable level control.
-    boolean enableLevelControl = sharedPref.getBoolean(
-        keyprefEnableLevelControl,
+    boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
         Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));
 
     // Get video resolution from settings.
     int videoWidth = 0;
     int videoHeight = 0;
-    String resolution = sharedPref.getString(keyprefResolution,
-        getString(R.string.pref_resolution_default));
+    String resolution =
+        sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
     String[] dimensions = resolution.split("[ x]+");
     if (dimensions.length == 2) {
       try {
@@ -339,8 +324,7 @@
 
     // Get camera fps from settings.
     int cameraFps = 0;
-    String fps = sharedPref.getString(keyprefFps,
-        getString(R.string.pref_fps_default));
+    String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
     String[] fpsValues = fps.split("[ x]+");
     if (fpsValues.length == 2) {
       try {
@@ -356,31 +340,28 @@
 
     // Get video and audio start bitrate.
     int videoStartBitrate = 0;
-    String bitrateTypeDefault = getString(
-        R.string.pref_maxvideobitrate_default);
-    String bitrateType = sharedPref.getString(
-        keyprefVideoBitrateType, bitrateTypeDefault);
+    String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
+    String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
-      String bitrateValue = sharedPref.getString(keyprefVideoBitrateValue,
-          getString(R.string.pref_maxvideobitratevalue_default));
+      String bitrateValue = sharedPref.getString(
+          keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
       videoStartBitrate = Integer.parseInt(bitrateValue);
     }
     int audioStartBitrate = 0;
     bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-    bitrateType = sharedPref.getString(
-        keyprefAudioBitrateType, bitrateTypeDefault);
+    bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
-      String bitrateValue = sharedPref.getString(keyprefAudioBitrateValue,
-          getString(R.string.pref_startaudiobitratevalue_default));
+      String bitrateValue = sharedPref.getString(
+          keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
       audioStartBitrate = Integer.parseInt(bitrateValue);
     }
 
     // Check statistics display option.
-    boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
-        Boolean.valueOf(getString(R.string.pref_displayhud_default)));
+    boolean displayHud = sharedPref.getBoolean(
+        keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));
 
     boolean tracing = sharedPref.getBoolean(
-            keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
+        keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
 
     // Start AppRTCMobile activity.
     Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
@@ -395,14 +376,12 @@
       intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
       intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
       intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
-      intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
-          captureQualitySlider);
+      intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
       intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
       intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
       intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
       intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
-      intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
-          noAudioProcessing);
+      intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
       intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
       intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
       intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
@@ -429,22 +408,25 @@
         .setTitle(getText(R.string.invalid_url_title))
         .setMessage(getString(R.string.invalid_url_text, url))
         .setCancelable(false)
-        .setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
-            public void onClick(DialogInterface dialog, int id) {
-              dialog.cancel();
-            }
-          }).create().show();
+        .setNeutralButton(R.string.ok,
+            new DialogInterface.OnClickListener() {
+              public void onClick(DialogInterface dialog, int id) {
+                dialog.cancel();
+              }
+            })
+        .create()
+        .show();
     return false;
   }
 
-  private final AdapterView.OnItemClickListener
-      roomListClickListener = new AdapterView.OnItemClickListener() {
-    @Override
-    public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
-      String roomId = ((TextView) view).getText().toString();
-      connectToRoom(roomId, false, false, 0);
-    }
-  };
+  private final AdapterView.OnItemClickListener roomListClickListener =
+      new AdapterView.OnItemClickListener() {
+        @Override
+        public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
+          String roomId = ((TextView) view).getText().toString();
+          connectToRoom(roomId, false, false, 0);
+        }
+      };
 
   private final OnClickListener addFavoriteListener = new OnClickListener() {
     @Override
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
index d3c5954..86da9d8 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
@@ -228,7 +228,7 @@
       try {
         BufferedReader reader = new BufferedReader(fin);
         Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");
-        scanner.nextInt();  // Skip leading number 0.
+        scanner.nextInt(); // Skip leading number 0.
         cpusPresent = 1 + scanner.nextInt();
         scanner.close();
       } catch (Exception e) {
@@ -247,7 +247,7 @@
     curPath = new String[cpusPresent];
     curFreqScales = new double[cpusPresent];
     for (int i = 0; i < cpusPresent; i++) {
-      cpuFreqMax[i] = 0;  // Frequency "not yet determined".
+      cpuFreqMax[i] = 0; // Frequency "not yet determined".
       curFreqScales[i] = 0;
       maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
       curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
@@ -275,8 +275,8 @@
     int batteryLevel = 0;
     int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
     if (batteryScale > 0) {
-      batteryLevel = (int) (
-          100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
+      batteryLevel =
+          (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
     }
     return batteryLevel;
   }
@@ -317,10 +317,10 @@
           Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
           lastSeenMaxFreq = cpufreqMax;
           cpuFreqMax[i] = cpufreqMax;
-          maxPath[i] = null;  // Kill path to free its memory.
+          maxPath[i] = null; // Kill path to free its memory.
         }
       } else {
-        lastSeenMaxFreq = cpuFreqMax[i];  // A valid, previously read value.
+        lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
       }
 
       long cpuFreqCur = readFreqFromFile(curPath[i]);
@@ -402,16 +402,20 @@
   private synchronized String getStatString() {
     StringBuilder stat = new StringBuilder();
     stat.append("CPU User: ")
-        .append(doubleToPercent(userCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(userCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(userCpuUsage.getAverage()))
         .append(". System: ")
-        .append(doubleToPercent(systemCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(systemCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(systemCpuUsage.getAverage()))
         .append(". Freq: ")
-        .append(doubleToPercent(frequencyScale.getCurrent())).append("/")
+        .append(doubleToPercent(frequencyScale.getCurrent()))
+        .append("/")
         .append(doubleToPercent(frequencyScale.getAverage()))
         .append(". Total usage: ")
-        .append(doubleToPercent(totalCpuUsage.getCurrent())).append("/")
+        .append(doubleToPercent(totalCpuUsage.getCurrent()))
+        .append("/")
         .append(doubleToPercent(totalCpuUsage.getAverage()))
         .append(". Cores: ")
         .append(actualCpusPresent);
@@ -478,13 +482,13 @@
         String lines[] = line.split("\\s+");
         int length = lines.length;
         if (length >= 5) {
-          userTime = parseLong(lines[1]);  // user
-          userTime += parseLong(lines[2]);  // nice
+          userTime = parseLong(lines[1]); // user
+          userTime += parseLong(lines[2]); // nice
           systemTime = parseLong(lines[3]); // system
           idleTime = parseLong(lines[4]); // idle
         }
         if (length >= 8) {
-          userTime += parseLong(lines[5]);  // iowait
+          userTime += parseLong(lines[5]); // iowait
           systemTime += parseLong(lines[6]); // irq
           systemTime += parseLong(lines[7]); // softirq
         }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
index b85f6da..199979f 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
@@ -35,32 +35,28 @@
   private static final int DEFAULT_PORT = 8888;
 
   // Regex pattern used for checking if room id looks like an IP.
-  static final Pattern IP_PATTERN = Pattern.compile(
-      "("
-        // IPv4
-        + "((\\d+\\.){3}\\d+)|"
-        // IPv6
-        + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
-              + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
-        + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
-        // IPv6 without []
-        + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
-        + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
-        // Literals
-        + "localhost"
+  static final Pattern IP_PATTERN = Pattern.compile("("
+      // IPv4
+      + "((\\d+\\.){3}\\d+)|"
+      // IPv6
+      + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
+      + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
+      + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
+      // IPv6 without []
+      + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
+      + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
+      // Literals
+      + "localhost"
       + ")"
       // Optional port number
-      + "(:(\\d+))?"
-  );
+      + "(:(\\d+))?");
 
   private final ExecutorService executor;
   private final SignalingEvents events;
   private TCPChannelClient tcpClient;
   private RoomConnectionParameters connectionParameters;
 
-  private enum ConnectionState {
-    NEW, CONNECTED, CLOSED, ERROR
-  };
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
 
   // All alterations of the room state should be done from inside the looper thread.
   private ConnectionState roomState;
@@ -209,7 +205,7 @@
       public void run() {
         JSONObject json = new JSONObject();
         jsonPut(json, "type", "remove-candidates");
-        JSONArray jsonArray =  new JSONArray();
+        JSONArray jsonArray = new JSONArray();
         for (final IceCandidate candidate : candidates) {
           jsonArray.put(toJsonCandidate(candidate));
         }
@@ -244,7 +240,7 @@
           null, // wwsPostUrl
           null, // offerSdp
           null // iceCandidates
-      );
+          );
       events.onConnectedToRoom(parameters);
     }
   }
@@ -265,13 +261,11 @@
         events.onRemoteIceCandidatesRemoved(candidates);
       } else if (type.equals("answer")) {
         SessionDescription sdp = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm(type),
-            json.getString("sdp"));
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
         events.onRemoteDescription(sdp);
       } else if (type.equals("offer")) {
         SessionDescription sdp = new SessionDescription(
-            SessionDescription.Type.fromCanonicalForm(type),
-            json.getString("sdp"));
+            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
 
         SignalingParameters parameters = new SignalingParameters(
             // Ice servers are not needed for direct connections.
@@ -282,7 +276,7 @@
             null, // wssPostUrl
             sdp, // offerSdp
             null // iceCandidates
-        );
+            );
         roomState = ConnectionState.CONNECTED;
         events.onConnectedToRoom(parameters);
       } else {
@@ -347,8 +341,7 @@
 
   // Converts a JSON candidate to a Java object.
   private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
-    return new IceCandidate(json.getString("id"),
-        json.getInt("label"),
-        json.getString("candidate"));
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
   }
 }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
index 7b6c290..ca571bb 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/HudFragment.java
@@ -41,8 +41,8 @@
   private CpuMonitor cpuMonitor;
 
   @Override
-  public View onCreateView(LayoutInflater inflater, ViewGroup container,
-      Bundle savedInstanceState) {
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
     controlView = inflater.inflate(R.layout.fragment_hud, container, false);
 
     // Create UI controls.
@@ -57,8 +57,8 @@
       @Override
       public void onClick(View view) {
         if (displayHud) {
-          int visibility = (hudViewBwe.getVisibility() == View.VISIBLE)
-              ? View.INVISIBLE : View.VISIBLE;
+          int visibility =
+              (hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
           hudViewsSetProperties(visibility);
         }
       }
@@ -126,8 +126,7 @@
     String actualBitrate = null;
 
     for (StatsReport report : reports) {
-      if (report.type.equals("ssrc") && report.id.contains("ssrc")
-          && report.id.contains("send")) {
+      if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
         // Send video statistics.
         Map<String, String> reportMap = getReportMap(report);
         String trackId = reportMap.get("googTrackId");
@@ -195,9 +194,11 @@
 
     if (cpuMonitor != null) {
       encoderStat.append("CPU%: ")
-        .append(cpuMonitor.getCpuUsageCurrent()).append("/")
-        .append(cpuMonitor.getCpuUsageAverage())
-        .append(". Freq: ").append(cpuMonitor.getFrequencyScaleAverage());
+          .append(cpuMonitor.getCpuUsageCurrent())
+          .append("/")
+          .append(cpuMonitor.getCpuUsageAverage())
+          .append(". Freq: ")
+          .append(cpuMonitor.getFrequencyScaleAverage());
     }
     encoderStatView.setText(encoderStat.toString());
   }
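
The StringBuilder hunk above is the clearest instance of the chain-wrapping rule this
CL applies: once a call chain has to wrap, every call moves to its own line at a +4
continuation indent. In miniature (the stat values 12 and 9 are placeholders):

  // Wrapped-chain style after this CL; 12 and 9 are hypothetical stats.
  StringBuilder encoderStat = new StringBuilder("Codecs: VP8");
  encoderStat.append(". CPU%: ")
      .append(12)
      .append("/")
      .append(9);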
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 5c50e5a..f855fc1 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -74,12 +74,11 @@
   private static final String VIDEO_CODEC_H264 = "H264";
   private static final String AUDIO_CODEC_OPUS = "opus";
   private static final String AUDIO_CODEC_ISAC = "ISAC";
-  private static final String VIDEO_CODEC_PARAM_START_BITRATE =
-      "x-google-start-bitrate";
+  private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
   private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
   private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
-  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT= "googAutoGainControl";
-  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT  = "googHighpassFilter";
+  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
+  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
   private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
   private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
   private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
@@ -162,14 +161,12 @@
     public final boolean disableBuiltInNS;
     public final boolean enableLevelControl;
 
-    public PeerConnectionParameters(
-        boolean videoCallEnabled, boolean loopback, boolean tracing, boolean useCamera2,
-        int videoWidth, int videoHeight, int videoFps,
-        int videoMaxBitrate, String videoCodec, boolean videoCodecHwAcceleration,
-        boolean captureToTexture, int audioStartBitrate, String audioCodec,
-        boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
-        boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
-        boolean enableLevelControl) {
+    public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
+        boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
+        String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
+        int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
+        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+        boolean disableBuiltInNS, boolean enableLevelControl) {
       this.videoCallEnabled = videoCallEnabled;
       this.useCamera2 = useCamera2;
       this.loopback = loopback;
@@ -255,10 +252,8 @@
     this.options = options;
   }
 
-  public void createPeerConnectionFactory(
-      final Context context,
-      final PeerConnectionParameters peerConnectionParameters,
-      final PeerConnectionEvents events) {
+  public void createPeerConnectionFactory(final Context context,
+      final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
     this.peerConnectionParameters = peerConnectionParameters;
     this.events = events;
     videoCallEnabled = peerConnectionParameters.videoCallEnabled;
@@ -289,10 +284,8 @@
     });
   }
 
-  public void createPeerConnection(
-      final EglBase.Context renderEGLContext,
-      final VideoRenderer.Callbacks localRender,
-      final VideoRenderer.Callbacks remoteRender,
+  public void createPeerConnection(final EglBase.Context renderEGLContext,
+      final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
       final SignalingParameters signalingParameters) {
     if (peerConnectionParameters == null) {
       Log.e(TAG, "Creating peer connection without initializing factory.");
@@ -329,14 +322,14 @@
   }
 
   private void createPeerConnectionFactoryInternal(Context context) {
-      PeerConnectionFactory.initializeInternalTracer();
-      if (peerConnectionParameters.tracing) {
-          PeerConnectionFactory.startInternalTracingCapture(
-                  Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
-                  + "webrtc-trace.txt");
-      }
-    Log.d(TAG, "Create peer connection factory. Use video: " +
-        peerConnectionParameters.videoCallEnabled);
+    PeerConnectionFactory.initializeInternalTracer();
+    if (peerConnectionParameters.tracing) {
+      PeerConnectionFactory.startInternalTracingCapture(
+          Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+          + "webrtc-trace.txt");
+    }
+    Log.d(TAG,
+        "Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
     isError = false;
 
     // Initialize field trials.
@@ -391,8 +384,8 @@
     }
 
     // Create peer connection factory.
-    if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
-        peerConnectionParameters.videoCodecHwAcceleration)) {
+    if (!PeerConnectionFactory.initializeAndroidGlobals(
+            context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
       events.onPeerConnectionError("Failed to initializeAndroidGlobals");
     }
     if (options != null) {
@@ -448,30 +441,30 @@
     // added for audio performance measurements
     if (peerConnectionParameters.noAudioProcessing) {
       Log.d(TAG, "Disabling audio processing");
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-            AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-            AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-            AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-           AUDIO_NOISE_SUPPRESSION_CONSTRAINT , "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
     }
     if (peerConnectionParameters.enableLevelControl) {
       Log.d(TAG, "Enabling level control.");
-      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
+      audioConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
     }
     // Create SDP constraints.
     sdpMediaConstraints = new MediaConstraints();
-    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-        "OfferToReceiveAudio", "true"));
+    sdpMediaConstraints.mandatory.add(
+        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
     if (videoCallEnabled || peerConnectionParameters.loopback) {
-      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          "OfferToReceiveVideo", "true"));
+      sdpMediaConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
     } else {
-      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
-          "OfferToReceiveVideo", "false"));
+      sdpMediaConstraints.mandatory.add(
+          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
     }
   }
 
@@ -531,15 +524,12 @@
     // Use ECDSA encryption.
     rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
 
-    peerConnection = factory.createPeerConnection(
-        rtcConfig, pcConstraints, pcObserver);
+    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
     isInitiator = false;
 
     // Set default WebRTC tracing and INFO libjingle logging.
     // NOTE: this _must_ happen while |factory| is alive!
-    Logging.enableTracing(
-        "logcat:",
-        EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
+    Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
     Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
 
     mediaStream = factory.createLocalMediaStream("ARDAMS");
@@ -572,15 +562,13 @@
 
     if (peerConnectionParameters.aecDump) {
       try {
-        aecDumpFileDescriptor = ParcelFileDescriptor.open(
-            new File(Environment.getExternalStorageDirectory().getPath()
-                + File.separator
-                + "Download/audio.aecdump"),
-                ParcelFileDescriptor.MODE_READ_WRITE |
-                ParcelFileDescriptor.MODE_CREATE |
-                ParcelFileDescriptor.MODE_TRUNCATE);
+        aecDumpFileDescriptor =
+            ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+                                          + File.separator + "Download/audio.aecdump"),
+                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+                    | ParcelFileDescriptor.MODE_TRUNCATE);
         factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
-      } catch(IOException e) {
+      } catch (IOException e) {
         Log.e(TAG, "Can not open aecdump file", e);
       }
     }
@@ -607,7 +595,7 @@
     if (videoCapturer != null) {
       try {
         videoCapturer.stopCapture();
-      } catch(InterruptedException e) {
+      } catch (InterruptedException e) {
         throw new RuntimeException(e);
       }
       videoCapturer.dispose();
@@ -773,12 +761,11 @@
           sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
         }
         if (peerConnectionParameters.audioStartBitrate > 0) {
-          sdpDescription = setStartBitrate(AUDIO_CODEC_OPUS, false,
-              sdpDescription, peerConnectionParameters.audioStartBitrate);
+          sdpDescription = setStartBitrate(
+              AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
         }
         Log.d(TAG, "Set remote SDP.");
-        SessionDescription sdpRemote = new SessionDescription(
-            sdp.type, sdpDescription);
+        SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
         peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
       }
     });
@@ -792,7 +779,8 @@
           Log.d(TAG, "Stop video source.");
           try {
             videoCapturer.stopCapture();
-          } catch (InterruptedException e) {}
+          } catch (InterruptedException e) {
+          }
           videoCapturerStopped = true;
         }
       }
@@ -833,9 +821,7 @@
 
         for (RtpParameters.Encoding encoding : parameters.encodings) {
           // Null value means no limit.
-          encoding.maxBitrateBps = maxBitrateKbps == null
-              ? null
-              : maxBitrateKbps * BPS_IN_KBPS;
+          encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
         }
         if (!localVideoSender.setParameters(parameters)) {
           Log.e(TAG, "RtpSender.setParameters failed.");
@@ -887,8 +873,8 @@
     }
   }
 
-  private static String setStartBitrate(String codec, boolean isVideoCodec,
-      String sdpDescription, int bitrateKbps) {
+  private static String setStartBitrate(
+      String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
     String[] lines = sdpDescription.split("\r\n");
     int rtpmapLineIndex = -1;
     boolean sdpFormatUpdated = false;
@@ -909,8 +895,7 @@
       Log.w(TAG, "No rtpmap for " + codec + " codec");
       return sdpDescription;
     }
-    Log.d(TAG, "Found " +  codec + " rtpmap " + codecRtpMap
-        + " at " + lines[rtpmapLineIndex]);
+    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);
 
     // Check if a=fmtp string already exist in remote SDP for this codec and
     // update it with new bitrate parameter.
@@ -919,13 +904,11 @@
     for (int i = 0; i < lines.length; i++) {
       Matcher codecMatcher = codecPattern.matcher(lines[i]);
       if (codecMatcher.matches()) {
-        Log.d(TAG, "Found " +  codec + " " + lines[i]);
+        Log.d(TAG, "Found " + codec + " " + lines[i]);
         if (isVideoCodec) {
-          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE
-              + "=" + bitrateKbps;
+          lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
         } else {
-          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE
-              + "=" + (bitrateKbps * 1000);
+          lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
         }
         Log.d(TAG, "Update remote SDP line: " + lines[i]);
         sdpFormatUpdated = true;
@@ -940,22 +923,20 @@
       if (!sdpFormatUpdated && i == rtpmapLineIndex) {
         String bitrateSet;
         if (isVideoCodec) {
-          bitrateSet = "a=fmtp:" + codecRtpMap + " "
-              + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
+          bitrateSet =
+              "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
         } else {
-          bitrateSet = "a=fmtp:" + codecRtpMap + " "
-              + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
+          bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+              + (bitrateKbps * 1000);
         }
         Log.d(TAG, "Add remote SDP line: " + bitrateSet);
         newSdpDescription.append(bitrateSet).append("\r\n");
       }
-
     }
     return newSdpDescription.toString();
   }
 
-  private static String preferCodec(
-      String sdpDescription, String codec, boolean isAudio) {
+  private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
     String[] lines = sdpDescription.split("\r\n");
     int mLineIndex = -1;
     String codecRtpMap = null;
@@ -966,8 +947,7 @@
     if (isAudio) {
       mediaDescription = "m=audio ";
     }
-    for (int i = 0; (i < lines.length)
-        && (mLineIndex == -1 || codecRtpMap == null); i++) {
+    for (int i = 0; (i < lines.length) && (mLineIndex == -1 || codecRtpMap == null); i++) {
       if (lines[i].startsWith(mediaDescription)) {
         mLineIndex = i;
         continue;
@@ -985,8 +965,7 @@
       Log.w(TAG, "No rtpmap for " + codec);
       return sdpDescription;
     }
-    Log.d(TAG, "Found " +  codec + " rtpmap " + codecRtpMap + ", prefer at "
-        + lines[mLineIndex]);
+    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at " + lines[mLineIndex]);
     String[] origMLineParts = lines[mLineIndex].split(" ");
     if (origMLineParts.length > 3) {
       StringBuilder newMLine = new StringBuilder();
@@ -1025,9 +1004,9 @@
 
   private void switchCameraInternal() {
     if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
-      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : "
-          + isError + ". Number of cameras: " + numberOfCameras);
-      return;  // No video is sent or only one camera is available or error happened.
+      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
+              + ". Number of cameras: " + numberOfCameras);
+      return; // No video is sent or only one camera is available or error happened.
     }
     Log.d(TAG, "Switch camera");
     videoCapturer.switchCamera(null);
@@ -1053,8 +1032,8 @@
 
   private void changeCaptureFormatInternal(int width, int height, int framerate) {
     if (!videoCallEnabled || isError || videoCapturer == null) {
-      Log.e(TAG, "Failed to change capture format. Video: " + videoCallEnabled + ". Error : "
-          + isError);
+      Log.e(TAG,
+          "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
       return;
     }
     Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
@@ -1064,7 +1043,7 @@
   // Implementation detail: observe ICE & stream changes and react accordingly.
   private class PCObserver implements PeerConnection.Observer {
     @Override
-    public void onIceCandidate(final IceCandidate candidate){
+    public void onIceCandidate(final IceCandidate candidate) {
       executor.execute(new Runnable() {
         @Override
         public void run() {
@@ -1084,14 +1063,12 @@
     }
 
     @Override
-    public void onSignalingChange(
-        PeerConnection.SignalingState newState) {
+    public void onSignalingChange(PeerConnection.SignalingState newState) {
       Log.d(TAG, "SignalingState: " + newState);
     }
 
     @Override
-    public void onIceConnectionChange(
-        final PeerConnection.IceConnectionState newState) {
+    public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
       executor.execute(new Runnable() {
         @Override
         public void run() {
@@ -1108,8 +1085,7 @@
     }
 
     @Override
-    public void onIceGatheringChange(
-      PeerConnection.IceGatheringState newState) {
+    public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
       Log.d(TAG, "IceGatheringState: " + newState);
     }
 
@@ -1119,7 +1095,7 @@
     }
 
     @Override
-    public void onAddStream(final MediaStream stream){
+    public void onAddStream(final MediaStream stream) {
       executor.execute(new Runnable() {
         @Override
         public void run() {
@@ -1140,7 +1116,7 @@
     }
 
     @Override
-    public void onRemoveStream(final MediaStream stream){
+    public void onRemoveStream(final MediaStream stream) {
       executor.execute(new Runnable() {
         @Override
         public void run() {
@@ -1151,8 +1127,7 @@
 
     @Override
     public void onDataChannel(final DataChannel dc) {
-      reportError("AppRTC doesn't use data channels, but got: " + dc.label()
-          + " anyway!");
+      reportError("AppRTC doesn't use data channels, but got: " + dc.label() + " anyway!");
     }
 
     @Override
@@ -1178,8 +1153,7 @@
       if (videoCallEnabled) {
         sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
       }
-      final SessionDescription sdp = new SessionDescription(
-          origSdp.type, sdpDescription);
+      final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
       localSdp = sdp;
       executor.execute(new Runnable() {
         @Override
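
To make the setStartBitrate() munging above concrete, a self-contained worked example
of the a=fmtp lines it builds; the payload types 96 and 111 are hypothetical, while
the parameter names and the kbps-to-bps conversion come from the constants in this diff:

  // Sketch: the SDP lines produced for a video and an audio codec.
  public class FmtpExample {
    public static void main(String[] args) {
      String video = "a=fmtp:" + 96 + " " + "x-google-start-bitrate" + "=" + 1000;
      String audio = "a=fmtp:" + 111 + " " + "maxaveragebitrate" + "=" + (32 * 1000);
      System.out.println(video); // a=fmtp:96 x-google-start-bitrate=1000
      System.out.println(audio); // a=fmtp:111 maxaveragebitrate=32000
    }
  }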
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/PercentFrameLayout.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/PercentFrameLayout.java
index 5335664..81f22eb 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/PercentFrameLayout.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/PercentFrameLayout.java
@@ -54,8 +54,7 @@
   protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
     final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
     final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
-    setMeasuredDimension(
-        MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
+    setMeasuredDimension(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
         MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
 
     final int childWidthMeasureSpec =
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
index 8996c07..f79154c 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java
@@ -58,8 +58,8 @@
     void onSignalingParametersError(final String description);
   }
 
-  public RoomParametersFetcher(String roomUrl, String roomMessage,
-      final RoomParametersFetcherEvents events) {
+  public RoomParametersFetcher(
+      String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
     this.roomUrl = roomUrl;
     this.roomMessage = roomMessage;
     this.events = events;
@@ -67,9 +67,8 @@
 
   public void makeRequest() {
     Log.d(TAG, "Connecting to room: " + roomUrl);
-    httpConnection = new AsyncHttpURLConnection(
-        "POST", roomUrl, roomMessage,
-        new AsyncHttpEvents() {
+    httpConnection =
+        new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
           @Override
           public void onHttpError(String errorMessage) {
             Log.e(TAG, "Room connection error: " + errorMessage);
@@ -114,13 +113,10 @@
           Log.d(TAG, "GAE->C #" + i + " : " + messageString);
           if (messageType.equals("offer")) {
             offerSdp = new SessionDescription(
-                SessionDescription.Type.fromCanonicalForm(messageType),
-                message.getString("sdp"));
+                SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
           } else if (messageType.equals("candidate")) {
             IceCandidate candidate = new IceCandidate(
-                message.getString("id"),
-                message.getInt("label"),
-                message.getString("candidate"));
+                message.getString("id"), message.getInt("label"), message.getString("candidate"));
             iceCandidates.add(candidate);
           } else {
             Log.e(TAG, "Unknown message: " + messageString);
@@ -153,13 +149,10 @@
       }
 
       SignalingParameters params = new SignalingParameters(
-          iceServers, initiator,
-          clientId, wssUrl, wssPostUrl,
-          offerSdp, iceCandidates);
+          iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
       events.onSignalingParametersReady(params);
     } catch (JSONException e) {
-      events.onSignalingParametersError(
-          "Room JSON parsing error: " + e.toString());
+      events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
     } catch (IOException e) {
       events.onSignalingParametersError("Room IO error: " + e.toString());
     }
@@ -169,19 +162,17 @@
   // off the main thread!
   private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
       throws IOException, JSONException {
-    LinkedList<PeerConnection.IceServer> turnServers =
-        new LinkedList<PeerConnection.IceServer>();
+    LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<PeerConnection.IceServer>();
     Log.d(TAG, "Request TURN from: " + url);
-    HttpURLConnection connection =
-        (HttpURLConnection) new URL(url).openConnection();
+    HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
     connection.setDoOutput(true);
     connection.setRequestProperty("REFERER", "https://appr.tc");
     connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
     connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
     int responseCode = connection.getResponseCode();
     if (responseCode != 200) {
-      throw new IOException("Non-200 response when requesting TURN server from "
-          + url + " : " + connection.getHeaderField(null));
+      throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+          + connection.getHeaderField(null));
     }
     InputStream responseStream = connection.getInputStream();
     String response = drainStream(responseStream);
@@ -192,14 +183,11 @@
     for (int i = 0; i < iceServers.length(); ++i) {
       JSONObject server = iceServers.getJSONObject(i);
       JSONArray turnUrls = server.getJSONArray("urls");
-      String username =
-          server.has("username") ? server.getString("username") : "";
-      String credential =
-          server.has("credential") ? server.getString("credential") : "";
+      String username = server.has("username") ? server.getString("username") : "";
+      String credential = server.has("credential") ? server.getString("credential") : "";
       for (int j = 0; j < turnUrls.length(); j++) {
         String turnUrl = turnUrls.getString(j);
-        turnServers.add(new PeerConnection.IceServer(turnUrl, username,
-            credential));
+        turnServers.add(new PeerConnection.IceServer(turnUrl, username, credential));
       }
     }
     return turnServers;
@@ -207,17 +195,15 @@
 
   // Return the list of ICE servers described by a WebRTCPeerConnection
   // configuration string.
-  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
-      String pcConfig) throws JSONException {
+  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
+      throws JSONException {
     JSONObject json = new JSONObject(pcConfig);
     JSONArray servers = json.getJSONArray("iceServers");
-    LinkedList<PeerConnection.IceServer> ret =
-        new LinkedList<PeerConnection.IceServer>();
+    LinkedList<PeerConnection.IceServer> ret = new LinkedList<PeerConnection.IceServer>();
     for (int i = 0; i < servers.length(); ++i) {
       JSONObject server = servers.getJSONObject(i);
       String url = server.getString("urls");
-      String credential =
-          server.has("credential") ? server.getString("credential") : "";
+      String credential = server.has("credential") ? server.getString("credential") : "";
       ret.add(new PeerConnection.IceServer(url, "", credential));
     }
     return ret;
@@ -228,5 +214,4 @@
     Scanner s = new Scanner(in).useDelimiter("\\A");
     return s.hasNext() ? s.next() : "";
   }
-
 }
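
The drainStream() helper kept above relies on the Scanner "\\A" idiom: with the
delimiter anchored at start-of-input, next() returns the whole stream as a single
token, and the ternary guards against an empty stream. A self-contained check:

  import java.io.ByteArrayInputStream;
  import java.util.Scanner;

  // Verifies the drainStream() idiom against an in-memory stream.
  public class DrainStreamExample {
    public static void main(String[] args) {
      ByteArrayInputStream in = new ByteArrayInputStream("{\"result\":\"SUCCESS\"}".getBytes());
      Scanner s = new Scanner(in).useDelimiter("\\A");
      System.out.println(s.hasNext() ? s.next() : ""); // {"result":"SUCCESS"}
    }
  }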
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
index 7f8daad..fd38f90 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
@@ -23,8 +23,7 @@
 /**
  * Settings activity for AppRTC.
  */
-public class SettingsActivity extends Activity
-    implements OnSharedPreferenceChangeListener{
+public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
   private SettingsFragment settingsFragment;
   private String keyprefVideoCall;
   private String keyprefCamera2;
@@ -85,7 +84,8 @@
 
     // Display the fragment as the main content.
     settingsFragment = new SettingsFragment();
-    getFragmentManager().beginTransaction()
+    getFragmentManager()
+        .beginTransaction()
         .replace(android.R.id.content, settingsFragment)
         .commit();
   }
@@ -127,8 +127,7 @@
     updateSummaryB(sharedPreferences, keyPrefTracing);
 
     if (!Camera2Enumerator.isSupported(this)) {
-      Preference camera2Preference =
-          settingsFragment.findPreference(keyprefCamera2);
+      Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);
 
       camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
       camera2Preference.setEnabled(false);
@@ -173,8 +172,8 @@
   }
 
   @Override
-  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
-      String key) {
+  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
+    // clang-format off
     if (key.equals(keyprefResolution)
         || key.equals(keyprefFps)
         || key.equals(keyprefMaxVideoBitrateType)
@@ -204,6 +203,7 @@
     } else if (key.equals(keyprefSpeakerphone)) {
       updateSummaryList(sharedPreferences, key);
     }
+    // clang-format on
     if (key.equals(keyprefMaxVideoBitrateType)) {
       setVideoBitrateEnable(sharedPreferences);
     }
@@ -218,8 +218,7 @@
     updatedPref.setSummary(sharedPreferences.getString(key, ""));
   }
 
-  private void updateSummaryBitrate(
-      SharedPreferences sharedPreferences, String key) {
+  private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
     Preference updatedPref = settingsFragment.findPreference(key);
     updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
   }
@@ -227,8 +226,8 @@
   private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
     Preference updatedPref = settingsFragment.findPreference(key);
     updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
-        ? getString(R.string.pref_value_enabled)
-        : getString(R.string.pref_value_disabled));
+            ? getString(R.string.pref_value_enabled)
+            : getString(R.string.pref_value_disabled));
   }
 
   private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
@@ -240,8 +239,8 @@
     Preference bitratePreferenceValue =
         settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
     String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
-    String bitrateType = sharedPreferences.getString(
-        keyprefMaxVideoBitrateType, bitrateTypeDefault);
+    String bitrateType =
+        sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
     if (bitrateType.equals(bitrateTypeDefault)) {
       bitratePreferenceValue.setEnabled(false);
     } else {
@@ -253,8 +252,8 @@
     Preference bitratePreferenceValue =
         settingsFragment.findPreference(keyprefStartAudioBitrateValue);
     String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-    String bitrateType = sharedPreferences.getString(
-        keyprefStartAudioBitrateType, bitrateTypeDefault);
+    String bitrateType =
+        sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
     if (bitrateType.equals(bitrateTypeDefault)) {
       bitratePreferenceValue.setEnabled(false);
     } else {
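
The // clang-format off and // clang-format on comments added above are clang-format's
standard suppression markers; this CL uses them wherever hand alignment should survive
future formatting passes. The pattern in miniature (array contents hypothetical):

  // Hand-formatted table kept one entry per line by the guards.
  // clang-format off
  final String[] resolutions = new String[] {
      "640 x 480",
      "1280 x 720"
  };
  // clang-format on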
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
index 3fc5b51..d969bd7 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/SettingsFragment.java
@@ -17,7 +17,6 @@
  * Settings fragment for AppRTC.
  */
 public class SettingsFragment extends PreferenceFragment {
-
   @Override
   public void onCreate(Bundle savedInstanceState) {
     super.onCreate(savedInstanceState);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
index 7e09c9b..019a996 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
@@ -116,7 +116,6 @@
     });
   }
 
-
   /**
    * Base class for server and client sockets. Contains a listening thread that will call
    * eventListener.onTCPMessage on new messages.
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
index 18f5dc5..33a4382 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/UnhandledExceptionHandler.java
@@ -29,8 +29,7 @@
  * Thread.setDefaultUncaughtExceptionHandler() rather than
  * Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
  */
-public class UnhandledExceptionHandler
-    implements Thread.UncaughtExceptionHandler {
+public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
   private static final String TAG = "AppRTCMobileActivity";
   private final Activity activity;
 
@@ -40,31 +39,30 @@
 
   public void uncaughtException(Thread unusedThread, final Throwable e) {
     activity.runOnUiThread(new Runnable() {
-        @Override public void run() {
-          String title = "Fatal error: " + getTopLevelCauseMessage(e);
-          String msg = getRecursiveStackTrace(e);
-          TextView errorView = new TextView(activity);
-          errorView.setText(msg);
-          errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
-          ScrollView scrollingContainer = new ScrollView(activity);
-          scrollingContainer.addView(errorView);
-          Log.e(TAG, title + "\n\n" + msg);
-          DialogInterface.OnClickListener listener =
-              new DialogInterface.OnClickListener() {
-                @Override public void onClick(
-                    DialogInterface dialog, int which) {
-                  dialog.dismiss();
-                  System.exit(1);
-                }
-              };
-          AlertDialog.Builder builder =
-              new AlertDialog.Builder(activity);
-          builder
-              .setTitle(title)
-              .setView(scrollingContainer)
-              .setPositiveButton("Exit", listener).show();
-        }
-      });
+      @Override
+      public void run() {
+        String title = "Fatal error: " + getTopLevelCauseMessage(e);
+        String msg = getRecursiveStackTrace(e);
+        TextView errorView = new TextView(activity);
+        errorView.setText(msg);
+        errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
+        ScrollView scrollingContainer = new ScrollView(activity);
+        scrollingContainer.addView(errorView);
+        Log.e(TAG, title + "\n\n" + msg);
+        DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
+          @Override
+          public void onClick(DialogInterface dialog, int which) {
+            dialog.dismiss();
+            System.exit(1);
+          }
+        };
+        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+        builder.setTitle(title)
+            .setView(scrollingContainer)
+            .setPositiveButton("Exit", listener)
+            .show();
+      }
+    });
   }
 
   // Returns the Message attached to the original Cause of |t|.
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
index 139f2bd..b5e8d99 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketChannelClient.java
@@ -56,9 +56,7 @@
   /**
    * Possible WebSocket connection states.
    */
-  public enum WebSocketConnectionState {
-    NEW, CONNECTED, REGISTERED, CLOSED, ERROR
-  };
+  public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }
 
   /**
    * Callback interface for messages delivered on WebSocket.
@@ -179,8 +177,7 @@
       sendWSSMessage("DELETE", "");
     }
     // Close WebSocket in CONNECTED or ERROR states only.
-    if (state == WebSocketConnectionState.CONNECTED
-        || state == WebSocketConnectionState.ERROR) {
+    if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
       ws.disconnect();
       state = WebSocketConnectionState.CLOSED;
 
@@ -219,16 +216,15 @@
   private void sendWSSMessage(final String method, final String message) {
     String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
     Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
-    AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
-        method, postUrl, message, new AsyncHttpEvents() {
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
           @Override
           public void onHttpError(String errorMessage) {
             reportError("WS " + method + " error: " + errorMessage);
           }
 
           @Override
-          public void onHttpComplete(String response) {
-          }
+          public void onHttpComplete(String response) {}
         });
     httpConnection.send();
   }
@@ -237,8 +233,7 @@
   // called on a looper thread.
   private void checkIfCalledOnValidThread() {
     if (Thread.currentThread() != handler.getLooper().getThread()) {
-      throw new IllegalStateException(
-          "WebSocket method is not called on valid thread");
+      throw new IllegalStateException("WebSocket method is not called on valid thread");
     }
   }
 
@@ -260,8 +255,8 @@
 
     @Override
     public void onClose(WebSocketCloseNotification code, String reason) {
-      Log.d(TAG, "WebSocket connection closed. Code: " + code
-          + ". Reason: " + reason + ". State: " + state);
+      Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+              + state);
       synchronized (closeEventLock) {
         closeEvent = true;
         closeEventLock.notify();
@@ -293,12 +288,9 @@
     }
 
     @Override
-    public void onRawTextMessage(byte[] payload) {
-    }
+    public void onRawTextMessage(byte[] payload) {}
 
     @Override
-    public void onBinaryMessage(byte[] payload) {
-    }
+    public void onBinaryMessage(byte[] payload) {}
   }
-
 }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
index 0178d69..28eef2e 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/WebSocketRTCClient.java
@@ -36,19 +36,16 @@
  * Messages to other party (with local Ice candidates and answer SDP) can
  * be sent after WebSocket connection is established.
  */
-public class WebSocketRTCClient implements AppRTCClient,
-    WebSocketChannelEvents {
+public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
   private static final String TAG = "WSRTCClient";
   private static final String ROOM_JOIN = "join";
   private static final String ROOM_MESSAGE = "message";
   private static final String ROOM_LEAVE = "leave";
 
-  private enum ConnectionState {
-    NEW, CONNECTED, CLOSED, ERROR
-  };
-  private enum MessageType {
-    MESSAGE, LEAVE
-  };
+  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
+
+  private enum MessageType { MESSAGE, LEAVE }
+
   private final Handler handler;
   private boolean initiator;
   private SignalingEvents events;
@@ -101,8 +98,7 @@
 
     RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
       @Override
-      public void onSignalingParametersReady(
-          final SignalingParameters params) {
+      public void onSignalingParametersReady(final SignalingParameters params) {
         WebSocketRTCClient.this.handler.post(new Runnable() {
           @Override
           public void run() {
@@ -134,37 +130,32 @@
   }
 
   // Helper functions to get connection, post message and leave message URLs
-  private String getConnectionUrl(
-      RoomConnectionParameters connectionParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/"
-        + connectionParameters.roomId;
+  private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId;
   }
 
-  private String getMessageUrl(RoomConnectionParameters connectionParameters,
-      SignalingParameters signalingParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/"
-      + connectionParameters.roomId + "/" + signalingParameters.clientId;
+  private String getMessageUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+        + "/" + signalingParameters.clientId;
   }
 
-  private String getLeaveUrl(RoomConnectionParameters connectionParameters,
-      SignalingParameters signalingParameters) {
-    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/"
-        + connectionParameters.roomId + "/" + signalingParameters.clientId;
+  private String getLeaveUrl(
+      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
+    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+        + signalingParameters.clientId;
   }
 
   // Callback issued when room parameters are extracted. Runs on local
   // looper thread.
-  private void signalingParametersReady(
-      final SignalingParameters signalingParameters) {
+  private void signalingParametersReady(final SignalingParameters signalingParameters) {
     Log.d(TAG, "Room connection completed.");
     if (connectionParameters.loopback
-        && (!signalingParameters.initiator
-            || signalingParameters.offerSdp != null)) {
+        && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
       reportError("Loopback room is busy.");
       return;
     }
-    if (!connectionParameters.loopback
-        && !signalingParameters.initiator
+    if (!connectionParameters.loopback && !signalingParameters.initiator
         && signalingParameters.offerSdp == null) {
       Log.w(TAG, "No offer SDP in room response.");
     }
@@ -200,8 +191,7 @@
         if (connectionParameters.loopback) {
           // In loopback mode rename this offer to answer and route it back.
           SessionDescription sdpAnswer = new SessionDescription(
-              SessionDescription.Type.fromCanonicalForm("answer"),
-              sdp.description);
+              SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
           events.onRemoteDescription(sdpAnswer);
         }
       }
@@ -263,7 +253,7 @@
       public void run() {
         JSONObject json = new JSONObject();
         jsonPut(json, "type", "remove-candidates");
-        JSONArray jsonArray =  new JSONArray();
+        JSONArray jsonArray = new JSONArray();
         for (final IceCandidate candidate : candidates) {
           jsonArray.put(toJsonCandidate(candidate));
         }
@@ -308,15 +298,14 @@
         } else if (type.equals("remove-candidates")) {
           JSONArray candidateArray = json.getJSONArray("candidates");
           IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
-          for (int i =0; i < candidateArray.length(); ++i) {
+          for (int i = 0; i < candidateArray.length(); ++i) {
             candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
           }
           events.onRemoteIceCandidatesRemoved(candidates);
         } else if (type.equals("answer")) {
           if (initiator) {
             SessionDescription sdp = new SessionDescription(
-                SessionDescription.Type.fromCanonicalForm(type),
-                json.getString("sdp"));
+                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
             events.onRemoteDescription(sdp);
           } else {
             reportError("Received answer for call initiator: " + msg);
@@ -324,8 +313,7 @@
         } else if (type.equals("offer")) {
           if (!initiator) {
             SessionDescription sdp = new SessionDescription(
-                SessionDescription.Type.fromCanonicalForm(type),
-                json.getString("sdp"));
+                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
             events.onRemoteDescription(sdp);
           } else {
             reportError("Received offer for call receiver: " + msg);
@@ -389,28 +377,28 @@
       logInfo += ". Message: " + message;
     }
     Log.d(TAG, "C->GAE: " + logInfo);
-    AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
-      "POST", url, message, new AsyncHttpEvents() {
-        @Override
-        public void onHttpError(String errorMessage) {
-          reportError("GAE POST error: " + errorMessage);
-        }
+    AsyncHttpURLConnection httpConnection =
+        new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
+          @Override
+          public void onHttpError(String errorMessage) {
+            reportError("GAE POST error: " + errorMessage);
+          }
 
-        @Override
-        public void onHttpComplete(String response) {
-          if (messageType == MessageType.MESSAGE) {
-            try {
-              JSONObject roomJson = new JSONObject(response);
-              String result = roomJson.getString("result");
-              if (!result.equals("SUCCESS")) {
-                reportError("GAE POST error: " + result);
+          @Override
+          public void onHttpComplete(String response) {
+            if (messageType == MessageType.MESSAGE) {
+              try {
+                JSONObject roomJson = new JSONObject(response);
+                String result = roomJson.getString("result");
+                if (!result.equals("SUCCESS")) {
+                  reportError("GAE POST error: " + result);
+                }
+              } catch (JSONException e) {
+                reportError("GAE POST JSON error: " + e.toString());
               }
-            } catch (JSONException e) {
-              reportError("GAE POST JSON error: " + e.toString());
             }
           }
-        }
-      });
+        });
     httpConnection.send();
   }
 
@@ -425,8 +413,7 @@
 
   // Converts a JSON candidate to a Java object.
   IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
-    return new IceCandidate(json.getString("id"),
-                            json.getInt("label"),
-                            json.getString("candidate"));
+    return new IceCandidate(
+        json.getString("id"), json.getInt("label"), json.getString("candidate"));
   }
 }
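
A worked example of the URL helpers reformatted above, with hypothetical room and
client ids; the literal path segments correspond to the ROOM_JOIN, ROOM_MESSAGE and
ROOM_LEAVE constants from the diff:

  String roomUrl = "https://appr.tc"; // hypothetical
  String roomId = "abc"; // hypothetical
  String clientId = "123"; // hypothetical
  String joinUrl = roomUrl + "/" + "join" + "/" + roomId; // https://appr.tc/join/abc
  String messageUrl = roomUrl + "/" + "message" + "/" + roomId + "/" + clientId; // .../message/abc/123
  String leaveUrl = roomUrl + "/" + "leave" + "/" + roomId + "/" + clientId; // .../leave/abc/123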
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
index 95cda34..ee7f8c0 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AppRTCUtils.java
@@ -17,9 +17,7 @@
  * AppRTCUtils provides helper functions for managing thread safety.
  */
 public final class AppRTCUtils {
-
-  private AppRTCUtils() {
-  }
+  private AppRTCUtils() {}
 
   /** Helper method which throws an exception when an assertion has failed. */
   public static void assertIsTrue(boolean condition) {
@@ -30,20 +28,20 @@
 
   /** Helper method for building a string of thread information. */
   public static String getThreadInfo() {
-    return "@[name=" + Thread.currentThread().getName()
-        + ", id=" + Thread.currentThread().getId() + "]";
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
   }
 
   /** Information about the current build, taken from system properties. */
   public static void logDeviceInfo(String tag) {
     Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
-        + "Release: " + Build.VERSION.RELEASE + ", "
-        + "Brand: " + Build.BRAND + ", "
-        + "Device: " + Build.DEVICE + ", "
-        + "Id: " + Build.ID + ", "
-        + "Hardware: " + Build.HARDWARE + ", "
-        + "Manufacturer: " + Build.MANUFACTURER + ", "
-        + "Model: " + Build.MODEL + ", "
-        + "Product: " + Build.PRODUCT);
+            + "Release: " + Build.VERSION.RELEASE + ", "
+            + "Brand: " + Build.BRAND + ", "
+            + "Device: " + Build.DEVICE + ", "
+            + "Id: " + Build.ID + ", "
+            + "Hardware: " + Build.HARDWARE + ", "
+            + "Manufacturer: " + Build.MANUFACTURER + ", "
+            + "Model: " + Build.MODEL + ", "
+            + "Product: " + Build.PRODUCT);
   }
 }
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
index ee2a4ab..1f0b8bb 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/util/AsyncHttpURLConnection.java
@@ -38,8 +38,7 @@
     void onHttpComplete(String response);
   }
 
-  public AsyncHttpURLConnection(String method, String url, String message,
-      AsyncHttpEvents events) {
+  public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
     this.method = method;
     this.url = url;
     this.message = message;
@@ -61,8 +60,7 @@
 
   private void sendHttpMessage() {
     try {
-      HttpURLConnection connection =
-        (HttpURLConnection) new URL(url).openConnection();
+      HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
       byte[] postData = new byte[0];
       if (message != null) {
         postData = message.getBytes("UTF-8");
@@ -96,8 +94,8 @@
       // Get response.
       int responseCode = connection.getResponseCode();
       if (responseCode != 200) {
-        events.onHttpError("Non-200 response to " + method + " to URL: "
-            + url + " : " + connection.getHeaderField(null));
+        events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+            + connection.getHeaderField(null));
         connection.disconnect();
         return;
       }
@@ -109,8 +107,7 @@
     } catch (SocketTimeoutException e) {
       events.onHttpError("HTTP " + method + " to " + url + " timeout");
     } catch (IOException e) {
-      events.onHttpError("HTTP " + method + " to " + url + " error: "
-          + e.getMessage());
+      events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
     }
   }
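
A minimal usage sketch of this class, mirroring the call sites reformatted in
RoomParametersFetcher and WebSocketRTCClient above. The URL is hypothetical; a null
message sends an empty POST body, per the null check in sendHttpMessage():

  AsyncHttpURLConnection conn =
      new AsyncHttpURLConnection("POST", "https://appr.tc/join/abc", null, new AsyncHttpEvents() {
        @Override
        public void onHttpError(String errorMessage) {
          Log.e("Example", "POST failed: " + errorMessage);
        }

        @Override
        public void onHttpComplete(String response) {
          Log.d("Example", "POST response: " + response);
        }
      });
  conn.send();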
 
diff --git a/webrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java b/webrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java
index 9c72c47..6550b6b 100644
--- a/webrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java
+++ b/webrtc/examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java
@@ -62,6 +62,7 @@
   @Test
   public void testValidIpPattern() {
     // Strings that should match the pattern.
+    // clang-format off
     final String[] ipAddresses = new String[] {
         "0.0.0.0",
         "127.0.0.1",
@@ -79,6 +80,7 @@
         "[::1]:8888",
         "[2001:0db8:85a3:0000:0000:8a2e:0370:7946]:8888"
     };
+    // clang-format on
 
     for (String ip : ipAddresses) {
       assertTrue(ip + " didn't match IP_PATTERN even though it should.",
@@ -89,6 +91,7 @@
   @Test
   public void testInvalidIpPattern() {
     // Strings that shouldn't match the pattern.
+    // clang-format off
     final String[] invalidIpAddresses = new String[] {
         "Hello, World!",
         "aaaa",
@@ -96,6 +99,7 @@
         "[hello world]",
         "hello:world"
     };
+    // clang-format on
 
     for (String invalidIp : invalidIpAddresses) {
       assertFalse(invalidIp + " matched IP_PATTERN even though it shouldn't.",
@@ -121,8 +125,8 @@
     verify(clientEvents, timeout(NETWORK_TIMEOUT))
         .onConnectedToRoom(any(AppRTCClient.SignalingParameters.class));
 
-    SessionDescription answerSdp
-        = new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
+    SessionDescription answerSdp =
+        new SessionDescription(SessionDescription.Type.ANSWER, DUMMY_SDP);
     client.sendAnswerSdp(answerSdp);
     verify(serverEvents, timeout(NETWORK_TIMEOUT))
         .onRemoteDescription(isNotNull(SessionDescription.class));
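
The guarded address tables above feed assertions on DirectRTCClient.IP_PATTERN. A
hedged sketch of the check being exercised (the pattern's name comes from the test
messages above; the exact matcher call is an assumption):

  // Assumes IP_PATTERN is a java.util.regex.Pattern reachable from the test.
  assertTrue(DirectRTCClient.IP_PATTERN.matcher("192.168.0.1:8888").matches());
  assertFalse(DirectRTCClient.IP_PATTERN.matcher("hello:world").matches());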
diff --git a/webrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java b/webrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java
index be8487f..b20157f 100644
--- a/webrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java
+++ b/webrtc/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java
@@ -52,7 +52,6 @@
   private TCPChannelClient server;
   private TCPChannelClient client;
 
-
   @Before
   public void setUp() {
     ShadowLog.stream = System.out;
diff --git a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
index 5b60e71..aef9b57 100644
--- a/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
+++ b/webrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
@@ -37,8 +37,8 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-public class PeerConnectionClientTest extends InstrumentationTestCase
-    implements PeerConnectionEvents {
+public class PeerConnectionClientTest
+    extends InstrumentationTestCase implements PeerConnectionEvents {
   private static final String TAG = "RTCClientTest";
   private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
   private static final int WAIT_TIMEOUT = 7000;
@@ -103,8 +103,8 @@
     public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
       if (!renderFrameCalled) {
         if (rendererName != null) {
-          Log.d(TAG, rendererName + " render frame: "
-              + frame.rotatedWidth() + " x " + frame.rotatedHeight());
+          Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x "
+                  + frame.rotatedHeight());
         } else {
           Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight());
         }
@@ -114,11 +114,9 @@
       doneRendering.countDown();
     }
 
-
     // This method shouldn't hold any locks or touch member variables since it
     // blocks.
-    public boolean waitForFramesRendered(int timeoutMs)
-        throws InterruptedException {
+    public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
       doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
       return (doneRendering.getCount() <= 0);
     }
@@ -136,7 +134,7 @@
 
   @Override
   public void onIceCandidate(final IceCandidate candidate) {
-    synchronized(iceCandidateEvent) {
+    synchronized (iceCandidateEvent) {
       Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString());
       if (loopback) {
         // Loopback local ICE candidate in a separate thread to avoid adding
@@ -161,7 +159,7 @@
   @Override
   public void onIceConnected() {
     Log.d(TAG, "ICE Connected");
-    synchronized(iceConnectedEvent) {
+    synchronized (iceConnectedEvent) {
       isIceConnected = true;
       iceConnectedEvent.notifyAll();
     }
@@ -170,7 +168,7 @@
   @Override
   public void onIceDisconnected() {
     Log.d(TAG, "ICE Disconnected");
-    synchronized(iceConnectedEvent) {
+    synchronized (iceConnectedEvent) {
       isIceConnected = false;
       iceConnectedEvent.notifyAll();
     }
@@ -179,7 +177,7 @@
   @Override
   public void onPeerConnectionClosed() {
     Log.d(TAG, "PeerConnection closed");
-    synchronized(closeEvent) {
+    synchronized (closeEvent) {
       isClosed = true;
       closeEvent.notifyAll();
     }
@@ -191,13 +189,11 @@
   }
 
   @Override
-  public void onPeerConnectionStatsReady(StatsReport[] reports) {
-  }
+  public void onPeerConnectionStatsReady(StatsReport[] reports) {}
 
   // Helper wait functions.
-  private boolean waitForLocalSDP(int timeoutMs)
-      throws InterruptedException {
-    synchronized(localSdpEvent) {
+  private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException {
+    synchronized (localSdpEvent) {
       if (localSdp == null) {
         localSdpEvent.wait(timeoutMs);
       }
@@ -205,9 +201,8 @@
     }
   }
 
-  private boolean waitForIceCandidates(int timeoutMs)
-      throws InterruptedException {
-    synchronized(iceCandidateEvent) {
+  private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
+    synchronized (iceCandidateEvent) {
       if (iceCandidates.size() == 0) {
         iceCandidateEvent.wait(timeoutMs);
       }
@@ -215,9 +210,8 @@
     }
   }
 
-  private boolean waitForIceConnected(int timeoutMs)
-      throws InterruptedException {
-    synchronized(iceConnectedEvent) {
+  private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
+    synchronized (iceConnectedEvent) {
       if (!isIceConnected) {
         iceConnectedEvent.wait(timeoutMs);
       }
@@ -229,9 +223,8 @@
     }
   }
 
-  private boolean waitForPeerConnectionClosed(int timeoutMs)
-      throws InterruptedException {
-    synchronized(closeEvent) {
+  private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
+    synchronized (closeEvent) {
       if (!isClosed) {
         closeEvent.wait(timeoutMs);
       }
@@ -239,15 +232,14 @@
     }
   }
 
-  PeerConnectionClient createPeerConnectionClient(
-      MockRenderer localRenderer, MockRenderer remoteRenderer,
-      PeerConnectionParameters peerConnectionParameters, EglBase.Context eglContext) {
-    List<PeerConnection.IceServer> iceServers =
-        new LinkedList<PeerConnection.IceServer>();
-    SignalingParameters signalingParameters = new SignalingParameters(
-        iceServers, true, // iceServers, initiator.
-        null, null, null, // clientId, wssUrl, wssPostUrl.
-        null, null); // offerSdp, iceCandidates.
+  PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
+      MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
+      EglBase.Context eglContext) {
+    List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
+    SignalingParameters signalingParameters =
+        new SignalingParameters(iceServers, true, // iceServers, initiator.
+            null, null, null, // clientId, wssUrl, wssPostUrl.
+            null, null); // offerSdp, iceCandidates.
 
     PeerConnectionClient client = PeerConnectionClient.getInstance();
     PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
@@ -263,8 +255,7 @@
 
   private PeerConnectionParameters createParametersForAudioCall() {
     PeerConnectionParameters peerConnectionParameters =
-        new PeerConnectionParameters(
-            false, /* videoCallEnabled */
+        new PeerConnectionParameters(false, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
@@ -281,22 +272,18 @@
             "OPUS", /* audioCodec */
             false, /* noAudioProcessing */
             false, /* aecDump */
-            false /* useOpenSLES */,
-            false /* disableBuiltInAEC */,
-            false /* disableBuiltInAGC */,
-            false /* disableBuiltInNS */,
-            false /* enableLevelControl */);
+            false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+            false /* disableBuiltInNS */, false /* enableLevelControl */);
     return peerConnectionParameters;
   }
 
   private PeerConnectionParameters createParametersForVideoCall(
       String videoCodec, boolean captureToTexture) {
-    final boolean useCamera2 = captureToTexture
-        && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
+    final boolean useCamera2 =
+        captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
 
     PeerConnectionParameters peerConnectionParameters =
-        new PeerConnectionParameters(
-            true, /* videoCallEnabled */
+        new PeerConnectionParameters(true, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
@@ -313,11 +300,8 @@
             "OPUS", /* audioCodec */
             false, /* noAudioProcessing */
             false, /* aecDump */
-            false /* useOpenSLES */,
-            false /* disableBuiltInAEC */,
-            false /* disableBuiltInAGC */,
-            false /* disableBuiltInNS */,
-            false /* enableLevelControl */);
+            false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
+            false /* disableBuiltInNS */, false /* enableLevelControl */);
     return peerConnectionParameters;
   }
 
@@ -338,26 +322,23 @@
   }
 
   @SmallTest
-  public void testSetLocalOfferMakesVideoFlowLocally()
-      throws InterruptedException {
+  public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
-    pcClient = createPeerConnectionClient(
-        localRenderer, new MockRenderer(0, null),
+    pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
         createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
 
     // Wait for local SDP and ice candidates set events.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
-    assertTrue("ICE candidates were not generated.",
-        waitForIceCandidates(WAIT_TIMEOUT));
+    assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));
 
     // Check that local video frames were rendered.
-    assertTrue("Local video frames were not rendered.",
-        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+    assertTrue(
+        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
 
     pcClient.close();
-    assertTrue("PeerConnection close event was not received.",
-        waitForPeerConnectionClosed(WAIT_TIMEOUT));
+    assertTrue(
+        "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
   }
 
@@ -379,8 +360,7 @@
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);
 
     // Wait for ICE connection.
@@ -492,15 +472,14 @@
     eglBase = null;
 
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);
 
     // Wait for ICE connection.
     assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));
     // Check that local and remote video frames were rendered.
-    assertTrue("Local video frames were not rendered.",
-        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
+    assertTrue(
+        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
     assertTrue("Remote video frames were not rendered.",
         remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
 
@@ -524,7 +503,6 @@
     doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
   }
 
-
   // Checks if the default front camera can be switched to the back camera
   // and then back to the front camera again.
   @SmallTest
@@ -541,8 +519,7 @@
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);
 
     // Wait for ICE connection.
@@ -588,8 +565,7 @@
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);
 
     // Wait for ICE connection.
@@ -636,8 +612,7 @@
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
     SessionDescription remoteSdp = new SessionDescription(
-        SessionDescription.Type.fromCanonicalForm("answer"),
-        localSdp.description);
+        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
     pcClient.setRemoteDescription(remoteSdp);
 
     // Wait for ICE connection.
@@ -671,5 +646,4 @@
     assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
     Log.d(TAG, "testCaptureFormatChange done.");
   }
-
 }
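
The waitFor*() helpers in this test all follow the same monitor pattern: synchronize on an event object, wait() with a timeout if the condition has not been signaled yet, and return the flag afterwards. A minimal standalone sketch of that pattern follows; the class and member names are illustrative, not part of this CL:

  // Sketch of the wait/notify pattern used by waitForLocalSDP() and friends.
  class ConditionWaiter {
    private final Object event = new Object();
    private boolean signaled = false;

    // Called from the PeerConnectionEvents callback thread.
    public void signal() {
      synchronized (event) {
        signaled = true;
        event.notifyAll();
      }
    }

    // Called from the test thread; returns false if the timeout expired first.
    public boolean await(int timeoutMs) throws InterruptedException {
      synchronized (event) {
        if (!signaled) {
          event.wait(timeoutMs);
        }
        return signaled;
      }
    }
  }

Like the test code, this uses if rather than while around wait(), so a spurious wakeup before the timeout would return false early; the generous timeouts here make the tests tolerant of that.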
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
index aaf07f1..aed8a06 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
@@ -46,6 +46,6 @@
   }
 
   public static int getSdkVersion() {
-      return Build.VERSION.SDK_INT;
+    return Build.VERSION.SDK_INT;
   }
 }
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
index 042f0ba..14295c2 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
@@ -67,8 +67,7 @@
     // Note: we're using isAcousticEchoCancelerEffectAvailable() instead of
     // AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isAcousticEchoCancelerEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAcousticEchoCancelerEffectAvailable();
   }
 
   // Checks if the device implements Automatic Gain Control (AGC).
@@ -77,8 +76,7 @@
     // Note: we're using isAutomaticGainControlEffectAvailable() instead of
     // AutomaticGainControl.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isAutomaticGainControlEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isAutomaticGainControlEffectAvailable();
   }
 
   // Checks if the device implements Noise Suppression (NS).
@@ -87,14 +85,12 @@
     // Note: we're using isNoiseSuppressorEffectAvailable() instead of
     // NoiseSuppressor.isAvailable() to avoid the expensive getEffects()
     // OS API call.
-    return WebRtcAudioUtils.runningOnJellyBeanOrHigher()
-        && isNoiseSuppressorEffectAvailable();
+    return WebRtcAudioUtils.runningOnJellyBeanOrHigher() && isNoiseSuppressorEffectAvailable();
   }
 
   // Returns true if the device is blacklisted for HW AEC usage.
   public static boolean isAcousticEchoCancelerBlacklisted() {
-    List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForAecUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
@@ -104,8 +100,7 @@
 
   // Returns true if the device is blacklisted for HW AGC usage.
   public static boolean isAutomaticGainControlBlacklisted() {
-   List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAgcUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW AGC usage!");
@@ -115,8 +110,7 @@
 
   // Returns true if the device is blacklisted for HW NS usage.
   public static boolean isNoiseSuppressorBlacklisted() {
-    List<String> blackListedModels =
-        WebRtcAudioUtils.getBlackListedModelsForNsUsage();
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage();
     boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
     if (isBlacklisted) {
       Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
@@ -129,8 +123,8 @@
   @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
-          d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
+          && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
         return true;
       }
     }
@@ -142,8 +136,8 @@
   @TargetApi(18)
   private static boolean isAutomaticGainControlExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
-          d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC)
+          && d.uuid.equals(AOSP_AUTOMATIC_GAIN_CONTROL)) {
         return true;
       }
     }
@@ -155,8 +149,7 @@
   @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
     for (Descriptor d : getAvailableEffects()) {
-      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
-          d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
         return true;
       }
     }
@@ -184,36 +177,28 @@
   // Returns true if all conditions for supporting the HW AEC are fulfilled.
   // It will not be possible to enable the HW AEC if this method returns false.
   public static boolean canUseAcousticEchoCanceler() {
-    boolean canUseAcousticEchoCanceler =
-        isAcousticEchoCancelerSupported()
+    boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported()
         && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler()
-        && !isAcousticEchoCancelerBlacklisted()
-        && !isAcousticEchoCancelerExcludedByUUID();
-    Logging.d(TAG, "canUseAcousticEchoCanceler: "
-        + canUseAcousticEchoCanceler);
+        && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID();
+    Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler);
     return canUseAcousticEchoCanceler;
   }
 
   // Returns true if all conditions for supporting the HW AGC are fulfilled.
   // It will not be possible to enable the HW AGC if this method returns false.
   public static boolean canUseAutomaticGainControl() {
-    boolean canUseAutomaticGainControl =
-        isAutomaticGainControlSupported()
+    boolean canUseAutomaticGainControl = isAutomaticGainControlSupported()
         && !WebRtcAudioUtils.useWebRtcBasedAutomaticGainControl()
-        && !isAutomaticGainControlBlacklisted()
-        && !isAutomaticGainControlExcludedByUUID();
-    Logging.d(TAG, "canUseAutomaticGainControl: "
-        + canUseAutomaticGainControl);
+        && !isAutomaticGainControlBlacklisted() && !isAutomaticGainControlExcludedByUUID();
+    Logging.d(TAG, "canUseAutomaticGainControl: " + canUseAutomaticGainControl);
     return canUseAutomaticGainControl;
   }
 
   // Returns true if all conditions for supporting the HW NS are fulfilled.
   // It will not be possible to enable the HW NS if this method returns false.
   public static boolean canUseNoiseSuppressor() {
-    boolean canUseNoiseSuppressor =
-        isNoiseSuppressorSupported()
-        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor()
-        && !isNoiseSuppressorBlacklisted()
+    boolean canUseNoiseSuppressor = isNoiseSuppressorSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted()
         && !isNoiseSuppressorExcludedByUUID();
     Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor);
     return canUseNoiseSuppressor;
@@ -300,9 +285,9 @@
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (effectTypeIsVoIP(d.type) || DEBUG) {
         Logging.d(TAG, "name: " + d.name + ", "
-            + "mode: " + d.connectMode + ", "
-            + "implementor: " + d.implementor + ", "
-            + "UUID: " + d.uuid);
+                + "mode: " + d.connectMode + ", "
+                + "implementor: " + d.implementor + ", "
+                + "UUID: " + d.uuid);
       }
     }
 
@@ -316,10 +301,9 @@
         if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
           Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
         }
-        Logging.d(TAG, "AcousticEchoCanceler: was "
-            + (enabled ? "enabled" : "disabled")
-            + ", enable: " + enable + ", is now: "
-            + (aec.getEnabled() ? "enabled" : "disabled"));
+        Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled")
+                + ", enable: " + enable + ", is now: "
+                + (aec.getEnabled() ? "enabled" : "disabled"));
       } else {
         Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
       }
@@ -335,10 +319,9 @@
         if (agc.setEnabled(enable) != AudioEffect.SUCCESS) {
           Logging.e(TAG, "Failed to set the AutomaticGainControl state");
         }
-        Logging.d(TAG, "AutomaticGainControl: was "
-            + (enabled ? "enabled" : "disabled")
-            + ", enable: " + enable + ", is now: "
-            + (agc.getEnabled() ? "enabled" : "disabled"));
+        Logging.d(TAG, "AutomaticGainControl: was " + (enabled ? "enabled" : "disabled")
+                + ", enable: " + enable + ", is now: "
+                + (agc.getEnabled() ? "enabled" : "disabled"));
       } else {
         Logging.e(TAG, "Failed to create the AutomaticGainControl instance");
       }
@@ -354,10 +337,8 @@
         if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
           Logging.e(TAG, "Failed to set the NoiseSuppressor state");
         }
-        Logging.d(TAG, "NoiseSuppressor: was "
-            + (enabled ? "enabled" : "disabled")
-            + ", enable: " + enable + ", is now: "
-            + (ns.getEnabled() ? "enabled" : "disabled"));
+        Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+                + enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
       } else {
         Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
       }
@@ -395,12 +376,9 @@
     if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
       return false;
 
-    return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
-        && isAcousticEchoCancelerSupported())
-        || (AudioEffect.EFFECT_TYPE_AGC.equals(type)
-        && isAutomaticGainControlSupported())
-        || (AudioEffect.EFFECT_TYPE_NS.equals(type)
-        && isNoiseSuppressorSupported());
+    return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+        || (AudioEffect.EFFECT_TYPE_AGC.equals(type) && isAutomaticGainControlSupported())
+        || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
   }
 
   // Helper method which throws an exception when an assertion has failed.
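
The canUse*() methods above combine three independent checks: the platform exposes the effect, Build.MODEL is not blacklisted, and the only implementation is not the AOSP software one (filtered by UUID). A hedged sketch of the AEC case using the public android.media.audiofx API; the AOSP UUID is passed in as a parameter because its value is not shown in this CL:

  import android.media.audiofx.AudioEffect;
  import android.media.audiofx.AudioEffect.Descriptor;
  import java.util.UUID;

  // Sketch: true if a platform AEC descriptor exists and the (assumed) AOSP
  // software implementation is not among them. Requires API level 18.
  static boolean hasUsableAec(UUID aospAecUuid) {
    boolean available = false;
    for (Descriptor d : AudioEffect.queryEffects()) {
      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)) {
        available = true;
        if (d.uuid.equals(aospAecUuid)) {
          // The AOSP software AEC is present; exclude it, mirroring
          // isAcousticEchoCancelerExcludedByUUID() above.
          return false;
        }
      }
    }
    return available;
  }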
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index 64c9c7c..19ee09a 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -45,8 +45,7 @@
   // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
   // Allows an app to take control over which devices to exclude from using
   // the OpenSL ES audio output path.
-  public static synchronized void setBlacklistDeviceForOpenSLESUsage(
-      boolean enable) {
+  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
     blacklistDeviceForOpenSLESUsageIsOverridden = true;
     blacklistDeviceForOpenSLESUsage = enable;
   }
@@ -62,10 +61,7 @@
 
   // List of possible audio modes.
   private static final String[] AUDIO_MODES = new String[] {
-      "MODE_NORMAL",
-      "MODE_RINGTONE",
-      "MODE_IN_CALL",
-      "MODE_IN_COMMUNICATION",
+      "MODE_NORMAL", "MODE_RINGTONE", "MODE_IN_CALL", "MODE_IN_COMMUNICATION",
   };
 
   // Private utility class that periodically checks and logs the volume level
@@ -85,9 +81,8 @@
 
     public void start() {
       timer = new Timer(THREAD_NAME);
-      timer.schedule(new LogVolumeTask(
-          audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
-          audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+      timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+                         audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
           0, TIMER_PERIOD_IN_SECONDS * 1000);
     }
 
@@ -104,12 +99,12 @@
         final int mode = audioManager.getMode();
         if (mode == AudioManager.MODE_RINGTONE) {
           Logging.d(TAG, "STREAM_RING stream volume: "
-              + audioManager.getStreamVolume(AudioManager.STREAM_RING)
-              + " (max=" + maxRingVolume + ")");
+                  + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+                  + maxRingVolume + ")");
         } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
           Logging.d(TAG, "VOICE_CALL stream volume: "
-              + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
-              + " (max=" + maxVoiceCallVolume + ")");
+                  + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+                  + maxVoiceCallVolume + ")");
         }
       }
     }
@@ -147,16 +142,15 @@
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioManager = nativeAudioManager;
-    audioManager = (AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE);
+    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
     if (DEBUG) {
       WebRtcAudioUtils.logDeviceInfo(TAG);
     }
     volumeLogger = new VolumeLogger(audioManager);
     storeAudioParameters();
     nativeCacheAudioParameters(sampleRate, channels, hardwareAEC, hardwareAGC, hardwareNS,
-            lowLatencyOutput, lowLatencyInput, proAudio, outputBufferSize, inputBufferSize,
-            nativeAudioManager);
+        lowLatencyOutput, lowLatencyInput, proAudio, outputBufferSize, inputBufferSize,
+        nativeAudioManager);
   }
 
   private boolean init() {
@@ -183,9 +177,9 @@
   }
 
   private boolean isDeviceBlacklistedForOpenSLESUsage() {
-    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden ?
-        blacklistDeviceForOpenSLESUsage :
-        WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
+    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
+        ? blacklistDeviceForOpenSLESUsage
+        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
     if (blacklisted) {
       Logging.e(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
     }
@@ -203,24 +197,21 @@
     lowLatencyOutput = isLowLatencyOutputSupported();
     lowLatencyInput = isLowLatencyInputSupported();
     proAudio = isProAudioSupported();
-    outputBufferSize = lowLatencyOutput ?
-        getLowLatencyOutputFramesPerBuffer() :
-        getMinOutputFrameSize(sampleRate, channels);
+    outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
+                                        : getMinOutputFrameSize(sampleRate, channels);
     inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
                                       : getMinInputFrameSize(sampleRate, channels);
   }
 
   // Gets the current earpiece state.
   private boolean hasEarpiece() {
-    return context.getPackageManager().hasSystemFeature(
-        PackageManager.FEATURE_TELEPHONY);
+    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
   }
 
   // Returns true if low-latency audio output is supported.
   private boolean isLowLatencyOutputSupported() {
-    return isOpenSLESSupported() &&
-        context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+    return isOpenSLESSupported()
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
   }
 
   // Returns true if low-latency audio input is supported.
@@ -231,16 +222,14 @@
     // as well. The NDK doc states that: "As of API level 21, lower latency
     // audio input is supported on select devices. To take advantage of this
     // feature, first confirm that lower latency output is available".
-    return WebRtcAudioUtils.runningOnLollipopOrHigher() &&
-        isLowLatencyOutputSupported();
+    return WebRtcAudioUtils.runningOnLollipopOrHigher() && isLowLatencyOutputSupported();
   }
 
   // Returns true if the device has professional audio functionality and
   // therefore supports the lowest possible round-trip latency.
   private boolean isProAudioSupported() {
     return WebRtcAudioUtils.runningOnMarshmallowOrHigher()
-        && context.getPackageManager().hasSystemFeature(
-            PackageManager.FEATURE_AUDIO_PRO);
+        && context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO);
   }
 
   // Returns the native output sample rate for this device's output stream.
@@ -254,8 +243,8 @@
     // Default can be overridden by WebRtcAudioUtils.setDefaultSampleRateHz().
     // If so, use that value and return here.
     if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
-      Logging.d(TAG, "Default sample rate is overriden to " +
-          WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
+      Logging.d(TAG, "Default sample rate is overriden to "
+              + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
       return WebRtcAudioUtils.getDefaultSampleRateHz();
     }
     // No overrides available. Deliver best possible estimate based on default
@@ -272,11 +261,9 @@
 
   @TargetApi(17)
   private int getSampleRateOnJellyBeanMR10OrHigher() {
-    String sampleRateString = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
-    return (sampleRateString == null)
-        ? WebRtcAudioUtils.getDefaultSampleRateHz()
-        : Integer.parseInt(sampleRateString);
+    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
+                                      : Integer.parseInt(sampleRateString);
   }
 
   // Returns the native output buffer size for low-latency output streams.
@@ -286,10 +273,9 @@
     if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
       return DEFAULT_FRAME_PER_BUFFER;
     }
-    String framesPerBuffer = audioManager.getProperty(
-        AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
-    return framesPerBuffer == null ?
-        DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+    String framesPerBuffer =
+        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
   }
 
   // Returns true if the device supports an audio effect (AEC, AGC or NS).
@@ -322,8 +308,8 @@
       return -1;
     }
     return AudioTrack.getMinBufferSize(
-        sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }
 
   // Returns the native input buffer size for input streams.
@@ -338,9 +324,9 @@
   private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
     final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
     assertTrue(numChannels == CHANNELS);
-    return AudioRecord.getMinBufferSize(sampleRateInHz,
-        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) /
-        bytesPerFrame;
+    return AudioRecord.getMinBufferSize(
+               sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
   }
 
   // Returns true if OpenSL ES audio is supported.
@@ -357,7 +343,6 @@
   }
 
   private native void nativeCacheAudioParameters(int sampleRate, int channels, boolean hardwareAEC,
-          boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput,
-          boolean lowLatencyInput, boolean proAudio, int outputBufferSize, int inputBufferSize,
-          long nativeAudioManager);
+      boolean hardwareAGC, boolean hardwareNS, boolean lowLatencyOutput, boolean lowLatencyInput,
+      boolean proAudio, int outputBufferSize, int inputBufferSize, long nativeAudioManager);
 }
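
storeAudioParameters() above picks the output buffer size from two sources: the OS-reported low-latency frames-per-buffer when available, otherwise AudioTrack's minimum buffer size converted from bytes to frames. A sketch of that fallback, assuming mono 16-bit PCM; the 256-frame default is an assumption, since DEFAULT_FRAME_PER_BUFFER's value is not shown in this CL:

  import android.media.AudioFormat;
  import android.media.AudioManager;
  import android.media.AudioTrack;

  // Sketch of the frames-per-buffer selection above. Requires API level 17
  // for AudioManager.getProperty().
  static int outputFramesPerBuffer(AudioManager audioManager, int sampleRateInHz) {
    String framesPerBuffer =
        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    if (framesPerBuffer != null) {
      return Integer.parseInt(framesPerBuffer); // Low-latency path.
    }
    final int bytesPerFrame = 1 /* channel */ * 2 /* bytes per 16-bit sample */;
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
        sampleRateInHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    return minBufferSizeInBytes > 0 ? minBufferSizeInBytes / bytesPerFrame
                                    : 256; // Assumed default, see note above.
  }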
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
index 181910a..aa9608d 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
@@ -23,7 +23,7 @@
 import java.nio.ByteBuffer;
 import java.util.concurrent.TimeUnit;
 
-public class  WebRtcAudioRecord {
+public class WebRtcAudioRecord {
   private static final boolean DEBUG = false;
 
   private static final String TAG = "WebRtcAudioRecord";
@@ -77,8 +77,7 @@
     public void run() {
       Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
       Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
-      assertTrue(audioRecord.getRecordingState()
-          == AudioRecord.RECORDSTATE_RECORDING);
+      assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
 
       long lastTime = System.nanoTime();
       while (keepAlive) {
@@ -90,15 +89,14 @@
           }
           nativeDataIsRecorded(bytesRead, nativeAudioRecord);
         } else {
-          Logging.e(TAG,"AudioRecord.read failed: " + bytesRead);
+          Logging.e(TAG, "AudioRecord.read failed: " + bytesRead);
           if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
             keepAlive = false;
           }
         }
         if (DEBUG) {
           long nowTime = System.nanoTime();
-          long durationInMs =
-              TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
+          long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
           lastTime = nowTime;
           Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
         }
@@ -159,10 +157,8 @@
   }
 
   private int initRecording(int sampleRate, int channels) {
-    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" +
-        channels + ")");
-    if (!WebRtcAudioUtils.hasPermission(
-        context, android.Manifest.permission.RECORD_AUDIO)) {
+    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    if (!WebRtcAudioUtils.hasPermission(context, android.Manifest.permission.RECORD_AUDIO)) {
       Logging.e(TAG, "RECORD_AUDIO permission is missing");
       return -1;
     }
@@ -184,11 +180,8 @@
     // an AudioRecord object, in byte units.
     // Note that this size doesn't guarantee a smooth recording under load.
     int minBufferSize = AudioRecord.getMinBufferSize(
-          sampleRate,
-          AudioFormat.CHANNEL_IN_MONO,
-          AudioFormat.ENCODING_PCM_16BIT);
-    if (minBufferSize == AudioRecord.ERROR
-        || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+        sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
       Logging.e(TAG, "AudioRecord.getMinBufferSize failed: " + minBufferSize);
       return -1;
     }
@@ -197,43 +190,38 @@
     // Use a larger buffer size than the minimum required when creating the
     // AudioRecord instance to ensure smooth recording under load. It has been
     // verified that it does not increase the actual recording latency.
-    int bufferSizeInBytes =
-        Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+    int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
     Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
     try {
-      audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
-                                    sampleRate,
-                                    AudioFormat.CHANNEL_IN_MONO,
-                                    AudioFormat.ENCODING_PCM_16BIT,
-                                    bufferSizeInBytes);
+      audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION, sampleRate,
+          AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
     } catch (IllegalArgumentException e) {
-      Logging.e(TAG,e.getMessage());
+      Logging.e(TAG, e.getMessage());
       return -1;
     }
-    if (audioRecord == null ||
-        audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+    if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
       Logging.e(TAG, "Failed to create a new AudioRecord instance");
       return -1;
     }
     Logging.d(TAG, "AudioRecord "
-        + "session ID: " + audioRecord.getAudioSessionId() + ", "
-        + "audio format: " + audioRecord.getAudioFormat() + ", "
-        + "channels: " + audioRecord.getChannelCount() + ", "
-        + "sample rate: " + audioRecord.getSampleRate());
+            + "session ID: " + audioRecord.getAudioSessionId() + ", "
+            + "audio format: " + audioRecord.getAudioFormat() + ", "
+            + "channels: " + audioRecord.getChannelCount() + ", "
+            + "sample rate: " + audioRecord.getSampleRate());
     if (effects != null) {
       effects.enable(audioRecord.getAudioSessionId());
     }
     // TODO(phoglund): put back audioRecord.getBufferSizeInFrames when
     // all known downstream users support M.
     // if (WebRtcAudioUtils.runningOnMOrHigher()) {
-      // Returns the frame count of the native AudioRecord buffer. This is
-      // greater than or equal to the bufferSizeInBytes converted to frame
-      // units. The native frame count may be enlarged to accommodate the
-      // requirements of the source on creation or if the AudioRecord is
-      // subsequently rerouted.
+    // Returns the frame count of the native AudioRecord buffer. This is
+    // greater than or equal to the bufferSizeInBytes converted to frame
+    // units. The native frame count may be enlarged to accommodate the
+    // requirements of the source on creation or if the AudioRecord is
+    // subsequently rerouted.
 
-      // Logging.d(TAG, "bufferSizeInFrames: "
-      //     + audioRecord.getBufferSizeInFrames());
+    // Logging.d(TAG, "bufferSizeInFrames: "
+    //     + audioRecord.getBufferSizeInFrames());
     //}
     return framesPerBuffer;
   }
@@ -261,8 +249,7 @@
     Logging.d(TAG, "stopRecording");
     assertTrue(audioThread != null);
     audioThread.stopThread();
-    if (!ThreadUtils.joinUninterruptibly(
-        audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
       Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
     }
     audioThread = null;
@@ -281,15 +268,14 @@
     }
   }
 
-  private native void nativeCacheDirectBufferAddress(
-      ByteBuffer byteBuffer, long nativeAudioRecord);
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
 
   private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
 
   // Sets all recorded samples to zero if |mute| is true, i.e., ensures that
   // the microphone is muted.
   public static void setMicrophoneMute(boolean mute) {
-     Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+    Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
     microphoneMute = mute;
   }
 }
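
initRecording() above deliberately over-allocates the AudioRecord buffer, taking max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()) so recording stays smooth under load without increasing latency. A condensed sketch of that setup; the factor of 2 is an assumption, since BUFFER_SIZE_FACTOR's value is not shown in this CL:

  import android.media.AudioFormat;
  import android.media.AudioRecord;
  import android.media.MediaRecorder.AudioSource;

  // Sketch of the AudioRecord setup in initRecording() above.
  static AudioRecord createRecorder(int sampleRateInHz, int capacityInBytes) {
    int minBufferSize = AudioRecord.getMinBufferSize(
        sampleRateInHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
      return null; // Parameters not supported on this device.
    }
    // Over-allocate relative to the minimum (assumed factor of 2) so recording
    // stays smooth under load.
    int bufferSizeInBytes = Math.max(2 * minBufferSize, capacityInBytes);
    try {
      AudioRecord record = new AudioRecord(AudioSource.VOICE_COMMUNICATION, sampleRateInHz,
          AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
      return record.getState() == AudioRecord.STATE_INITIALIZED ? record : null;
    } catch (IllegalArgumentException e) {
      return null; // The constructor rejects invalid parameter combinations.
    }
  }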
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 4ce35c6..c287431 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -77,7 +77,7 @@
         audioTrack.play();
         assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
       } catch (IllegalStateException e) {
-          Logging.e(TAG, "AudioTrack.play failed: " + e.getMessage());
+        Logging.e(TAG, "AudioTrack.play failed: " + e.getMessage());
         return;
       }
 
@@ -155,19 +155,16 @@
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.context = context;
     this.nativeAudioTrack = nativeAudioTrack;
-    audioManager = (AudioManager) context.getSystemService(
-        Context.AUDIO_SERVICE);
+    audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
     if (DEBUG) {
       WebRtcAudioUtils.logDeviceInfo(TAG);
     }
   }
 
   private boolean initPlayout(int sampleRate, int channels) {
-    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels="
-        + channels + ")");
+    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
-    byteBuffer = byteBuffer.allocateDirect(
-        bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+    byteBuffer = byteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
     Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
     emptyBytes = new byte[byteBuffer.capacity()];
     // Rather than passing the ByteBuffer with every callback (requiring
@@ -180,9 +177,7 @@
     // Note that this size doesn't guarantee a smooth playback under load.
     // TODO(henrika): should we extend the buffer size to avoid glitches?
     final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
-        sampleRate,
-        AudioFormat.CHANNEL_OUT_MONO,
-        AudioFormat.ENCODING_PCM_16BIT);
+        sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
     Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
     // For the streaming mode, data must be written to the audio sink in
     // chunks of size (given by byteBuffer.capacity()) less than or equal
@@ -204,12 +199,9 @@
       // Create an AudioTrack object and initialize its associated audio buffer.
       // The size of this buffer determines how long an AudioTrack can play
       // before running out of data.
-      audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
-                                  sampleRate,
-                                  AudioFormat.CHANNEL_OUT_MONO,
-                                  AudioFormat.ENCODING_PCM_16BIT,
-                                  minBufferSizeInBytes,
-                                  AudioTrack.MODE_STREAM);
+      audioTrack =
+          new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
+              AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes, AudioTrack.MODE_STREAM);
     } catch (IllegalArgumentException e) {
       Logging.d(TAG, e.getMessage());
       return false;
@@ -290,8 +282,7 @@
     }
   }
 
-  private native void nativeCacheDirectBufferAddress(
-      ByteBuffer byteBuffer, long nativeAudioRecord);
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
 
   private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
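
initPlayout() above sizes the direct ByteBuffer to bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND), i.e. one callback's worth of audio, and plays through an AudioTrack opened in MODE_STREAM with the minimum buffer size. A sketch of the arithmetic, assuming mono 16-bit PCM and 100 buffers per second (10 ms chunks); both constants' values are assumptions, as they are not shown in this CL:

  import android.media.AudioFormat;
  import android.media.AudioManager;
  import android.media.AudioTrack;
  import java.nio.ByteBuffer;

  // Sketch of the playout setup in initPlayout() above.
  static AudioTrack createPlayoutTrack(int sampleRateInHz) {
    final int bytesPerFrame = 1 /* channel */ * 2 /* bytes per 16-bit sample */;
    // One assumed 10 ms chunk: e.g. 48000 Hz -> 480 frames -> 960 bytes. In the
    // real code this direct buffer is handed to the native layer once via
    // nativeCacheDirectBufferAddress().
    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRateInHz / 100));
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
        sampleRateInHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // MODE_STREAM: data is written in chunks no larger than the track buffer.
    return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSizeInBytes,
        AudioTrack.MODE_STREAM);
  }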
 
diff --git a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
index 63eb13b..420633d 100644
--- a/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
+++ b/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -28,25 +28,23 @@
   // the low latency output mode in combination with OpenSL ES.
   // The device name is given by Build.MODEL.
   private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] {
-    // This list is currently empty ;-)
+      // This list is currently empty ;-)
   };
 
   // List of devices where it has been verified that the built-in effect is
   // bad and where it makes sense to avoid it and rely on the native WebRTC
   // version instead. The device name is given by Build.MODEL.
   private static final String[] BLACKLISTED_AEC_MODELS = new String[] {
-      "D6503",      // Sony Xperia Z2 D6503
-      "ONE A2005",  // OnePlus 2
-      "MotoG3",     // Moto G (3rd Generation)
+      "D6503", // Sony Xperia Z2 D6503
+      "ONE A2005", // OnePlus 2
+      "MotoG3", // Moto G (3rd Generation)
   };
   private static final String[] BLACKLISTED_AGC_MODELS = new String[] {
-      "Nexus 10",
-      "Nexus 9",
+      "Nexus 10", "Nexus 9",
   };
   private static final String[] BLACKLISTED_NS_MODELS = new String[] {
-      "Nexus 10",
-      "Nexus 9",
-      "ONE A2005",  // OnePlus 2
+      "Nexus 10", "Nexus 9",
+      "ONE A2005", // OnePlus 2
   };
 
   // Use 16kHz as the default sample rate. A higher sample rate might prevent
@@ -63,16 +61,13 @@
 
   // Call these methods if any hardware-based effect shall be replaced by a
   // software-based version provided by the WebRTC stack instead.
-  public static synchronized void setWebRtcBasedAcousticEchoCanceler(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
     useWebRtcBasedAcousticEchoCanceler = enable;
   }
-  public static synchronized void setWebRtcBasedAutomaticGainControl(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
     useWebRtcBasedAutomaticGainControl = enable;
   }
-  public static synchronized void setWebRtcBasedNoiseSuppressor(
-      boolean enable) {
+  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
     useWebRtcBasedNoiseSuppressor = enable;
   }
 
@@ -171,41 +166,37 @@
 
   // Helper method for building a string of thread information.
   public static String getThreadInfo() {
-    return "@[name=" + Thread.currentThread().getName()
-        + ", id=" + Thread.currentThread().getId() + "]";
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
   }
 
   // Returns true if we're running on emulator.
   public static boolean runningOnEmulator() {
-    return Build.HARDWARE.equals("goldfish") &&
-        Build.BRAND.startsWith("generic_");
+    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
   }
 
   // Returns true if the device is blacklisted for OpenSL ES usage.
   public static boolean deviceIsBlacklistedForOpenSLESUsage() {
-    List<String> blackListedModels =
-        Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
+    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
     return blackListedModels.contains(Build.MODEL);
   }
 
   // Information about the current build, taken from system properties.
   public static void logDeviceInfo(String tag) {
     Logging.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
-        + "Release: " + Build.VERSION.RELEASE + ", "
-        + "Brand: " + Build.BRAND + ", "
-        + "Device: " + Build.DEVICE + ", "
-        + "Id: " + Build.ID + ", "
-        + "Hardware: " + Build.HARDWARE + ", "
-        + "Manufacturer: " + Build.MANUFACTURER + ", "
-        + "Model: " + Build.MODEL + ", "
-        + "Product: " + Build.PRODUCT);
+            + "Release: " + Build.VERSION.RELEASE + ", "
+            + "Brand: " + Build.BRAND + ", "
+            + "Device: " + Build.DEVICE + ", "
+            + "Id: " + Build.ID + ", "
+            + "Hardware: " + Build.HARDWARE + ", "
+            + "Manufacturer: " + Build.MANUFACTURER + ", "
+            + "Model: " + Build.MODEL + ", "
+            + "Product: " + Build.PRODUCT);
   }
 
   // Checks if the process has the specified permission or not.
   public static boolean hasPermission(Context context, String permission) {
-    return context.checkPermission(
-        permission,
-        Process.myPid(),
-        Process.myUid()) == PackageManager.PERMISSION_GRANTED;
-    }
+    return context.checkPermission(permission, Process.myPid(), Process.myUid())
+        == PackageManager.PERMISSION_GRANTED;
+  }
 }
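
The reformatted hasPermission() above checks the calling process's own pid/uid, so it answers whether this app holds the permission rather than some remote caller. A short usage sketch; the RECORD_AUDIO permission here is just an example:

  import android.content.Context;
  import android.content.pm.PackageManager;
  import android.os.Process;

  // Sketch: same check as hasPermission() above, specialized to one permission.
  static boolean canRecordAudio(Context context) {
    return context.checkPermission(android.Manifest.permission.RECORD_AUDIO,
               Process.myPid(), Process.myUid())
        == PackageManager.PERMISSION_GRANTED;
  }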