Reland of Add content type information to encoded images and corresponding rtp extension header (patchset #1 id:1 of https://codereview.webrtc.org/2809653004/ )

Reason for revert:
Fix failing bots.

BUG=webrtc:7420

Review-Url: https://codereview.webrtc.org/2816493002
Cr-Commit-Position: refs/heads/master@{#17658}
diff --git a/webrtc/api/video/video_content_type.h b/webrtc/api/video/video_content_type.h
new file mode 100644
index 0000000..5c468c0
--- /dev/null
+++ b/webrtc/api/video/video_content_type.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
+#define WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
+
+#include <stdint.h>
+
+namespace webrtc {
+
+enum class VideoContentType : uint8_t {
+  UNSPECIFIED = 0,
+  SCREENSHARE = 1,
+  TOTAL_CONTENT_TYPES  // Must be the last value in the enum.
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
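
Editor's note: TOTAL_CONTENT_TYPES is a sentinel, which is why it must stay last; it lets code range-check a raw byte before casting it into the enum. A minimal sketch of that pattern (the helper name is illustrative, not part of this patch):

  #include <stdint.h>

  #include "webrtc/api/video/video_content_type.h"

  // Returns true and writes |out| if |raw| encodes a known content type.
  // Mirrors the bounds check used by the extension parsing code in this patch.
  bool ContentTypeFromByte(uint8_t raw, webrtc::VideoContentType* out) {
    if (raw >= static_cast<uint8_t>(
                   webrtc::VideoContentType::TOTAL_CONTENT_TYPES)) {
      return false;  // Unknown value, e.g. sent by a newer peer.
    }
    *out = static_cast<webrtc::VideoContentType>(raw);
    return true;
  }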
diff --git a/webrtc/common_types.cc b/webrtc/common_types.cc
index f5b487f..17bb265 100644
--- a/webrtc/common_types.cc
+++ b/webrtc/common_types.cc
@@ -31,7 +31,9 @@
       voiceActivity(false),
       audioLevel(0),
       hasVideoRotation(false),
-      videoRotation(kVideoRotation_0) {}
+      videoRotation(kVideoRotation_0),
+      hasVideoContentType(false),
+      videoContentType(VideoContentType::UNSPECIFIED) {}
 
 RTPHeader::RTPHeader()
     : markerBit(false),
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index e1a4c77..7504201 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -18,6 +18,7 @@
 #include <string>
 #include <vector>
 
+#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/optional.h"
@@ -716,6 +717,11 @@
   bool hasVideoRotation;
   VideoRotation videoRotation;
 
+  // TODO(ilnik): Refactor this and the one above into rtc::Optional and remove
+  // the corresponding bool flags.
+  bool hasVideoContentType;
+  VideoContentType videoContentType;
+
   PlayoutDelay playout_delay = {-1, -1};
 };
 
diff --git a/webrtc/config.cc b/webrtc/config.cc
index e0c490d..ab2f394 100644
--- a/webrtc/config.cc
+++ b/webrtc/config.cc
@@ -64,6 +64,10 @@
     "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";
 const int RtpExtension::kTransportSequenceNumberDefaultId = 5;
 
+const char* RtpExtension::kVideoContentTypeUri =
+    "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
+const int RtpExtension::kVideoContentTypeDefaultId = 6;
+
 // This extension allows applications to adaptively limit the playout delay
 // on frames as per the current needs. For example, a gaming application
 // has very different needs on end-to-end delay compared to a video-conference
@@ -85,7 +89,8 @@
          uri == webrtc::RtpExtension::kAbsSendTimeUri ||
          uri == webrtc::RtpExtension::kVideoRotationUri ||
          uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
-         uri == webrtc::RtpExtension::kPlayoutDelayUri;
+         uri == webrtc::RtpExtension::kPlayoutDelayUri ||
+         uri == webrtc::RtpExtension::kVideoContentTypeUri;
 }
 
 VideoStream::VideoStream()
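
Editor's note: enabling the new extension on a send stream is just a matter of appending it to the RTP extension list, as the tests later in this patch do. A hedged sketch (the helper name is illustrative; includes assume the layout used elsewhere in this patch):

  #include "webrtc/config.h"
  #include "webrtc/video_send_stream.h"

  // Registers the video-content-type extension on a send stream config using
  // the URI and default ID added in config.cc above.
  void EnableContentTypeExtension(webrtc::VideoSendStream::Config* config) {
    config->rtp.extensions.push_back(
        webrtc::RtpExtension(webrtc::RtpExtension::kVideoContentTypeUri,
                             webrtc::RtpExtension::kVideoContentTypeDefaultId));
  }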
diff --git a/webrtc/config.h b/webrtc/config.h
index f8c9e8b..f0039b3 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -88,6 +88,10 @@
   static const char* kVideoRotationUri;
   static const int kVideoRotationDefaultId;
 
+  // Header extension for video content type. E.g. default or screenshare.
+  static const char* kVideoContentTypeUri;
+  static const int kVideoContentTypeDefaultId;
+
   // Header extension for transport sequence number, see url for details:
   // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions
   static const char* kTransportSequenceNumberUri;
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index f422ffc..37e227f 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -3852,7 +3852,7 @@
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
-                                 webrtc::kVideoRotation_0);
+                                  webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame2);
   EXPECT_EQ(2, renderer.num_rendered_frames());
 
@@ -3869,7 +3869,7 @@
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
-                                 webrtc::kVideoRotation_0);
+                                  webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame3);
   EXPECT_EQ(3, renderer.num_rendered_frames());
 #endif
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
index a16c939..ffa0798 100644
--- a/webrtc/modules/include/module_common_types.h
+++ b/webrtc/modules/include/module_common_types.h
@@ -58,6 +58,8 @@
 
   PlayoutDelay playout_delay;
 
+  VideoContentType content_type;
+
   union {
     bool is_first_packet_in_frame;
     RTC_DEPRECATED bool isFirstPacket;  // first packet in frame
@@ -87,7 +89,7 @@
         fragmentationOffset(NULL),
         fragmentationLength(NULL),
         fragmentationTimeDiff(NULL),
-        fragmentationPlType(NULL) {};
+        fragmentationPlType(NULL) {}
 
   ~RTPFragmentationHeader() {
     delete[] fragmentationOffset;
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index ddfec4d..56aa9bd 100644
--- a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -76,7 +76,8 @@
   kRtpExtensionVideoRotation,
   kRtpExtensionTransportSequenceNumber,
   kRtpExtensionPlayoutDelay,
-  kRtpExtensionNumberOfExtensions,
+  kRtpExtensionVideoContentType,
+  kRtpExtensionNumberOfExtensions  // Must be the last entity in the enum.
 };
 
 enum RTCPAppSubTypes { kAppSubtypeBwe = 0x00 };
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
index bbbb143..1d39259 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -39,6 +39,7 @@
     CreateExtensionInfo<VideoOrientation>(),
     CreateExtensionInfo<TransportSequenceNumber>(),
     CreateExtensionInfo<PlayoutDelayLimits>(),
+    CreateExtensionInfo<VideoContentTypeExtension>(),
 };
 
 // Because of kRtpExtensionNone, NumberOfExtension is 1 bigger than the actual
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index 1b311e6..8141f02 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -215,4 +215,33 @@
   return true;
 }
 
+// Video Content Type.
+//
+// E.g. default video or screenshare.
+//
+//    0                   1
+//    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |  ID   | len=0 | Content type  |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+constexpr RTPExtensionType VideoContentTypeExtension::kId;
+constexpr uint8_t VideoContentTypeExtension::kValueSizeBytes;
+constexpr const char* VideoContentTypeExtension::kUri;
+
+bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
+                                      VideoContentType* content_type) {
+  if (data.size() == 1 &&
+      data[0] < static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
+    *content_type = static_cast<VideoContentType>(data[0]);
+    return true;
+  }
+  return false;
+}
+
+bool VideoContentTypeExtension::Write(uint8_t* data,
+                                      VideoContentType content_type) {
+  data[0] = static_cast<uint8_t>(content_type);
+  return true;
+}
+
 }  // namespace webrtc
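
Editor's note: the new helper operates only on the one-byte extension payload; the surrounding one-byte-header element is handled by the generic packet code. A minimal round-trip sketch, assuming the headers added in this patch (the function name is illustrative):

  #include <stdint.h>

  #include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"

  // Writes SCREENSHARE into a one-byte buffer and parses it back.
  bool RoundTripContentType() {
    uint8_t buf[webrtc::VideoContentTypeExtension::kValueSizeBytes] = {0};
    webrtc::VideoContentTypeExtension::Write(
        buf, webrtc::VideoContentType::SCREENSHARE);

    webrtc::VideoContentType parsed = webrtc::VideoContentType::UNSPECIFIED;
    bool ok = webrtc::VideoContentTypeExtension::Parse(
        rtc::ArrayView<const uint8_t>(buf, sizeof(buf)), &parsed);
    return ok && parsed == webrtc::VideoContentType::SCREENSHARE;
  }

Note that Write() always reports success here, since the single value byte cannot be malformed; Parse() is where the size and range checks live.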
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index 543688c..0d30848 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -12,6 +12,7 @@
 
 #include <stdint.h>
 
+#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/array_view.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -98,5 +99,17 @@
   static bool Write(uint8_t* data, const PlayoutDelay& playout_delay);
 };
 
+class VideoContentTypeExtension {
+ public:
+  static constexpr RTPExtensionType kId = kRtpExtensionVideoContentType;
+  static constexpr uint8_t kValueSizeBytes = 1;
+  static constexpr const char* kUri =
+      "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
+
+  static bool Parse(rtc::ArrayView<const uint8_t> data,
+                    VideoContentType* content_type);
+  static bool Write(uint8_t* data, VideoContentType content_type);
+};
+
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
index 7a7c45d..2e87528 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -169,6 +169,9 @@
       &header->extension.voiceActivity, &header->extension.audioLevel);
   header->extension.hasVideoRotation =
       GetExtension<VideoOrientation>(&header->extension.videoRotation);
+  header->extension.hasVideoContentType =
+      GetExtension<VideoContentTypeExtension>(
+          &header->extension.videoContentType);
 }
 
 size_t Packet::headers_size() const {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index d6c5e5c..debe836 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -90,6 +90,7 @@
   rtp_header->frameType = parsed_payload.frame_type;
   rtp_header->type = parsed_payload.type;
   rtp_header->type.Video.rotation = kVideoRotation_0;
+  rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
 
   // Retrieve the video rotation information.
   if (rtp_header->header.extension.hasVideoRotation) {
@@ -97,6 +98,11 @@
         rtp_header->header.extension.videoRotation;
   }
 
+  if (rtp_header->header.extension.hasVideoContentType) {
+    rtp_header->type.Video.content_type =
+        rtp_header->header.extension.videoContentType;
+  }
+
   rtp_header->type.Video.playout_delay =
       rtp_header->header.extension.playout_delay;
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index f77e59c..d6c54d0 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -40,6 +40,8 @@
     return kRtpExtensionTransportSequenceNumber;
   if (extension == RtpExtension::kPlayoutDelayUri)
     return kRtpExtensionPlayoutDelay;
+  if (extension == RtpExtension::kVideoContentTypeUri)
+    return kRtpExtensionVideoContentType;
   RTC_NOTREACHED() << "Looking up unsupported RTP extension.";
   return kRtpExtensionNone;
 }
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 66ee51f..75e2dc1 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -203,6 +203,7 @@
     rtp_video_header.width = codec_.width;
     rtp_video_header.height = codec_.height;
     rtp_video_header.rotation = kVideoRotation_0;
+    rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
     rtp_video_header.playout_delay = {-1, -1};
     rtp_video_header.is_first_packet_in_frame = true;
     rtp_video_header.simulcastIdx = 0;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 849ed78..b89aefe 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -324,6 +324,12 @@
           current_rotation != kVideoRotation_0)
         rtp_header->SetExtension<VideoOrientation>(current_rotation);
       last_rotation_ = current_rotation;
+      // Report content type only for key frames.
+      if (frame_type == kVideoFrameKey &&
+          video_header->content_type != VideoContentType::UNSPECIFIED) {
+        rtp_header->SetExtension<VideoContentTypeExtension>(
+            video_header->content_type);
+      }
     }
 
     // FEC settings.
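
Editor's note: the extension is written only on key frames with a known content type, which keeps the per-packet overhead down; the receive side carries the value forward to delta frames (see the generic_decoder.cc change below). The gating condition, factored into a hypothetical helper for illustration (the patch inlines it):

  #include "webrtc/api/video/video_content_type.h"
  #include "webrtc/common_types.h"

  // True if the content-type extension should be written for this frame.
  bool ShouldWriteContentType(webrtc::FrameType frame_type,
                              webrtc::VideoContentType content_type) {
    return frame_type == webrtc::kVideoFrameKey &&
           content_type != webrtc::VideoContentType::UNSPECIFIED;
  }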
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index def431f..1c12c89 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -254,6 +254,10 @@
   header->extension.playout_delay.min_ms = -1;
   header->extension.playout_delay.max_ms = -1;
 
+  // May not be present in packet.
+  header->extension.hasVideoContentType = false;
+  header->extension.videoContentType = VideoContentType::UNSPECIFIED;
+
   if (X) {
     /* RTP header extension, RFC 3550.
      0                   1                   2                   3
@@ -446,6 +450,25 @@
               max_playout_delay * PlayoutDelayLimits::kGranularityMs;
           break;
         }
+        case kRtpExtensionVideoContentType: {
+          if (len != 0) {
+            LOG(LS_WARNING) << "Incorrect video content type len: " << len;
+            return;
+          }
+          //    0                   1
+          //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+          //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+          //   |  ID   | len=0 | Content type  |
+          //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+          if (ptr[0] <
+              static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
+            header->extension.hasVideoContentType = true;
+            header->extension.videoContentType =
+                static_cast<VideoContentType>(ptr[0]);
+          }
+          break;
+        }
         case kRtpExtensionNone:
         case kRtpExtensionNumberOfExtensions: {
           RTC_NOTREACHED() << "Invalid extension type: " << type;
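
Editor's note: in the RFC 5285 one-byte-header format the 4-bit len field stores the data length minus one, so len=0 means exactly one value byte; the parser above therefore rejects any other length and range-checks the value. A standalone sketch of decoding one such element (illustrative only; the real parsing is the switch case above):

  #include <stdint.h>

  #include "webrtc/api/video/video_content_type.h"

  // |element| points at the ID/len byte; layout: |  ID   | len=0 | value |.
  bool ParseContentTypeElement(const uint8_t* element,
                               int expected_id,
                               webrtc::VideoContentType* content_type) {
    const int id = element[0] >> 4;
    const int len = element[0] & 0x0F;  // Stored as length - 1.
    if (id != expected_id || len != 0)
      return false;
    const uint8_t value = element[1];
    if (value >= static_cast<uint8_t>(
                     webrtc::VideoContentType::TOTAL_CONTENT_TYPES))
      return false;
    *content_type = static_cast<webrtc::VideoContentType>(value);
    return true;
  }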
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index a0908f0..0afa9aa 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -32,9 +32,8 @@
   return implementation;
 }
 
-const char* VideoCaptureImpl::CurrentDeviceName() const
-{
-    return _deviceUniqueId;
+const char* VideoCaptureImpl::CurrentDeviceName() const {
+  return _deviceUniqueId;
 }
 
 // static
@@ -136,14 +135,13 @@
 
     // Not encoded, convert to I420.
     const VideoType commonVideoType =
-              RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
 
     if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width,
-                       abs(height)) != videoFrameLength)
-    {
-        LOG(LS_ERROR) << "Wrong incoming frame length.";
-        return -1;
+        CalcBufferSize(commonVideoType, width, abs(height)) !=
+            videoFrameLength) {
+      LOG(LS_ERROR) << "Wrong incoming frame length.";
+      return -1;
     }
 
     int stride_y = width;
@@ -174,16 +172,14 @@
         commonVideoType, videoFrame, 0, 0,  // No cropping
         width, height, videoFrameLength,
         apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
-    if (conversionResult < 0)
-    {
+    if (conversionResult < 0) {
       LOG(LS_ERROR) << "Failed to convert capture frame from type "
                     << frameInfo.rawType << "to I420.";
-        return -1;
+      return -1;
     }
 
-    VideoFrame captureFrame(
-        buffer, 0, rtc::TimeMillis(),
-        !apply_rotation ? _rotateFrame : kVideoRotation_0);
+    VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
+                            !apply_rotation ? _rotateFrame : kVideoRotation_0);
     captureFrame.set_ntp_time_ms(captureTime);
 
     DeliverCapturedFrame(captureFrame);
@@ -205,52 +201,40 @@
   return true;
 }
 
-void VideoCaptureImpl::UpdateFrameCount()
-{
-  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
-    {
-        // first no shift
+void VideoCaptureImpl::UpdateFrameCount() {
+  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
+    // first no shift
+  } else {
+    // shift
+    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
+      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
     }
-    else
-    {
-        // shift
-        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
-        {
-            _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
-        }
-    }
-    _incomingFrameTimesNanos[0] = rtc::TimeNanos();
+  }
+  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
 }
 
-uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
-{
-    int32_t num = 0;
-    int32_t nrOfFrames = 0;
-    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
-    {
-        if (_incomingFrameTimesNanos[num] <= 0 ||
-            (now_ns - _incomingFrameTimesNanos[num]) /
-            rtc::kNumNanosecsPerMillisec >
-                kFrameRateHistoryWindowMs) // don't use data older than 2sec
-        {
-            break;
-        }
-        else
-        {
-            nrOfFrames++;
-        }
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
+  int32_t num = 0;
+  int32_t nrOfFrames = 0;
+  for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
+    if (_incomingFrameTimesNanos[num] <= 0 ||
+        (now_ns - _incomingFrameTimesNanos[num]) /
+                rtc::kNumNanosecsPerMillisec >
+            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
+      break;
+    } else {
+      nrOfFrames++;
     }
-    if (num > 1)
-    {
-        int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
-                       rtc::kNumNanosecsPerMillisec;
-        if (diff > 0)
-        {
-            return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
-        }
+  }
+  if (num > 1) {
+    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+                   rtc::kNumNanosecsPerMillisec;
+    if (diff > 0) {
+      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
     }
+  }
 
-    return nrOfFrames;
+  return nrOfFrames;
 }
 }  // namespace videocapturemodule
 }  // namespace webrtc
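
Editor's note: the CalculateFrameRate reformatting above keeps the original logic: count the frame timestamps that fall inside the two-second history window and divide by the time span they cover. As a worked example with made-up numbers, 30 counted frames spanning 966 ms give uint32_t((30 * 1000.0f / 966) + 0.5f) == 31 fps.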
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 84bfafb..315d347 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -367,6 +367,9 @@
   encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
   encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
   encoded_image_.rotation_ = input_frame.rotation();
+  encoded_image_.content_type_ = (mode_ == kScreensharing)
+                                     ? VideoContentType::SCREENSHARE
+                                     : VideoContentType::UNSPECIFIED;
   encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
 
   // Split encoded image up into fragments. This also updates |encoded_image_|.
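
Editor's note: the same mode-to-content-type mapping is repeated in the VP8, VP9, Android and VideoToolbox encoders below. A shared helper would look roughly like this (hypothetical; not part of this patch, which inlines the ternary in each encoder):

  #include "webrtc/api/video/video_content_type.h"
  #include "webrtc/common_types.h"

  // Maps the configured codec mode to the content type tagged on each image.
  webrtc::VideoContentType ContentTypeForMode(webrtc::VideoCodecMode mode) {
    return mode == webrtc::kScreensharing
               ? webrtc::VideoContentType::SCREENSHARE
               : webrtc::VideoContentType::UNSPECIFIED;
  }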
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 41fd7ff..66db72c 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -878,6 +878,9 @@
     encoded_images_[encoder_idx].capture_time_ms_ =
         input_image.render_time_ms();
     encoded_images_[encoder_idx].rotation_ = input_image.rotation();
+    encoded_images_[encoder_idx].content_type_ =
+        (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
+                                        : VideoContentType::UNSPECIFIED;
 
     int qp = -1;
     vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 4d7df86..4b0f99e 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -706,6 +706,9 @@
     encoded_image_._timeStamp = input_image_->timestamp();
     encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
     encoded_image_.rotation_ = input_image_->rotation();
+    encoded_image_.content_type_ = (codec_.mode == kScreensharing)
+                                       ? VideoContentType::SCREENSHARE
+                                       : VideoContentType::UNSPECIFIED;
     encoded_image_._encodedHeight = raw_->d_h;
     encoded_image_._encodedWidth = raw_->d_w;
     int qp = -1;
diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc
index fb12c5b..1807fa5 100644
--- a/webrtc/modules/video_coding/encoded_frame.cc
+++ b/webrtc/modules/video_coding/encoded_frame.cc
@@ -87,6 +87,7 @@
   _codecSpecificInfo.codecType = kVideoCodecUnknown;
   _codec = kVideoCodecUnknown;
   rotation_ = kVideoRotation_0;
+  content_type_ = VideoContentType::UNSPECIFIED;
   _rotation_set = false;
 }
 
diff --git a/webrtc/modules/video_coding/encoded_frame.h b/webrtc/modules/video_coding/encoded_frame.h
index 840cd20..96f9d00 100644
--- a/webrtc/modules/video_coding/encoded_frame.h
+++ b/webrtc/modules/video_coding/encoded_frame.h
@@ -77,8 +77,12 @@
   */
   VideoRotation rotation() const { return rotation_; }
   /**
-  *   True if this frame is complete, false otherwise
-  */
+   *  Get video content type
+   */
+  VideoContentType contentType() const { return content_type_; }
+  /**
+   *   True if this frame is complete, false otherwise
+   */
   bool Complete() const { return _completeFrame; }
   /**
   *   True if there's a frame missing before this frame
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
index 1439a17..5ea12dc 100644
--- a/webrtc/modules/video_coding/frame_buffer.cc
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -163,6 +163,7 @@
     RTC_DCHECK(!_rotation_set);
     rotation_ = packet.video_header.rotation;
     _rotation_set = true;
+    content_type_ = packet.video_header.content_type;
   }
 
   if (packet.is_first_packet_in_frame) {
diff --git a/webrtc/modules/video_coding/frame_object.cc b/webrtc/modules/video_coding/frame_object.cc
index 70b0a02..9e5ce09 100644
--- a/webrtc/modules/video_coding/frame_object.cc
+++ b/webrtc/modules/video_coding/frame_object.cc
@@ -79,6 +79,7 @@
   // (HEVC)).
   rotation_ = last_packet->video_header.rotation;
   _rotation_set = true;
+  content_type_ = last_packet->video_header.content_type;
 }
 
 RtpFrameObject::~RtpFrameObject() {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index 2121ab6..f5d9cfe 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -87,7 +87,7 @@
   decodedImage.set_timestamp_us(
       frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec);
   decodedImage.set_rotation(frameInfo->rotation);
-  _receiveCallback->FrameToRender(decodedImage, qp);
+  _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type);
 }
 
 int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
@@ -131,7 +131,8 @@
       _decoder(decoder),
       _codecType(kVideoCodecUnknown),
       _isExternal(isExternal),
-      _keyFrameDecoded(false) {}
+      _keyFrameDecoded(false),
+      _last_keyframe_content_type(VideoContentType::UNSPECIFIED) {}
 
 VCMGenericDecoder::~VCMGenericDecoder() {}
 
@@ -149,6 +150,15 @@
     _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
     _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
     _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
+    // Content type is set correctly only for key frames, so for delta frames
+    // reuse the content type of the latest key frame. If the corresponding
+    // key frame was lost, decoding fails and the content type is ignored.
+    if (frame.FrameType() == kVideoFrameKey) {
+      _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType();
+      _last_keyframe_content_type = frame.contentType();
+    } else {
+      _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type;
+    }
     _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
 
     _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
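
Editor's note: this is the receive-side counterpart of sending the extension only on key frames: key frames refresh the cached value and delta frames reuse it. A minimal model of the bookkeeping (the class is illustrative; the real state lives in VCMGenericDecoder):

  #include "webrtc/api/video/video_content_type.h"

  class ContentTypeTracker {
   public:
    // Key frames update the cache; all frames return the cached value.
    webrtc::VideoContentType OnFrame(bool is_keyframe,
                                     webrtc::VideoContentType content_type) {
      if (is_keyframe)
        last_keyframe_content_type_ = content_type;
      return last_keyframe_content_type_;
    }

   private:
    webrtc::VideoContentType last_keyframe_content_type_ =
        webrtc::VideoContentType::UNSPECIFIED;
  };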
diff --git a/webrtc/modules/video_coding/generic_decoder.h b/webrtc/modules/video_coding/generic_decoder.h
index 891ec89..71b8d81 100644
--- a/webrtc/modules/video_coding/generic_decoder.h
+++ b/webrtc/modules/video_coding/generic_decoder.h
@@ -30,6 +30,7 @@
   int64_t decodeStartTimeMs;
   void* userData;
   VideoRotation rotation;
+  VideoContentType content_type;
 };
 
 class VCMDecodedFrameCallback : public DecodedImageCallback {
@@ -109,6 +110,7 @@
   VideoCodecType _codecType;
   bool _isExternal;
   bool _keyFrameDecoded;
+  VideoContentType _last_keyframe_content_type;
 };
 
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
index 8a53c1d..b8f27e6 100644
--- a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
+++ b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
@@ -33,7 +33,10 @@
   MockVCMReceiveCallback() {}
   virtual ~MockVCMReceiveCallback() {}
 
-  MOCK_METHOD2(FrameToRender, int32_t(VideoFrame&, rtc::Optional<uint8_t>));
+  // TODO(ilnik): Remove this using-declaration once deprecation is done.
+  using VCMReceiveCallback::FrameToRender;
+  MOCK_METHOD3(FrameToRender,
+               int32_t(VideoFrame&, rtc::Optional<uint8_t>, VideoContentType));
   MOCK_METHOD1(ReceivedDecodedReferenceFrame, int32_t(const uint64_t));
   MOCK_METHOD1(OnIncomingPayloadType, void(int));
   MOCK_METHOD1(OnDecoderImplementationName, void(const char*));
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 4ed80a6..8eaac74 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -61,8 +61,17 @@
 // rendered.
 class VCMReceiveCallback {
  public:
+  // TODO(ilnik): Once deprecation is complete, change this to pure virtual.
   virtual int32_t FrameToRender(VideoFrame& videoFrame,  // NOLINT
-                                rtc::Optional<uint8_t> qp) = 0;
+                                rtc::Optional<uint8_t> qp,
+                                VideoContentType /*content_type*/) {
+    return FrameToRender(videoFrame, qp);
+  }
+  // DEPRECATED. Use the other overloaded version.
+  virtual int32_t FrameToRender(VideoFrame& videoFrame,  // NOLINT
+                                rtc::Optional<uint8_t> qp) {
+    return -1;
+  }
   virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
     return -1;
   }
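
Editor's note: the new three-argument FrameToRender defaults to forwarding into the old two-argument overload, so existing callbacks keep working; implementations that adopt the new signature override it and bring the hidden base overload back into scope with a using-declaration, as the mock and VideoStreamDecoder in this patch do. A sketch of such an implementer (the class name is illustrative):

  #include "webrtc/base/optional.h"
  #include "webrtc/modules/video_coding/include/video_coding_defines.h"
  #include "webrtc/video_frame.h"

  class MyReceiveCallback : public webrtc::VCMReceiveCallback {
   public:
    // Keep the deprecated two-argument overload visible to callers.
    using webrtc::VCMReceiveCallback::FrameToRender;

    int32_t FrameToRender(webrtc::VideoFrame& video_frame,  // NOLINT
                          rtc::Optional<uint8_t> qp,
                          webrtc::VideoContentType content_type) override {
      // Render |video_frame| and record |qp| / |content_type| as needed.
      return 0;
    }
  };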
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 44acccf..6425015 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -1042,6 +1042,10 @@
       image->_timeStamp = output_timestamp_;
       image->capture_time_ms_ = output_render_time_ms_;
       image->rotation_ = output_rotation_;
+      image->content_type_ =
+          (codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
+              ? webrtc::VideoContentType::SCREENSHARE
+              : webrtc::VideoContentType::UNSPECIFIED;
       image->_frameType =
           (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
       image->_completeFrame = true;
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
index 5de9a24..09aa7db 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
@@ -85,6 +85,7 @@
   uint32_t encoder_bitrate_bps_;
   int32_t width_;
   int32_t height_;
+  VideoCodecMode mode_;
   const CFStringRef profile_;
 
   H264BitstreamParser h264_bitstream_parser_;
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
index e50b225..bc46b35 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
@@ -364,6 +364,7 @@
 
   width_ = codec_settings->width;
   height_ = codec_settings->height;
+  mode_ = codec_settings->mode;
   // We can only set average bitrate on the HW encoder.
   target_bitrate_bps_ = codec_settings->startBitrate;
   bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
@@ -722,6 +723,9 @@
   frame._timeStamp = timestamp;
   frame.rotation_ = rotation;
 
+  frame.content_type_ =
+      (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
+
   h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
   h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);
 
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index 5c0b42c..6ec3fda 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -208,6 +208,8 @@
     video_send_config_.rtp.extensions.push_back(
         RtpExtension(RtpExtension::kTransportSequenceNumberUri,
                      kTransportSequenceNumberExtensionId));
+    video_send_config_.rtp.extensions.push_back(RtpExtension(
+        RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
     FillEncoderConfiguration(num_video_streams, &video_encoder_config_);
 
     for (size_t i = 0; i < num_video_streams; ++i)
diff --git a/webrtc/test/constants.cc b/webrtc/test/constants.cc
index 43f9adc..a789cc0 100644
--- a/webrtc/test/constants.cc
+++ b/webrtc/test/constants.cc
@@ -17,5 +17,7 @@
 const int kAbsSendTimeExtensionId = 7;
 const int kTransportSequenceNumberExtensionId = 8;
 const int kVideoRotationExtensionId = 9;
+const int kVideoContentTypeExtensionId = 10;
+
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/constants.h b/webrtc/test/constants.h
index 1b5b0cb..d0f73d0 100644
--- a/webrtc/test/constants.h
+++ b/webrtc/test/constants.h
@@ -15,5 +15,6 @@
 extern const int kAbsSendTimeExtensionId;
 extern const int kTransportSequenceNumberExtensionId;
 extern const int kVideoRotationExtensionId;
+extern const int kVideoContentTypeExtensionId;
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index 1db93eb..fce12c6 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -61,6 +61,7 @@
   int max_target_bitrate_kbps;
   int64_t last_encode_time_ms;
   size_t num_encoded_bytes;
+  VideoCodecMode mode;
   {
     rtc::CritScope cs(&crit_sect_);
     max_framerate = config_.maxFramerate;
@@ -73,6 +74,7 @@
     max_target_bitrate_kbps = max_target_bitrate_kbps_;
     last_encode_time_ms = last_encode_time_ms_;
     num_encoded_bytes = sizeof(encoded_buffer_);
+    mode = config_.mode;
   }
 
   int64_t time_now_ms = clock_->TimeInMilliseconds();
@@ -142,6 +144,9 @@
     encoded._encodedWidth = simulcast_streams[i].width;
     encoded._encodedHeight = simulcast_streams[i].height;
     encoded.rotation_ = input_image.rotation();
+    encoded.content_type_ = (mode == kScreensharing)
+                                ? VideoContentType::SCREENSHARE
+                                : VideoContentType::UNSPECIFIED;
     specifics.codec_name = ImplementationName();
     RTC_DCHECK(callback);
     if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=
diff --git a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
index 613f125..7cf65cf 100644
--- a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -85,6 +85,10 @@
         PlayoutDelay playout;
         packet.GetExtension<PlayoutDelayLimits>(&playout);
         break;
+      case kRtpExtensionVideoContentType:
+        VideoContentType content_type;
+        packet.GetExtension<VideoContentTypeExtension>(&content_type);
+        break;
     }
   }
 }
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index f31a68e..f171c5b 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -2652,7 +2652,8 @@
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
 
-  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
+  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
+  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
 
   EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
@@ -2692,6 +2693,118 @@
             metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent"));
 }
 
+TEST_F(EndToEndTest, ContentTypeSwitches) {
+  class StatsObserver : public test::BaseTest,
+                        public rtc::VideoSinkInterface<VideoFrame> {
+   public:
+    StatsObserver() : BaseTest(kLongTimeoutMs), num_frames_received_(0) {}
+
+    bool ShouldCreateReceivers() const override { return true; }
+
+    void OnFrame(const VideoFrame& video_frame) override {
+      // The RTT is needed to estimate |ntp_time_ms| which is used by
+      // end-to-end delay stats. Therefore, start counting received frames once
+      // |ntp_time_ms| is valid.
+      if (video_frame.ntp_time_ms() > 0 &&
+          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
+              video_frame.ntp_time_ms()) {
+        rtc::CritScope lock(&crit_);
+        ++num_frames_received_;
+      }
+    }
+
+    Action OnSendRtp(const uint8_t* packet, size_t length) override {
+      if (MinNumberOfFramesReceived())
+        observation_complete_.Set();
+      return SEND_PACKET;
+    }
+
+    bool MinNumberOfFramesReceived() const {
+      const int kMinRequiredHistogramSamples = 200;
+      rtc::CritScope lock(&crit_);
+      return num_frames_received_ > kMinRequiredHistogramSamples;
+    }
+
+    // May be called several times.
+    void PerformTest() override {
+      EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
+      // Reset frame counter so next PerformTest() call will do something.
+      {
+        rtc::CritScope lock(&crit_);
+        num_frames_received_ = 0;
+      }
+    }
+
+    rtc::CriticalSection crit_;
+    int num_frames_received_ GUARDED_BY(&crit_);
+  } test;
+
+  metrics::Reset();
+
+  Call::Config send_config(test.GetSenderCallConfig());
+  CreateSenderCall(send_config);
+  Call::Config recv_config(test.GetReceiverCallConfig());
+  CreateReceiverCall(recv_config);
+  receive_transport_.reset(test.CreateReceiveTransport());
+  send_transport_.reset(test.CreateSendTransport(sender_call_.get()));
+  send_transport_->SetReceiver(receiver_call_->Receiver());
+  receive_transport_->SetReceiver(sender_call_->Receiver());
+  receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+  CreateSendConfig(1, 0, 0, send_transport_.get());
+  CreateMatchingReceiveConfigs(receive_transport_.get());
+
+  // Modify send and receive configs.
+  video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+  video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
+  video_receive_configs_[0].renderer = &test;
+  // RTT needed for RemoteNtpTimeEstimator for the receive stream.
+  video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report = true;
+  // Start with realtime video.
+  video_encoder_config_.content_type =
+      VideoEncoderConfig::ContentType::kRealtimeVideo;
+  // The second encoder config, used in the second part of the test, switches
+  // to screenshare content.
+  VideoEncoderConfig encoder_config_with_screenshare_ =
+      video_encoder_config_.Copy();
+  encoder_config_with_screenshare_.content_type =
+      VideoEncoderConfig::ContentType::kScreen;
+
+  CreateVideoStreams();
+  CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
+                               kDefaultHeight);
+  Start();
+
+  test.PerformTest();
+
+  // Replace old send stream.
+  sender_call_->DestroyVideoSendStream(video_send_stream_);
+  video_send_stream_ = sender_call_->CreateVideoSendStream(
+      video_send_config_.Copy(), encoder_config_with_screenshare_.Copy());
+  video_send_stream_->SetSource(
+      frame_generator_capturer_.get(),
+      VideoSendStream::DegradationPreference::kBalanced);
+  video_send_stream_->Start();
+
+  // Continue to run test but now with screenshare.
+  test.PerformTest();
+
+  send_transport_->StopSending();
+  receive_transport_->StopSending();
+  Stop();
+  DestroyStreams();
+  DestroyCalls();
+  // Destroy the calls so that Call-level stats are reported.
+  sender_call_.reset();
+  receiver_call_.reset();
+
+  // Verify that stats have been updated for both screenshare and video.
+  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
+  EXPECT_EQ(1,
+            metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs"));
+  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs"));
+  EXPECT_EQ(
+      1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs"));
+}
+
 TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
   const bool kEnabledRtx = true;
   const bool kEnabledRed = false;
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
index f2f4309..52e9d46 100644
--- a/webrtc/video/payload_router.cc
+++ b/webrtc/video/payload_router.cc
@@ -129,6 +129,7 @@
   if (codec_specific_info)
     CopyCodecSpecific(codec_specific_info, &rtp_video_header);
   rtp_video_header.rotation = encoded_image.rotation_;
+  rtp_video_header.content_type = encoded_image.content_type_;
   rtp_video_header.playout_delay = encoded_image.playout_delay_;
 
   int stream_index = rtp_video_header.simulcastIdx;
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index 2ed2fae..e40a7ef 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -74,9 +74,12 @@
       render_fps_tracker_(100, 10u),
       render_pixel_tracker_(100, 10u),
       total_byte_tracker_(100, 10u),  // bucket_interval_ms, bucket_count
+      e2e_delay_max_ms_video_(-1),
+      e2e_delay_max_ms_screenshare_(-1),
       freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
       first_report_block_time_ms_(-1),
-      avg_rtt_ms_(0) {
+      avg_rtt_ms_(0),
+      last_content_type_(VideoContentType::UNSPECIFIED) {
   stats_.ssrc = config_.rtp.remote_ssrc;
   // TODO(brandtr): Replace |rtx_stats_| with a single instance of
   // StreamDataCounters.
@@ -169,9 +172,30 @@
   if (delay_ms != -1)
     RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
 
-  int e2e_delay_ms = e2e_delay_counter_.Avg(kMinRequiredSamples);
-  if (e2e_delay_ms != -1)
-    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", e2e_delay_ms);
+  int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
+  if (e2e_delay_ms_video != -1) {
+    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
+                               e2e_delay_ms_video);
+  }
+
+  int e2e_delay_ms_screenshare =
+      e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
+  if (e2e_delay_ms_screenshare != -1) {
+    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
+                               e2e_delay_ms_screenshare);
+  }
+
+  int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
+  if (e2e_delay_max_ms_video != -1) {
+    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
+                                e2e_delay_max_ms_video);
+  }
+
+  int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
+  if (e2e_delay_max_ms_screenshare != -1) {
+    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
+                                e2e_delay_max_ms_screenshare);
+  }
 
   StreamDataCounters rtp = stats_.rtp_stats;
   StreamDataCounters rtx;
@@ -431,7 +455,8 @@
     total_byte_tracker_.AddSamples(total_bytes - last_total_bytes);
 }
 
-void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp) {
+void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
+                                            VideoContentType content_type) {
   uint64_t now = clock_->TimeInMilliseconds();
 
   rtc::CritScope lock(&crit_);
@@ -451,6 +476,7 @@
         << "QP sum was already set and no QP was given for a frame.";
     stats_.qp_sum = rtc::Optional<uint64_t>();
   }
+  last_content_type_ = content_type;
   decode_fps_estimator_.Update(1, now);
   stats_.decode_frame_rate = decode_fps_estimator_.Rate(now).value_or(0);
 }
@@ -475,8 +501,16 @@
 
   if (frame.ntp_time_ms() > 0) {
     int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
-    if (delay_ms >= 0)
-      e2e_delay_counter_.Add(delay_ms);
+    if (delay_ms >= 0) {
+      if (last_content_type_ == VideoContentType::SCREENSHARE) {
+        e2e_delay_max_ms_screenshare_ =
+            std::max(delay_ms, e2e_delay_max_ms_screenshare_);
+        e2e_delay_counter_screenshare_.Add(delay_ms);
+      } else {
+        e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
+        e2e_delay_counter_video_.Add(delay_ms);
+      }
+    }
   }
 }
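
Editor's note: delay samples are now bucketed by the content type of the last decoded frame; each bucket keeps a running average (via SampleCounter) plus a maximum, and both are written to histograms when the stream's stats are updated at teardown. A simplified model of that bookkeeping (types here are illustrative; the real code uses SampleCounter guarded by |crit_|):

  #include <stdint.h>
  #include <algorithm>

  struct DelayBucket {
    int64_t sum_ms = 0;
    int64_t num_samples = 0;
    int64_t max_ms = -1;

    void Add(int64_t delay_ms) {
      sum_ms += delay_ms;
      ++num_samples;
      max_ms = std::max(max_ms, delay_ms);
    }
    int64_t Avg() const { return num_samples > 0 ? sum_ms / num_samples : -1; }
  };

  struct EndToEndDelayStats {
    DelayBucket video;        // WebRTC.Video.EndToEndDelay{,Max}InMs
    DelayBucket screenshare;  // WebRTC.Video.Screenshare.EndToEndDelay{,Max}InMs
  };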
 
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index 07e59b4..e1d0971 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -46,7 +46,7 @@
 
   VideoReceiveStream::Stats GetStats() const;
 
-  void OnDecodedFrame(rtc::Optional<uint8_t> qp);
+  void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
   void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
   void OnRenderedFrame(const VideoFrame& frame);
   void OnIncomingPayloadType(int payload_type);
@@ -140,7 +140,10 @@
   SampleCounter target_delay_counter_ GUARDED_BY(crit_);
   SampleCounter current_delay_counter_ GUARDED_BY(crit_);
   SampleCounter delay_counter_ GUARDED_BY(crit_);
-  SampleCounter e2e_delay_counter_ GUARDED_BY(crit_);
+  SampleCounter e2e_delay_counter_video_ GUARDED_BY(crit_);
+  SampleCounter e2e_delay_counter_screenshare_ GUARDED_BY(crit_);
+  int64_t e2e_delay_max_ms_video_ GUARDED_BY(crit_);
+  int64_t e2e_delay_max_ms_screenshare_ GUARDED_BY(crit_);
   MaxCounter freq_offset_counter_ GUARDED_BY(crit_);
   int64_t first_report_block_time_ms_ GUARDED_BY(crit_);
   ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
@@ -148,6 +151,7 @@
   std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);
   int64_t avg_rtt_ms_ GUARDED_BY(crit_);
   mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
+  VideoContentType last_content_type_ GUARDED_BY(&crit_);
 };
 
 }  // namespace webrtc
diff --git a/webrtc/video/receive_statistics_proxy_unittest.cc b/webrtc/video/receive_statistics_proxy_unittest.cc
index af7ae68..84943e2 100644
--- a/webrtc/video/receive_statistics_proxy_unittest.cc
+++ b/webrtc/video/receive_statistics_proxy_unittest.cc
@@ -54,7 +54,8 @@
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+                                      VideoContentType::UNSPECIFIED);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
 }
@@ -62,40 +63,47 @@
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithQpResetsFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+                                      VideoContentType::UNSPECIFIED);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u));
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_decoded);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u));
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(rtc::Optional<uint64_t>(130u),
             statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+                                    VideoContentType::UNSPECIFIED);
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
-  webrtc::VideoFrame frame(
-      webrtc::I420Buffer::Create(1, 1), 0, 0, webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
+                           webrtc::kVideoRotation_0);
   for (uint32_t i = 1; i <= 3; ++i) {
     statistics_proxy_->OnRenderedFrame(frame);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);
diff --git a/webrtc/video/rtp_stream_receiver.cc b/webrtc/video/rtp_stream_receiver.cc
index 90dd0da..00f1c46 100644
--- a/webrtc/video/rtp_stream_receiver.cc
+++ b/webrtc/video/rtp_stream_receiver.cc
@@ -502,6 +502,10 @@
   if (header.extension.hasVideoRotation) {
     rtp_header.type.Video.rotation = header.extension.videoRotation;
   }
+  rtp_header.type.Video.content_type = VideoContentType::UNSPECIFIED;
+  if (header.extension.hasVideoContentType) {
+    rtp_header.type.Video.content_type = header.extension.videoContentType;
+  }
   rtp_header.type.Video.playout_delay = header.extension.playout_delay;
 
   OnReceivedPayloadData(nullptr, 0, &rtp_header);
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 0baf42c..cea8af1 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -1301,6 +1301,8 @@
     video_send_config_.rtp.extensions.push_back(RtpExtension(
         RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
   }
+  video_send_config_.rtp.extensions.push_back(RtpExtension(
+      RtpExtension::kVideoContentTypeUri, test::kVideoContentTypeExtensionId));
 
   video_encoder_config_.min_transmit_bitrate_bps =
       params_.video.min_transmit_bps;
@@ -1328,6 +1330,8 @@
         kSendRtxPayloadType;
     video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
     video_receive_configs_[i].rtp.remb = !params_.call.send_side_bwe;
+    // Enable RTT calculation so NTP time estimator will work.
+    video_receive_configs_[i].rtp.rtcp_xr.receiver_reference_time_report = true;
     // Force fake decoders on non-selected simulcast streams.
     if (i != params_.ss.selected_stream) {
       VideoReceiveStream::Decoder decoder;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index 894b840..e24cb41 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -291,6 +291,43 @@
   RunBaseTest(&test);
 }
 
+TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
+  class VideoContentTypeObserver : public test::SendTest {
+   public:
+    VideoContentTypeObserver() : SendTest(kDefaultTimeoutMs) {
+      EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
+          kRtpExtensionVideoContentType, test::kVideoContentTypeExtensionId));
+    }
+
+    Action OnSendRtp(const uint8_t* packet, size_t length) override {
+      RTPHeader header;
+      EXPECT_TRUE(parser_->Parse(packet, length, &header));
+      EXPECT_TRUE(header.extension.hasVideoContentType);
+      EXPECT_EQ(VideoContentType::SCREENSHARE,
+                header.extension.videoContentType);
+      observation_complete_.Set();
+      return SEND_PACKET;
+    }
+
+    void ModifyVideoConfigs(
+        VideoSendStream::Config* send_config,
+        std::vector<VideoReceiveStream::Config>* receive_configs,
+        VideoEncoderConfig* encoder_config) override {
+      send_config->rtp.extensions.clear();
+      send_config->rtp.extensions.push_back(
+          RtpExtension(RtpExtension::kVideoContentTypeUri,
+                       test::kVideoContentTypeExtensionId));
+      encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
+    }
+
+    void PerformTest() override {
+      EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
+    }
+  } test;
+
+  RunBaseTest(&test);
+}
+
 class FakeReceiveStatistics : public NullReceiveStatistics {
  public:
   FakeReceiveStatistics(uint32_t send_ssrc,
diff --git a/webrtc/video/video_stream_decoder.cc b/webrtc/video/video_stream_decoder.cc
index 6eea13f..a7688ce 100644
--- a/webrtc/video/video_stream_decoder.cc
+++ b/webrtc/video/video_stream_decoder.cc
@@ -76,10 +76,10 @@
 // thread may have held the lock when calling VideoDecoder::Decode, Reset, or
 // Release. Acquiring the same lock in the path of decode callback can deadlock.
 int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
-                                          rtc::Optional<uint8_t> qp) {
-  receive_stats_callback_->OnDecodedFrame(qp);
+                                          rtc::Optional<uint8_t> qp,
+                                          VideoContentType content_type) {
+  receive_stats_callback_->OnDecodedFrame(qp, content_type);
   incoming_video_stream_->OnFrame(video_frame);
-
   return 0;
 }
 
diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h
index 4bca3ed..6f64934 100644
--- a/webrtc/video/video_stream_decoder.h
+++ b/webrtc/video/video_stream_decoder.h
@@ -57,9 +57,12 @@
       rtc::VideoSinkInterface<VideoFrame>* incoming_video_stream);
   ~VideoStreamDecoder();
 
+  // TODO(ilnik): remove this once deprecated API is removed.
+  using VCMReceiveCallback::FrameToRender;
   // Implements VCMReceiveCallback.
   int32_t FrameToRender(VideoFrame& video_frame,
-                        rtc::Optional<uint8_t> qp) override;
+                        rtc::Optional<uint8_t> qp,
+                        VideoContentType content_type) override;
   int32_t ReceivedDecodedReferenceFrame(const uint64_t picture_id) override;
   void OnIncomingPayloadType(int payload_type) override;
   void OnDecoderImplementationName(const char* implementation_name) override;
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 3b0c16c..47e58a1 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -57,6 +57,7 @@
   size_t _length;
   size_t _size;
   VideoRotation rotation_ = kVideoRotation_0;
+  VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
   bool _completeFrame = false;
   AdaptReason adapt_reason_;
   int qp_ = -1;  // Quantizer value.