Report timing frames info in GetStats.

Some frames are already marked as 'timing frames' via video-timing RTP header extension. Timestamps along full WebRTC pipeline are gathered for these frames. This CL implements reporting of these timestamps for a single
timing frame since the last GetStats(). The frame with the longest end-to-end delay between two consecutive GetStats calls is reported.

The purpose of this timing information is not to provide realtime statistics but to provide debugging information, as it will help identify problematic places in the video pipeline for outliers (frames which took the longest to process).

BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/2946413002
Cr-Commit-Position: refs/heads/master@{#18909}
diff --git a/webrtc/api/BUILD.gn b/webrtc/api/BUILD.gn
index 40741bc..249411b 100644
--- a/webrtc/api/BUILD.gn
+++ b/webrtc/api/BUILD.gn
@@ -173,6 +173,7 @@
     "video/video_frame_buffer.cc",
     "video/video_frame_buffer.h",
     "video/video_rotation.h",
+    "video/video_timing.cc",
     "video/video_timing.h",
   ]
 
diff --git a/webrtc/api/statstypes.cc b/webrtc/api/statstypes.cc
index b7459fa..f6f2689 100644
--- a/webrtc/api/statstypes.cc
+++ b/webrtc/api/statstypes.cc
@@ -598,6 +598,8 @@
       return "googTransportType";
     case kStatsValueNameTrackId:
       return "googTrackId";
+    case kStatsValueNameTimingFrameInfo:
+      return "googTimingFrameInfo";
     case kStatsValueNameTypingNoiseState:
       return "googTypingNoiseState";
     case kStatsValueNameWritable:
diff --git a/webrtc/api/statstypes.h b/webrtc/api/statstypes.h
index 8ef79d9..6fe6f37 100644
--- a/webrtc/api/statstypes.h
+++ b/webrtc/api/statstypes.h
@@ -210,6 +210,7 @@
     kStatsValueNameSrtpCipher,
     kStatsValueNameTargetDelayMs,
     kStatsValueNameTargetEncBitrate,
+    kStatsValueNameTimingFrameInfo,  // Result of |TimingFrameInfo::ToString|
     kStatsValueNameTrackId,
     kStatsValueNameTransmitBitrate,
     kStatsValueNameTransportType,
diff --git a/webrtc/api/video/video_timing.cc b/webrtc/api/video/video_timing.cc
new file mode 100644
index 0000000..a0a3f4d
--- /dev/null
+++ b/webrtc/api/video/video_timing.cc
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/video/video_timing.h"
+
+#include <sstream>
+
+namespace webrtc {
+
+TimingFrameInfo::TimingFrameInfo()
+    : rtp_timestamp(0),
+      capture_time_ms(-1),
+      encode_start_ms(-1),
+      encode_finish_ms(-1),
+      packetization_finish_ms(-1),
+      pacer_exit_ms(-1),
+      network_timestamp_ms(-1),
+      network2_timestamp_ms(-1),
+      receive_start_ms(-1),
+      receive_finish_ms(-1),
+      decode_start_ms(-1),
+      decode_finish_ms(-1),
+      render_time_ms(-1) {}
+
+int64_t TimingFrameInfo::EndToEndDelay() const {
+  return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
+}
+
+bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
+  int64_t other_delay = other.EndToEndDelay();
+  return other_delay == -1 || EndToEndDelay() > other_delay;
+}
+
+std::string TimingFrameInfo::ToString() const {
+  std::stringstream out;
+  out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms
+      << ',' << encode_finish_ms << ',' << packetization_finish_ms << ','
+      << pacer_exit_ms << ',' << network_timestamp_ms << ','
+      << network2_timestamp_ms << ',' << receive_start_ms << ','
+      << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
+      << ',' << render_time_ms;
+  return out.str();
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/video/video_timing.h b/webrtc/api/video/video_timing.h
index a44a8ef..05414de 100644
--- a/webrtc/api/video/video_timing.h
+++ b/webrtc/api/video/video_timing.h
@@ -12,14 +12,17 @@
 #define WEBRTC_API_VIDEO_VIDEO_TIMING_H_
 
 #include <stdint.h>
-#include <limits>
+
+#include <string>
+
 #include "webrtc/base/checks.h"
 #include "webrtc/base/safe_conversions.h"
 
 namespace webrtc {
 
-// Video timing timstamps in ms counted from capture_time_ms of a frame.
-struct VideoTiming {
+// Video timing timestamps in ms counted from capture_time_ms of a frame.
+// This structure represents data sent in video-timing RTP header extension.
+struct VideoSendTiming {
   static const uint8_t kEncodeStartDeltaIdx = 0;
   static const uint8_t kEncodeFinishDeltaIdx = 1;
   static const uint8_t kPacketizationFinishDeltaIdx = 2;
@@ -45,6 +48,44 @@
   bool is_timing_frame;
 };
 
+// Used to report precise timings of 'timing frames'. Contains all important
+// timestamps for the lifetime of that specific frame. Reported as a string via
+// GetStats(). Only the frame which took the longest between two GetStats calls
+// is reported.
+struct TimingFrameInfo {
+  TimingFrameInfo();
+
+  // Returns end-to-end delay of a frame, if sender and receiver timestamps are
+  // synchronized, -1 otherwise.
+  int64_t EndToEndDelay() const;
+
+  // Returns true if current frame took longer to process than |other| frame.
+  // If other frame's clocks are not synchronized, current frame is always
+  // preferred.
+  bool IsLongerThan(const TimingFrameInfo& other) const;
+
+  std::string ToString() const;
+
+  uint32_t rtp_timestamp;  // Identifier of a frame.
+  // All timestamps below are in the local monotonic clock of the receiver.
+  // If the sender clock is not yet estimated, the sender timestamps
+  // (capture_time_ms ... pacer_exit_ms) are negative values, but still
+  // relatively correct.
+  int64_t capture_time_ms;          // Capture time of a frame.
+  int64_t encode_start_ms;          // Encode start time.
+  int64_t encode_finish_ms;         // Encode completion time.
+  int64_t packetization_finish_ms;  // Time when frame was passed to pacer.
+  int64_t pacer_exit_ms;  // Time when last packet was pushed out of pacer.
+  // Two in-network RTP processor timestamps: meaning is application specific.
+  int64_t network_timestamp_ms;
+  int64_t network2_timestamp_ms;
+  int64_t receive_start_ms;   // First received packet time.
+  int64_t receive_finish_ms;  // Last received packet time.
+  int64_t decode_start_ms;    // Decode start time.
+  int64_t decode_finish_ms;   // Decode completion time.
+  int64_t render_time_ms;     // Proposed render time to ensure smooth playback.
+};
+
 }  // namespace webrtc
 
 #endif  // WEBRTC_API_VIDEO_VIDEO_TIMING_H_
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 7b62dbc..bf7566c 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -13,7 +13,6 @@
 
 #include <stddef.h>
 #include <string.h>
-
 #include <ostream>
 #include <string>
 #include <vector>
@@ -782,7 +781,7 @@
   VideoContentType videoContentType;
 
   bool has_video_timing;
-  VideoTiming video_timing;
+  VideoSendTiming video_timing;
 
   PlayoutDelay playout_delay = {-1, -1};
 
diff --git a/webrtc/media/base/mediachannel.h b/webrtc/media/base/mediachannel.h
index 816dfd1..1bea916 100644
--- a/webrtc/media/base/mediachannel.h
+++ b/webrtc/media/base/mediachannel.h
@@ -17,6 +17,7 @@
 
 #include "webrtc/api/rtpparameters.h"
 #include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/video/video_timing.h"
 #include "webrtc/base/basictypes.h"
 #include "webrtc/base/buffer.h"
 #include "webrtc/base/copyonwritebuffer.h"
@@ -747,8 +748,7 @@
         render_delay_ms(0),
         target_delay_ms(0),
         current_delay_ms(0),
-        capture_start_ntp_time_ms(-1) {
-  }
+        capture_start_ntp_time_ms(-1) {}
 
   std::vector<SsrcGroup> ssrc_groups;
   // TODO(hbos): Move this to |VideoMediaInfo::receive_codecs|?
@@ -793,6 +793,10 @@
 
   // Estimated capture start time in NTP time in ms.
   int64_t capture_start_ntp_time_ms;
+
+  // Timing frame info: all important timestamps for a full lifetime of a
+  // single 'timing frame'.
+  rtc::Optional<webrtc::TimingFrameInfo> timing_frame_info;
 };
 
 struct DataSenderInfo : public MediaSenderInfo {
diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc
index ec8f6b8..0f26a51 100644
--- a/webrtc/media/engine/fakewebrtccall.cc
+++ b/webrtc/media/engine/fakewebrtccall.cc
@@ -309,6 +309,11 @@
   return stats_;
 }
 
+rtc::Optional<webrtc::TimingFrameInfo>
+FakeVideoReceiveStream::GetAndResetTimingFrameInfo() {
+  return rtc::Optional<webrtc::TimingFrameInfo>();
+}
+
 void FakeVideoReceiveStream::Start() {
   receiving_ = true;
 }
diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h
index 34db106..e0e0f7a 100644
--- a/webrtc/media/engine/fakewebrtccall.h
+++ b/webrtc/media/engine/fakewebrtccall.h
@@ -205,6 +205,8 @@
 
   webrtc::VideoReceiveStream::Stats GetStats() const override;
 
+  rtc::Optional<webrtc::TimingFrameInfo> GetAndResetTimingFrameInfo() override;
+
   webrtc::VideoReceiveStream::Config config_;
   bool receiving_;
   webrtc::VideoReceiveStream::Stats stats_;
diff --git a/webrtc/media/engine/webrtcvideoengine.cc b/webrtc/media/engine/webrtcvideoengine.cc
index e592740..2eab3ce 100644
--- a/webrtc/media/engine/webrtcvideoengine.cc
+++ b/webrtc/media/engine/webrtcvideoengine.cc
@@ -2483,6 +2483,8 @@
   info.plis_sent = stats.rtcp_packet_type_counts.pli_packets;
   info.nacks_sent = stats.rtcp_packet_type_counts.nack_packets;
 
+  info.timing_frame_info = stream_->GetAndResetTimingFrameInfo();
+
   if (log_stats)
     LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
 
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
index b719226..abf8b94 100644
--- a/webrtc/modules/include/module_common_types.h
+++ b/webrtc/modules/include/module_common_types.h
@@ -61,7 +61,7 @@
 
   VideoContentType content_type;
 
-  VideoTiming video_timing;
+  VideoSendTiming video_timing;
 
   bool is_first_packet_in_frame;
   uint8_t simulcastIdx;  // Index if the simulcast encoder creating
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index f630baf..c3fc9dc 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -266,40 +266,41 @@
 constexpr const char* VideoTimingExtension::kUri;
 
 bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data,
-                                 VideoTiming* timing) {
+                                 VideoSendTiming* timing) {
   RTC_DCHECK(timing);
   if (data.size() != kValueSizeBytes)
     return false;
   timing->encode_start_delta_ms =
       ByteReader<uint16_t>::ReadBigEndian(data.data());
   timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoTiming::kEncodeFinishDeltaIdx);
+      data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx);
   timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoTiming::kPacketizationFinishDeltaIdx);
+      data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx);
   timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoTiming::kPacerExitDeltaIdx);
+      data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx);
   timing->network_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoTiming::kNetworkTimestampDeltaIdx);
+      data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx);
   timing->network2_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoTiming::kNetwork2TimestampDeltaIdx);
+      data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx);
   timing->is_timing_frame = true;
   return true;
 }
 
-bool VideoTimingExtension::Write(uint8_t* data, const VideoTiming& timing) {
+bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) {
   ByteWriter<uint16_t>::WriteBigEndian(data, timing.encode_start_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoTiming::kEncodeFinishDeltaIdx,
+      data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx,
       timing.encode_finish_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoTiming::kPacketizationFinishDeltaIdx,
+      data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx,
       timing.packetization_finish_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoTiming::kPacerExitDeltaIdx, timing.pacer_exit_delta_ms);
+      data + 2 * VideoSendTiming::kPacerExitDeltaIdx,
+      timing.pacer_exit_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoTiming::kNetworkTimestampDeltaIdx, 0);  // reserved
+      data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0);  // reserved
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoTiming::kNetwork2TimestampDeltaIdx, 0);  // reserved
+      data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0);  // reserved
   return true;
 }
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index c0a4bca..c637a84 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -134,9 +134,10 @@
   static constexpr const char* kUri =
       "http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
 
-  static bool Parse(rtc::ArrayView<const uint8_t> data, VideoTiming* timing);
-  static size_t ValueSize(const VideoTiming&) { return kValueSizeBytes; }
-  static bool Write(uint8_t* data, const VideoTiming& timing);
+  static bool Parse(rtc::ArrayView<const uint8_t> data,
+                    VideoSendTiming* timing);
+  static size_t ValueSize(const VideoSendTiming&) { return kValueSizeBytes; }
+  static bool Write(uint8_t* data, const VideoSendTiming& timing);
 
   static size_t ValueSize(uint16_t time_delta_ms, uint8_t idx) {
     return kValueSizeBytes;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
index 55bad2d..5d5b31b 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -32,26 +32,26 @@
 
   void set_packetization_finish_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
-        VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoTiming::kPacketizationFinishDeltaIdx);
+        VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+        VideoSendTiming::kPacketizationFinishDeltaIdx);
   }
 
   void set_pacer_exit_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
-        VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoTiming::kPacerExitDeltaIdx);
+        VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+        VideoSendTiming::kPacerExitDeltaIdx);
   }
 
   void set_network_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
-        VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoTiming::kNetworkTimestampDeltaIdx);
+        VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+        VideoSendTiming::kNetworkTimestampDeltaIdx);
   }
 
   void set_network2_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
-        VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoTiming::kNetwork2TimestampDeltaIdx);
+        VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+        VideoSendTiming::kNetwork2TimestampDeltaIdx);
   }
 
  private:
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index bcfa650..89c1d1d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -473,7 +473,7 @@
   packet->SetMarker(true);
   packet->SetTimestamp(kTimestamp);
   packet->set_capture_time_ms(capture_time_ms);
-  const VideoTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
+  const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
   packet->SetExtension<VideoTimingExtension>(kVideoTiming);
   EXPECT_TRUE(rtp_sender_->AssignSequenceNumber(packet.get()));
   size_t packet_size = packet->size();
@@ -1578,7 +1578,7 @@
   rtp_sender_video_->SendVideo(kRtpVideoGeneric, kVideoFrameKey, kPayload,
                                kTimestamp, kCaptureTimestamp, kFrame,
                                sizeof(kFrame), nullptr, &hdr);
-  VideoTiming timing;
+  VideoSendTiming timing;
   EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
       &timing));
   EXPECT_EQ(kPacketizationTimeMs, timing.packetization_finish_delta_ms);
diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc
index 1114e7c..2beffc7 100644
--- a/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/webrtc/modules/video_coding/frame_buffer2.cc
@@ -149,6 +149,7 @@
       }
 
       UpdateJitterDelay();
+      UpdateTimingFrameInfo();
       PropagateDecodability(next_frame_it_->second);
 
       // Sanity check for RTP timestamp monotonicity.
@@ -534,8 +535,15 @@
   }
 }
 
+void FrameBuffer::UpdateTimingFrameInfo() {
+  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
+  rtc::Optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
+  if (info)
+    stats_callback_->OnTimingFrameInfoUpdated(*info);
+}
+
 void FrameBuffer::ClearFramesAndHistory() {
-  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
+  TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
   frames_.clear();
   last_decoded_frame_it_ = frames_.end();
   last_continuous_frame_it_ = frames_.end();
diff --git a/webrtc/modules/video_coding/frame_buffer2.h b/webrtc/modules/video_coding/frame_buffer2.h
index ffeb2aa..25ba0a9 100644
--- a/webrtc/modules/video_coding/frame_buffer2.h
+++ b/webrtc/modules/video_coding/frame_buffer2.h
@@ -153,6 +153,8 @@
 
   void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
+  void UpdateTimingFrameInfo() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
   void ClearFramesAndHistory() EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
   bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)
diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index 58b3f7a..9d15302 100644
--- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -116,6 +116,7 @@
                     int jitter_buffer_ms,
                     int min_playout_delay_ms,
                     int render_delay_ms));
+  MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
 };
 
 class TestFrameBuffer2 : public ::testing::Test {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index 42ee8b6..80b08b5 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -10,6 +10,8 @@
 
 #include "webrtc/modules/video_coding/generic_decoder.h"
 
+#include <algorithm>
+
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/timeutils.h"
@@ -91,6 +93,7 @@
 
   // Report timing information.
   if (frameInfo->timing.is_timing_frame) {
+    int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
     // Convert remote timestamps to local time from ntp timestamps.
     frameInfo->timing.encode_start_ms -= ntp_offset_;
     frameInfo->timing.encode_finish_ms -= ntp_offset_;
@@ -98,19 +101,44 @@
     frameInfo->timing.pacer_exit_ms -= ntp_offset_;
     frameInfo->timing.network_timestamp_ms -= ntp_offset_;
     frameInfo->timing.network2_timestamp_ms -= ntp_offset_;
-    // TODO(ilnik): Report timing information here.
-    // Capture time: decodedImage.ntp_time_ms() - ntp_offset
-    // Encode start: frameInfo->timing.encode_start_ms
-    // Encode finish: frameInfo->timing.encode_finish_ms
-    // Packetization done: frameInfo->timing.packetization_finish_ms
-    // Pacer exit: frameInfo->timing.pacer_exit_ms
-    // Network timestamp: frameInfo->timing.network_timestamp_ms
-    // Network2 timestamp: frameInfo->timing.network2_timestamp_ms
-    // Receive start: frameInfo->timing.receive_start_ms
-    // Receive finish: frameInfo->timing.receive_finish_ms
-    // Decode start: frameInfo->decodeStartTimeMs
-    // Decode finish: now_ms
-    // Render time: frameInfo->renderTimeMs
+
+    int64_t sender_delta_ms = 0;
+    if (decodedImage.ntp_time_ms() < 0) {
+      // Sender clock is not estimated yet. Make sure that sender times are all
+      // negative to indicate that. Yet they still should be relatively correct.
+      sender_delta_ms =
+          std::max({capture_time_ms, frameInfo->timing.encode_start_ms,
+                    frameInfo->timing.encode_finish_ms,
+                    frameInfo->timing.packetization_finish_ms,
+                    frameInfo->timing.pacer_exit_ms,
+                    frameInfo->timing.network_timestamp_ms,
+                    frameInfo->timing.network2_timestamp_ms}) +
+          1;
+    }
+
+    TimingFrameInfo timing_frame_info;
+
+    timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms;
+    timing_frame_info.encode_start_ms =
+        frameInfo->timing.encode_start_ms - sender_delta_ms;
+    timing_frame_info.encode_finish_ms =
+        frameInfo->timing.encode_finish_ms - sender_delta_ms;
+    timing_frame_info.packetization_finish_ms =
+        frameInfo->timing.packetization_finish_ms - sender_delta_ms;
+    timing_frame_info.pacer_exit_ms =
+        frameInfo->timing.pacer_exit_ms - sender_delta_ms;
+    timing_frame_info.network_timestamp_ms =
+        frameInfo->timing.network_timestamp_ms - sender_delta_ms;
+    timing_frame_info.network2_timestamp_ms =
+        frameInfo->timing.network2_timestamp_ms - sender_delta_ms;
+    timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms;
+    timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms;
+    timing_frame_info.decode_start_ms = frameInfo->decodeStartTimeMs;
+    timing_frame_info.decode_finish_ms = now_ms;
+    timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
+    timing_frame_info.rtp_timestamp = decodedImage.timestamp();
+
+    _timing->SetTimingFrameInfo(timing_frame_info);
   }
 
   decodedImage.set_timestamp_us(
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 6ed472d..19d3813 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -109,6 +109,8 @@
                                            int min_playout_delay_ms,
                                            int render_delay_ms) = 0;
 
+  virtual void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) = 0;
+
  protected:
   virtual ~VCMReceiveStatisticsCallback() {}
 };
diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc
index 21f4a3d..340f2c1 100644
--- a/webrtc/modules/video_coding/timing.cc
+++ b/webrtc/modules/video_coding/timing.cc
@@ -21,21 +21,22 @@
 namespace webrtc {
 
 VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
-  : clock_(clock),
-    master_(false),
-    ts_extrapolator_(),
-    codec_timer_(new VCMCodecTimer()),
-    render_delay_ms_(kDefaultRenderDelayMs),
-    min_playout_delay_ms_(0),
-    max_playout_delay_ms_(10000),
-    jitter_delay_ms_(0),
-    current_delay_ms_(0),
-    last_decode_ms_(0),
-    prev_frame_timestamp_(0),
-    num_decoded_frames_(0),
-    num_delayed_decoded_frames_(0),
-    first_decoded_frame_ms_(-1),
-    sum_missed_render_deadline_ms_(0) {
+    : clock_(clock),
+      master_(false),
+      ts_extrapolator_(),
+      codec_timer_(new VCMCodecTimer()),
+      render_delay_ms_(kDefaultRenderDelayMs),
+      min_playout_delay_ms_(0),
+      max_playout_delay_ms_(10000),
+      jitter_delay_ms_(0),
+      current_delay_ms_(0),
+      last_decode_ms_(0),
+      prev_frame_timestamp_(0),
+      timing_frame_info_(),
+      num_decoded_frames_(0),
+      num_delayed_decoded_frames_(0),
+      first_decoded_frame_ms_(-1),
+      sum_missed_render_deadline_ms_(0) {
   if (master_timing == NULL) {
     master_ = true;
     ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
@@ -304,4 +305,14 @@
   return (num_decoded_frames_ > 0);
 }
 
+void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) {
+  rtc::CritScope cs(&crit_sect_);
+  timing_frame_info_.emplace(info);
+}
+
+rtc::Optional<TimingFrameInfo> VCMTiming::GetTimingFrameInfo() {
+  rtc::CritScope cs(&crit_sect_);
+  return timing_frame_info_;
+}
+
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h
index bfd6778..82252bf 100644
--- a/webrtc/modules/video_coding/timing.h
+++ b/webrtc/modules/video_coding/timing.h
@@ -102,6 +102,9 @@
                           int* min_playout_delay_ms,
                           int* render_delay_ms) const;
 
+  void SetTimingFrameInfo(const TimingFrameInfo& info);
+  rtc::Optional<TimingFrameInfo> GetTimingFrameInfo();
+
   enum { kDefaultRenderDelayMs = 10 };
   enum { kDelayMaxChangeMsPerS = 100 };
 
@@ -131,6 +134,7 @@
   int current_delay_ms_ GUARDED_BY(crit_sect_);
   int last_decode_ms_ GUARDED_BY(crit_sect_);
   uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
+  rtc::Optional<TimingFrameInfo> timing_frame_info_ GUARDED_BY(crit_sect_);
 
   // Statistics.
   size_t num_decoded_frames_ GUARDED_BY(crit_sect_);
diff --git a/webrtc/pc/statscollector.cc b/webrtc/pc/statscollector.cc
index c932583..27cb6c3 100644
--- a/webrtc/pc/statscollector.cc
+++ b/webrtc/pc/statscollector.cc
@@ -249,6 +249,10 @@
   for (const auto& i : ints)
     report->AddInt(i.name, i.value);
   report->AddString(StatsReport::kStatsValueNameMediaType, "video");
+  if (info.timing_frame_info) {
+    report->AddString(StatsReport::kStatsValueNameTimingFrameInfo,
+                      info.timing_frame_info->ToString());
+  }
 }
 
 void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {
diff --git a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
index db41e1a..fc53216 100644
--- a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -91,7 +91,7 @@
         packet.GetExtension<VideoContentTypeExtension>(&content_type);
         break;
       case kRtpExtensionVideoTiming:
-        VideoTiming timing;
+        VideoSendTiming timing;
         packet.GetExtension<VideoTimingExtension>(&timing);
         break;
       case kRtpExtensionRtpStreamId: {
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index f876621..b76a57d 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -3442,6 +3442,59 @@
   RunBaseTest(&test);
 }
 
+TEST_F(EndToEndTest, GetTimingFrameInfoReportsTimingFrames) {
+  static const int kExtensionId = 5;
+
+  class StatsObserver : public test::EndToEndTest {
+   public:
+    StatsObserver() : EndToEndTest(kLongTimeoutMs) {}
+
+   private:
+    std::string CompoundKey(const char* name, uint32_t ssrc) {
+      std::ostringstream oss;
+      oss << name << "_" << ssrc;
+      return oss.str();
+    }
+
+    void ModifyVideoConfigs(
+        VideoSendStream::Config* send_config,
+        std::vector<VideoReceiveStream::Config>* receive_configs,
+        VideoEncoderConfig* encoder_config) override {
+      send_config->rtp.extensions.clear();
+      send_config->rtp.extensions.push_back(
+          RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId));
+      for (size_t i = 0; i < receive_configs->size(); ++i) {
+        (*receive_configs)[i].rtp.extensions.clear();
+        (*receive_configs)[i].rtp.extensions.push_back(
+            RtpExtension(RtpExtension::kVideoTimingUri, kExtensionId));
+      }
+    }
+
+    void OnVideoStreamsCreated(
+        VideoSendStream* send_stream,
+        const std::vector<VideoReceiveStream*>& receive_streams) override {
+      receive_streams_ = receive_streams;
+    }
+
+    void PerformTest() override {
+      // No frames reported initially.
+      for (size_t i = 0; i < receive_streams_.size(); ++i) {
+        EXPECT_FALSE(receive_streams_[i]->GetAndResetTimingFrameInfo());
+      }
+      // Wait for at least one timing frame to be sent with 100ms grace period.
+      SleepMs(kDefaultTimingFramesDelayMs + 100);
+      // Check that timing frames are reported for each stream.
+      for (size_t i = 0; i < receive_streams_.size(); ++i) {
+        EXPECT_TRUE(receive_streams_[i]->GetAndResetTimingFrameInfo());
+      }
+    }
+
+    std::vector<VideoReceiveStream*> receive_streams_;
+  } test;
+
+  RunBaseTest(&test);
+}
+
 class RtcpXrObserver : public test::EndToEndTest {
  public:
   RtcpXrObserver(bool enable_rrtr, bool enable_target_bitrate)
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
index e0e95ae..58fcfd6 100644
--- a/webrtc/video/payload_router.cc
+++ b/webrtc/video/payload_router.cc
@@ -132,11 +132,13 @@
   rtp_video_header.content_type = encoded_image.content_type_;
   if (encoded_image.timing_.is_timing_frame) {
     rtp_video_header.video_timing.encode_start_delta_ms =
-        VideoTiming::GetDeltaCappedMs(encoded_image.capture_time_ms_,
-                                      encoded_image.timing_.encode_start_ms);
+        VideoSendTiming::GetDeltaCappedMs(
+            encoded_image.capture_time_ms_,
+            encoded_image.timing_.encode_start_ms);
     rtp_video_header.video_timing.encode_finish_delta_ms =
-        VideoTiming::GetDeltaCappedMs(encoded_image.capture_time_ms_,
-                                      encoded_image.timing_.encode_finish_ms);
+        VideoSendTiming::GetDeltaCappedMs(
+            encoded_image.capture_time_ms_,
+            encoded_image.timing_.encode_finish_ms);
     rtp_video_header.video_timing.packetization_finish_delta_ms = 0;
     rtp_video_header.video_timing.pacer_exit_delta_ms = 0;
     rtp_video_header.video_timing.network_timstamp_delta_ms = 0;
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index 2af1912..62d0b1c 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -406,6 +406,17 @@
   return stats_;
 }
 
+rtc::Optional<TimingFrameInfo>
+ReceiveStatisticsProxy::GetAndResetTimingFrameInfo() {
+  rtc::CritScope lock(&crit_);
+  rtc::Optional<TimingFrameInfo> info = timing_frame_info_;
+  // Reset the reported value to empty, so it will always be
+  // overwritten in |OnTimingFrameInfoUpdated|, allowing a new value to be
+  // stored instead of the reported one.
+  timing_frame_info_.reset();
+  return info;
+}
+
 void ReceiveStatisticsProxy::OnIncomingPayloadType(int payload_type) {
   rtc::CritScope lock(&crit_);
   stats_.current_payload_type = payload_type;
@@ -464,6 +475,17 @@
                        "ssrc", stats_.ssrc);
 }
 
+void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated(
+    const TimingFrameInfo& info) {
+  rtc::CritScope lock(&crit_);
+  // Only the frame which took the longest to process since the last
+  // GetStats() call is reported. Therefore, only a single 'longest' frame is
+  // stored.
+  if (!timing_frame_info_ || info.IsLongerThan(*timing_frame_info_)) {
+    timing_frame_info_.emplace(info);
+  }
+}
+
 void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
     uint32_t ssrc,
     const RtcpPacketTypeCounter& packet_counter) {
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index 626b56d..1cfeca6 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -46,6 +46,8 @@
 
   VideoReceiveStream::Stats GetStats() const;
 
+  rtc::Optional<TimingFrameInfo> GetAndResetTimingFrameInfo();
+
   void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
   void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
   void OnRenderedFrame(const VideoFrame& frame);
@@ -69,6 +71,8 @@
                                    int min_playout_delay_ms,
                                    int render_delay_ms) override;
 
+  void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override;
+
   // Overrides RtcpStatisticsCallback.
   void StatisticsUpdated(const webrtc::RtcpStatistics& statistics,
                          uint32_t ssrc) override;
@@ -157,6 +161,7 @@
   mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
   VideoContentType last_content_type_ GUARDED_BY(&crit_);
   rtc::Optional<int64_t> last_decoded_frame_time_ms_;
+  rtc::Optional<TimingFrameInfo> timing_frame_info_ GUARDED_BY(&crit_);
 };
 
 }  // namespace webrtc
diff --git a/webrtc/video/receive_statistics_proxy_unittest.cc b/webrtc/video/receive_statistics_proxy_unittest.cc
index e219542..2c4d330 100644
--- a/webrtc/video/receive_statistics_proxy_unittest.cc
+++ b/webrtc/video/receive_statistics_proxy_unittest.cc
@@ -247,6 +247,48 @@
   EXPECT_STREQ("", statistics_proxy_->GetStats().c_name.c_str());
 }
 
+TEST_F(ReceiveStatisticsProxyTest,
+       GetTimingFrameInfoReportsLongestTimingFrame) {
+  const int64_t kShortEndToEndDelay = 10;
+  const int64_t kMedEndToEndDelay = 20;
+  const int64_t kLongEndToEndDelay = 100;
+  const uint32_t kExpectedRtpTimestamp = 2;
+  TimingFrameInfo info;
+  rtc::Optional<TimingFrameInfo> result;
+  info.rtp_timestamp = kExpectedRtpTimestamp - 1;
+  info.capture_time_ms = 0;
+  info.decode_finish_ms = kShortEndToEndDelay;
+  statistics_proxy_->OnTimingFrameInfoUpdated(info);
+  info.rtp_timestamp =
+      kExpectedRtpTimestamp;  // This frame should be reported in the end.
+  info.capture_time_ms = 0;
+  info.decode_finish_ms = kLongEndToEndDelay;
+  statistics_proxy_->OnTimingFrameInfoUpdated(info);
+  info.rtp_timestamp = kExpectedRtpTimestamp + 1;
+  info.capture_time_ms = 0;
+  info.decode_finish_ms = kMedEndToEndDelay;
+  statistics_proxy_->OnTimingFrameInfoUpdated(info);
+  result = statistics_proxy_->GetAndResetTimingFrameInfo();
+  EXPECT_TRUE(result);
+  EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp);
+}
+
+TEST_F(ReceiveStatisticsProxyTest, GetTimingFrameInfoTimingFramesReportedOnce) {
+  const int64_t kShortEndToEndDelay = 10;
+  const uint32_t kExpectedRtpTimestamp = 2;
+  TimingFrameInfo info;
+  rtc::Optional<TimingFrameInfo> result;
+  info.rtp_timestamp = kExpectedRtpTimestamp;
+  info.capture_time_ms = 0;
+  info.decode_finish_ms = kShortEndToEndDelay;
+  statistics_proxy_->OnTimingFrameInfoUpdated(info);
+  result = statistics_proxy_->GetAndResetTimingFrameInfo();
+  EXPECT_TRUE(result);
+  EXPECT_EQ(kExpectedRtpTimestamp, result->rtp_timestamp);
+  result = statistics_proxy_->GetAndResetTimingFrameInfo();
+  EXPECT_FALSE(result);
+}
+
 TEST_F(ReceiveStatisticsProxyTest, LifetimeHistogramIsUpdated) {
   const int64_t kTimeSec = 3;
   fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000);
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index acc497b..f21cf72 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -344,6 +344,11 @@
   return stats_proxy_.GetStats();
 }
 
+rtc::Optional<TimingFrameInfo>
+VideoReceiveStream::GetAndResetTimingFrameInfo() {
+  return stats_proxy_.GetAndResetTimingFrameInfo();
+}
+
 void VideoReceiveStream::EnableEncodedFrameRecording(rtc::PlatformFile file,
                                                      size_t byte_limit) {
   {
diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h
index a32573d..6e2b20a 100644
--- a/webrtc/video/video_receive_stream.h
+++ b/webrtc/video/video_receive_stream.h
@@ -72,6 +72,8 @@
 
   webrtc::VideoReceiveStream::Stats GetStats() const override;
 
+  rtc::Optional<TimingFrameInfo> GetAndResetTimingFrameInfo() override;
+
   // Takes ownership of the file, is responsible for closing it later.
   // Calling this method will close and finalize any current log.
   // Giving rtc::kInvalidPlatformFileValue disables logging.
diff --git a/webrtc/video/video_stream_decoder.cc b/webrtc/video/video_stream_decoder.cc
index a7688ce..126d6e5 100644
--- a/webrtc/video/video_stream_decoder.cc
+++ b/webrtc/video/video_stream_decoder.cc
@@ -119,6 +119,9 @@
                                                      int min_playout_delay_ms,
                                                      int render_delay_ms) {}
 
+void VideoStreamDecoder::OnTimingFrameInfoUpdated(const TimingFrameInfo& info) {
+}
+
 void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe, size_t size_bytes) {}
 
 void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h
index b670b12..3903fb5 100644
--- a/webrtc/video/video_stream_decoder.h
+++ b/webrtc/video/video_stream_decoder.h
@@ -78,6 +78,8 @@
                                    int min_playout_delay_ms,
                                    int render_delay_ms) override;
 
+  void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) override;
+
   void RegisterReceiveStatisticsProxy(
       ReceiveStatisticsProxy* receive_statistics_proxy);
 
diff --git a/webrtc/video_receive_stream.h b/webrtc/video_receive_stream.h
index b3a98c6..66dd78b 100644
--- a/webrtc/video_receive_stream.h
+++ b/webrtc/video_receive_stream.h
@@ -207,6 +207,8 @@
   // TODO(pbos): Add info on currently-received codec to Stats.
   virtual Stats GetStats() const = 0;
 
+  virtual rtc::Optional<TimingFrameInfo> GetAndResetTimingFrameInfo() = 0;
+
   // Takes ownership of the file, is responsible for closing it later.
   // Calling this method will close and finalize any current log.
   // Giving rtc::kInvalidPlatformFileValue disables logging.