Reland of Add a flags field to video timing extension. (patchset #1 id:1 of https://codereview.webrtc.org/2995953002/ )

Reason for revert:
Creating a reland CL to which the fix can be added.

Original issue's description:
> Revert of Add a flags field to video timing extension. (patchset #15 id:280001 of https://codereview.webrtc.org/3000753002/ )
>
> Reason for revert:
> Speculative revert for breaking remoting_unittests on the FYI bots.
> https://build.chromium.org/p/chromium.webrtc.fyi/waterfall?builder=Win7%20Tester
>
> Original issue's description:
> > Add a flags field to video timing extension.
> >
> > The RTP header extension for video timing should have an additional
> > field for signaling metadata, such as what triggered the extension for
> > this particular frame. This will allow separating frames selected because
> > of outlier sizes from regular frames, for more accurate stats.
> >
> > This implementation is backwards compatible in that it can read video
> > timing extensions without the new flag field, but it always sends with
> > it included.
> >
> > BUG=webrtc:7594
> >
> > Review-Url: https://codereview.webrtc.org/3000753002
> > Cr-Commit-Position: refs/heads/master@{#19353}
> > Committed: https://chromium.googlesource.com/external/webrtc/+/cf5d485e147f7d7b3081692f101e496ce9e1d257
>
> TBR=danilchap@webrtc.org,kthelgason@webrtc.org,stefan@webrtc.org,sprang@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:7594
>
> Review-Url: https://codereview.webrtc.org/2995953002
> Cr-Commit-Position: refs/heads/master@{#19360}
> Committed: https://chromium.googlesource.com/external/webrtc/+/f0f7378b059501bb2bc5d006bf0f43546e47328f

TBR=danilchap@webrtc.org,kthelgason@webrtc.org,stefan@webrtc.org,emircan@google.com
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/2996153002
Cr-Commit-Position: refs/heads/master@{#19405}
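
For context, a minimal standalone sketch (not part of this CL) of how a receiver could interpret the new extension, covering both the 13-byte format with the leading flags byte and the legacy 12-byte format. The names ParseTiming, ReadU16BE and the example namespace are hypothetical; the actual logic lives in VideoTimingExtension::Parse in rtp_header_extensions.cc below.

#include <cstddef>
#include <cstdint>

namespace example {

// Illustrative flag values, mirroring the TimingFrameFlags enum added below.
constexpr uint8_t kTriggeredByTimer = 1 << 0;  // Marked by the periodic timer.
constexpr uint8_t kTriggeredBySize = 1 << 1;   // Marked as a size outlier.
constexpr uint8_t kInvalid = 255;              // Timing data should be ignored.
constexpr size_t kNewFormatSize = 13;          // 1 flags byte + 6 x 16-bit deltas.

struct ParsedTiming {
  uint8_t flags = kInvalid;
  uint16_t encode_start_delta_ms = 0;
  // Remaining five delta fields elided for brevity.
};

inline uint16_t ReadU16BE(const uint8_t* p) {
  return static_cast<uint16_t>((p[0] << 8) | p[1]);
}

// Accepts the new 13-byte format (leading flags byte) as well as the legacy
// 12-byte format, where flags are implied to be 0 ("valid, no metadata").
inline bool ParseTiming(const uint8_t* data, size_t size, ParsedTiming* out) {
  size_t delta_offset = 0;
  if (size == kNewFormatSize) {
    out->flags = data[0];
    delta_offset = 1;  // Deltas start after the flags byte.
  } else if (size == kNewFormatSize - 1) {
    out->flags = 0;  // Legacy sender: data is valid but carries no flags.
  } else {
    return false;
  }
  out->encode_start_delta_ms = ReadU16BE(data + delta_offset);
  return true;
}

}  // namespace example

// A consumer would then check why the frame was traced, e.g.:
//   if (t.flags != example::kInvalid && (t.flags & example::kTriggeredBySize))
//     ++outlier_frame_count;
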
diff --git a/webrtc/api/video/video_timing.cc b/webrtc/api/video/video_timing.cc
index a0a3f4d..0d22f90 100644
--- a/webrtc/api/video/video_timing.cc
+++ b/webrtc/api/video/video_timing.cc
@@ -27,7 +27,8 @@
       receive_finish_ms(-1),
       decode_start_ms(-1),
       decode_finish_ms(-1),
-      render_time_ms(-1) {}
+      render_time_ms(-1),
+      flags(TimingFrameFlags::kDefault) {}
 
 int64_t TimingFrameInfo::EndToEndDelay() const {
   return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
@@ -38,14 +39,31 @@
   return other_delay == -1 || EndToEndDelay() > other_delay;
 }
 
+bool TimingFrameInfo::IsOutlier() const {
+  return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredBySize);
+}
+
+bool TimingFrameInfo::IsTimerTriggered() const {
+  return !IsInvalid() && (flags & TimingFrameFlags::kTriggeredByTimer);
+}
+
+bool TimingFrameInfo::IsInvalid() const {
+  return flags == TimingFrameFlags::kInvalid;
+}
+
 std::string TimingFrameInfo::ToString() const {
   std::stringstream out;
-  out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms
-      << ',' << encode_finish_ms << ',' << packetization_finish_ms << ','
-      << pacer_exit_ms << ',' << network_timestamp_ms << ','
-      << network2_timestamp_ms << ',' << receive_start_ms << ','
-      << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
-      << ',' << render_time_ms;
+  if (IsInvalid()) {
+    out << "[Invalid]";
+  } else {
+    out << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms
+        << ',' << encode_finish_ms << ',' << packetization_finish_ms << ','
+        << pacer_exit_ms << ',' << network_timestamp_ms << ','
+        << network2_timestamp_ms << ',' << receive_start_ms << ','
+        << receive_finish_ms << ',' << decode_start_ms << ','
+        << decode_finish_ms << ',' << render_time_ms << ", outlier_triggered "
+        << IsOutlier() << ", timer_triggered " << IsTimerTriggered();
+  }
   return out.str();
 }
 
diff --git a/webrtc/api/video/video_timing.h b/webrtc/api/video/video_timing.h
index 44991df..f513463 100644
--- a/webrtc/api/video/video_timing.h
+++ b/webrtc/api/video/video_timing.h
@@ -13,6 +13,7 @@
 
 #include <stdint.h>
 
+#include <limits>
 #include <string>
 
 #include "webrtc/rtc_base/checks.h"
@@ -20,15 +21,25 @@
 
 namespace webrtc {
 
+enum TimingFrameFlags : uint8_t {
+  kDefault = 0,                // No flags set (used by old protocol)
+  kTriggeredByTimer = 1 << 0,  // Frame marked for tracing by periodic timer.
+  kTriggeredBySize = 1 << 1,   // Frame marked for tracing due to size.
+  kInvalid = std::numeric_limits<uint8_t>::max()  // Invalid, ignore!
+};
+
 // Video timing timestamps in ms counted from capture_time_ms of a frame.
 // This structure represents data sent in video-timing RTP header extension.
 struct VideoSendTiming {
-  static const uint8_t kEncodeStartDeltaIdx = 0;
-  static const uint8_t kEncodeFinishDeltaIdx = 1;
-  static const uint8_t kPacketizationFinishDeltaIdx = 2;
-  static const uint8_t kPacerExitDeltaIdx = 3;
-  static const uint8_t kNetworkTimestampDeltaIdx = 4;
-  static const uint8_t kNetwork2TimestampDeltaIdx = 5;
+  // Offsets of the fields in the RTP header extension, counting from the first
+  // byte after the one-byte header.
+  static constexpr uint8_t kFlagsOffset = 0;
+  static constexpr uint8_t kEncodeStartDeltaOffset = 1;
+  static constexpr uint8_t kEncodeFinishDeltaOffset = 3;
+  static constexpr uint8_t kPacketizationFinishDeltaOffset = 5;
+  static constexpr uint8_t kPacerExitDeltaOffset = 7;
+  static constexpr uint8_t kNetworkTimestampDeltaOffset = 9;
+  static constexpr uint8_t kNetwork2TimestampDeltaOffset = 11;
 
   // Returns |time_ms - base_ms| capped at max 16-bit value.
   // Used to fill this data structure as per
@@ -45,7 +56,7 @@
   uint16_t pacer_exit_delta_ms;
   uint16_t network_timstamp_delta_ms;
   uint16_t network2_timstamp_delta_ms;
-  bool is_timing_frame;
+  uint8_t flags;
 };
 
 // Used to report precise timings of a 'timing frames'. Contains all important
@@ -64,6 +75,18 @@
   // preferred.
   bool IsLongerThan(const TimingFrameInfo& other) const;
 
+  // Returns true if flags are set to indicate this frame was marked for
+  // tracing due to the size being outside some limit.
+  bool IsOutlier() const;
+
+  // Returns true if flags are set to indicate this frame was marked for
+  // tracing due to the cyclic timer.
+  bool IsTimerTriggered() const;
+
+  // Returns true if the timing data is marked as invalid, in which case it
+  // should be ignored.
+  bool IsInvalid() const;
+
   std::string ToString() const;
 
   uint32_t rtp_timestamp;  // Identifier of a frame.
@@ -84,6 +107,8 @@
   int64_t decode_start_ms;    // Decode start time.
   int64_t decode_finish_ms;   // Decode completion time.
   int64_t render_time_ms;     // Proposed render time to insure smooth playback.
+
+  uint8_t flags;  // Flags indicating validity and/or why tracing was triggered.
 };
 
 }  // namespace webrtc
diff --git a/webrtc/common_video/include/video_frame.h b/webrtc/common_video/include/video_frame.h
index a5f0e52..4f8ed08 100644
--- a/webrtc/common_video/include/video_frame.h
+++ b/webrtc/common_video/include/video_frame.h
@@ -66,7 +66,7 @@
 
   // Timing information should be updatable on const instances.
   mutable struct Timing {
-    bool is_timing_frame = false;
+    uint8_t flags = TimingFrameFlags::kInvalid;
     int64_t encode_start_ms = 0;
     int64_t encode_finish_ms = 0;
     int64_t packetization_finish_ms = 0;
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index e01aa01..8678700 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -49,7 +49,6 @@
 
 void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
                                 int64_t encode_finish_ms) const {
-    timing_.is_timing_frame = true;
     timing_.encode_start_ms = encode_start_ms;
     timing_.encode_finish_ms = encode_finish_ms;
 }
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index 5e172c4..28685b4 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -248,18 +248,24 @@
 // Video Timing.
 // 6 timestamps in milliseconds counted from capture time stored in rtp header:
 // encode start/finish, packetization complete, pacer exit and reserved for
-// modification by the network modification.
+// modification by the network. |flags| is a bitmask and has the
+// following allowed values:
+// 0 = Valid data, but no flags available (backwards compatibility)
+// 1 = Frame marked as timing frame due to cyclic timer.
+// 2 = Frame marked as timing frame due to size being outside limit.
+// 255 = Invalid. The whole timing frame extension should be ignored.
+//
 //    0                   1                   2                   3
 //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2
 //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   |  ID   | len=11|  encode start ms delta          | encode finish |
+//   |  ID   | len=12|     flags     |     encode start ms delta       |
 //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   | ms delta      |  packetizer finish ms delta     | pacer exit    |
+//   |    encode finish ms delta     |   packetizer finish ms delta    |
 //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   | ms delta      |  network timestamp ms delta     | network2 time-|
+//   |     pacer exit ms delta       |   network timestamp ms delta    |
 //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   | stamp ms delta|
-//   +-+-+-+-+-+-+-+-+
+//   |  network2 timestamp ms delta  |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 
 constexpr RTPExtensionType VideoTimingExtension::kId;
 constexpr uint8_t VideoTimingExtension::kValueSizeBytes;
@@ -268,47 +274,62 @@
 bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                  VideoSendTiming* timing) {
   RTC_DCHECK(timing);
-  if (data.size() != kValueSizeBytes)
-    return false;
-  timing->encode_start_delta_ms =
-      ByteReader<uint16_t>::ReadBigEndian(data.data());
+  // TODO(sprang): Deprecate support for old wire format.
+  ptrdiff_t off = 0;
+  switch (data.size()) {
+    case kValueSizeBytes - 1:
+      timing->flags = 0;
+      off = 1;  // Old wire format without the flags field.
+      break;
+    case kValueSizeBytes:
+      timing->flags = ByteReader<uint8_t>::ReadBigEndian(data.data());
+      break;
+    default:
+      return false;
+  }
+
+  timing->encode_start_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
+      data.data() + VideoSendTiming::kEncodeStartDeltaOffset - off);
   timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx);
+      data.data() + VideoSendTiming::kEncodeFinishDeltaOffset - off);
   timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx);
+      data.data() + VideoSendTiming::kPacketizationFinishDeltaOffset - off);
   timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx);
+      data.data() + VideoSendTiming::kPacerExitDeltaOffset - off);
   timing->network_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx);
+      data.data() + VideoSendTiming::kNetworkTimestampDeltaOffset - off);
   timing->network2_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
-      data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx);
-  timing->is_timing_frame = true;
+      data.data() + VideoSendTiming::kNetwork2TimestampDeltaOffset - off);
   return true;
 }
 
 bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) {
-  ByteWriter<uint16_t>::WriteBigEndian(data, timing.encode_start_delta_ms);
+  ByteWriter<uint8_t>::WriteBigEndian(data + VideoSendTiming::kFlagsOffset,
+                                      timing.flags);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx,
+      data + VideoSendTiming::kEncodeStartDeltaOffset,
+      timing.encode_start_delta_ms);
+  ByteWriter<uint16_t>::WriteBigEndian(
+      data + VideoSendTiming::kEncodeFinishDeltaOffset,
       timing.encode_finish_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx,
+      data + VideoSendTiming::kPacketizationFinishDeltaOffset,
       timing.packetization_finish_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoSendTiming::kPacerExitDeltaIdx,
+      data + VideoSendTiming::kPacerExitDeltaOffset,
       timing.pacer_exit_delta_ms);
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0);  // reserved
+      data + VideoSendTiming::kNetworkTimestampDeltaOffset, 0);  // reserved
   ByteWriter<uint16_t>::WriteBigEndian(
-      data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0);  // reserved
+      data + VideoSendTiming::kNetwork2TimestampDeltaOffset, 0);  // reserved
   return true;
 }
 
 bool VideoTimingExtension::Write(uint8_t* data,
                                  uint16_t time_delta_ms,
-                                 uint8_t idx) {
-  RTC_DCHECK_LT(idx, 6);
-  ByteWriter<uint16_t>::WriteBigEndian(data + 2 * idx, time_delta_ms);
+                                 uint8_t offset) {
+  RTC_DCHECK_LE(offset, kValueSizeBytes - sizeof(uint16_t));
+  ByteWriter<uint16_t>::WriteBigEndian(data + offset, time_delta_ms);
   return true;
 }
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index 9dabca2..dc63140 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -130,7 +130,7 @@
 class VideoTimingExtension {
  public:
   static constexpr RTPExtensionType kId = kRtpExtensionVideoTiming;
-  static constexpr uint8_t kValueSizeBytes = 12;
+  static constexpr uint8_t kValueSizeBytes = 13;
   static constexpr const char kUri[] =
       "http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
index 5d5b31b..d557ecf 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -33,25 +33,25 @@
   void set_packetization_finish_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
         VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoSendTiming::kPacketizationFinishDeltaIdx);
+        VideoSendTiming::kPacketizationFinishDeltaOffset);
   }
 
   void set_pacer_exit_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
         VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoSendTiming::kPacerExitDeltaIdx);
+        VideoSendTiming::kPacerExitDeltaOffset);
   }
 
   void set_network_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
         VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoSendTiming::kNetworkTimestampDeltaIdx);
+        VideoSendTiming::kNetworkTimestampDeltaOffset);
   }
 
   void set_network2_time_ms(int64_t time) {
     SetExtension<VideoTimingExtension>(
         VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
-        VideoSendTiming::kNetwork2TimestampDeltaIdx);
+        VideoSendTiming::kNetwork2TimestampDeltaOffset);
   }
 
  private:
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
index 1075d9e..776db53 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
@@ -32,6 +32,7 @@
 constexpr uint8_t kAudioLevelExtensionId = 9;
 constexpr uint8_t kRtpStreamIdExtensionId = 0xa;
 constexpr uint8_t kRtpMidExtensionId = 0xb;
+constexpr uint8_t kVideoTimingExtensionId = 0xc;
 constexpr int32_t kTimeOffset = 0x56ce;
 constexpr bool kVoiceActive = true;
 constexpr uint8_t kAudioLevel = 0x5a;
@@ -97,8 +98,19 @@
     (kTransmissionOffsetExtensionId << 4) | 6,  // (6+1)-byte extension, but
            'e',  'x',  't',                     // Transmission Offset
      'd',  'a',  't',  'a',                     // expected to be 3-bytes.
-     'p',  'a',  'y',  'l',  'o',  'a',  'd'
-};
+     'p',  'a',  'y',  'l',  'o',  'a',  'd'};
+
+constexpr uint8_t kPacketWithLegacyTimingExtension[] = {
+    0x90, kPayloadType, kSeqNumFirstByte, kSeqNumSecondByte,
+    0x65, 0x43, 0x12, 0x78,  // kTimestamp.
+    0x12, 0x34, 0x56, 0x78,  // kSSrc.
+    0xbe, 0xde, 0x00, 0x04,    // Extension block of size 4 x 32bit words.
+    (kVideoTimingExtensionId << 4)
+      | VideoTimingExtension::kValueSizeBytes - 2,  // Old format without flags.
+          0x00, 0x01, 0x00,
+    0x02, 0x00, 0x03, 0x00,
+    0x04, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00};
 // clang-format on
 }  // namespace
 
@@ -498,4 +510,57 @@
   EXPECT_THAT(packet.AllocateRawExtension(kInvalidId, 3), IsEmpty());
 }
 
+TEST(RtpPacketTest, CreateAndParseTimingFrameExtension) {
+  // Create a packet with video frame timing extension populated.
+  RtpPacketToSend::ExtensionManager send_extensions;
+  send_extensions.Register(kRtpExtensionVideoTiming, kVideoTimingExtensionId);
+  RtpPacketToSend send_packet(&send_extensions);
+  send_packet.SetPayloadType(kPayloadType);
+  send_packet.SetSequenceNumber(kSeqNum);
+  send_packet.SetTimestamp(kTimestamp);
+  send_packet.SetSsrc(kSsrc);
+
+  VideoSendTiming timing;
+  timing.encode_start_delta_ms = 1;
+  timing.encode_finish_delta_ms = 2;
+  timing.packetization_finish_delta_ms = 3;
+  timing.pacer_exit_delta_ms = 4;
+  timing.flags =
+      TimingFrameFlags::kTriggeredByTimer | TimingFrameFlags::kTriggeredBySize;
+
+  send_packet.SetExtension<VideoTimingExtension>(timing);
+
+  // Serialize the packet and then parse it again.
+  RtpPacketReceived::ExtensionManager extensions;
+  extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId);
+  RtpPacketReceived receive_packet(&extensions);
+  EXPECT_TRUE(receive_packet.Parse(send_packet.Buffer()));
+
+  VideoSendTiming received_timing;
+  EXPECT_TRUE(
+      receive_packet.GetExtension<VideoTimingExtension>(&received_timing));
+
+  // Only check first and last timestamp (covered by other tests) plus flags.
+  EXPECT_EQ(received_timing.encode_start_delta_ms,
+            timing.encode_start_delta_ms);
+  EXPECT_EQ(received_timing.pacer_exit_delta_ms, timing.pacer_exit_delta_ms);
+  EXPECT_EQ(received_timing.flags, timing.flags);
+}
+
+TEST(RtpPacketTest, ParseLegacyTimingFrameExtension) {
+  // Parse the modified packet.
+  RtpPacketReceived::ExtensionManager extensions;
+  extensions.Register<VideoTimingExtension>(kVideoTimingExtensionId);
+  RtpPacketReceived packet(&extensions);
+  EXPECT_TRUE(packet.Parse(kPacketWithLegacyTimingExtension,
+                           sizeof(kPacketWithLegacyTimingExtension)));
+  VideoSendTiming received_timing;
+  EXPECT_TRUE(packet.GetExtension<VideoTimingExtension>(&received_timing));
+
+  // Check first and last timestamp are still OK. Flags should now be 0.
+  EXPECT_EQ(received_timing.encode_start_delta_ms, 1);
+  EXPECT_EQ(received_timing.pacer_exit_delta_ms, 4);
+  EXPECT_EQ(received_timing.flags, 0);
+}
+
 }  // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index 845e7ac..106f056 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -91,7 +91,7 @@
   rtp_header->type = parsed_payload.type;
   rtp_header->type.Video.rotation = kVideoRotation_0;
   rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
-  rtp_header->type.Video.video_timing.is_timing_frame = false;
+  rtp_header->type.Video.video_timing.flags = TimingFrameFlags::kInvalid;
 
   // Retrieve the video rotation information.
   if (rtp_header->header.extension.hasVideoRotation) {
@@ -107,7 +107,6 @@
   if (rtp_header->header.extension.has_video_timing) {
     rtp_header->type.Video.video_timing =
         rtp_header->header.extension.video_timing;
-    rtp_header->type.Video.video_timing.is_timing_frame = true;
   }
 
   rtp_header->type.Video.playout_delay =
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index 3e6e452..14bbff7 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -11,6 +11,7 @@
 #include <memory>
 #include <vector>
 
+#include "webrtc/api/video/video_timing.h"
 #include "webrtc/logging/rtc_event_log/mock/mock_rtc_event_log.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h"
@@ -994,7 +995,7 @@
                                             0, 1500));
   RTPVideoHeader video_header;
   memset(&video_header, 0, sizeof(RTPVideoHeader));
-  video_header.video_timing.is_timing_frame = true;
+  video_header.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
   EXPECT_TRUE(rtp_sender_->SendOutgoingData(
       kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs, kPayloadData,
       sizeof(kPayloadData), nullptr, &video_header, nullptr));
@@ -1019,7 +1020,7 @@
   EXPECT_CALL(mock_paced_sender_,
               InsertPacket(RtpPacketSender::kLowPriority, kMediaSsrc,
                            kSeqNum + 1, _, _, false));
-  video_header.video_timing.is_timing_frame = false;
+  video_header.video_timing.flags = TimingFrameFlags::kInvalid;
   EXPECT_TRUE(rtp_sender_->SendOutgoingData(
       kVideoFrameKey, kPayloadType, kTimestamp + 1, kCaptureTimeMs + 1,
       kPayloadData, sizeof(kPayloadData), nullptr, &video_header, nullptr));
@@ -1571,7 +1572,7 @@
   const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds();
 
   RTPVideoHeader hdr = {0};
-  hdr.video_timing.is_timing_frame = true;
+  hdr.video_timing.flags = TimingFrameFlags::kTriggeredByTimer;
   hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs;
   hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs;
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index d12936c..3845074 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -332,7 +332,7 @@
         last_packet->SetExtension<VideoContentTypeExtension>(
             video_header->content_type);
       }
-      if (video_header->video_timing.is_timing_frame) {
+      if (video_header->video_timing.flags != TimingFrameFlags::kInvalid) {
         last_packet->SetExtension<VideoTimingExtension>(
             video_header->video_timing);
       }
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index e84dffd..f5a1910 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -370,7 +370,7 @@
   encoded_image_.content_type_ = (mode_ == kScreensharing)
                                      ? VideoContentType::SCREENSHARE
                                      : VideoContentType::UNSPECIFIED;
-  encoded_image_.timing_.is_timing_frame = false;
+  encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
   encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
 
   // Split encoded image up into fragments. This also updates |encoded_image_|.
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 5933073..3bf85d5 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -928,7 +928,7 @@
     encoded_images_[encoder_idx].content_type_ =
         (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
                                         : VideoContentType::UNSPECIFIED;
-    encoded_images_[encoder_idx].timing_.is_timing_frame = false;
+    encoded_images_[encoder_idx].timing_.flags = TimingFrameFlags::kInvalid;
 
     int qp = -1;
     vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 0d4334e..dc8bba2 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -710,7 +710,7 @@
                                        : VideoContentType::UNSPECIFIED;
     encoded_image_._encodedHeight = raw_->d_h;
     encoded_image_._encodedWidth = raw_->d_w;
-    encoded_image_.timing_.is_timing_frame = false;
+    encoded_image_.timing_.flags = TimingFrameFlags::kInvalid;
     int qp = -1;
     vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
     encoded_image_.qp_ = qp;
diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc
index ec7f560..bd407c7 100644
--- a/webrtc/modules/video_coding/encoded_frame.cc
+++ b/webrtc/modules/video_coding/encoded_frame.cc
@@ -88,7 +88,7 @@
   _codec = kVideoCodecUnknown;
   rotation_ = kVideoRotation_0;
   content_type_ = VideoContentType::UNSPECIFIED;
-  timing_.is_timing_frame = false;
+  timing_.flags = TimingFrameFlags::kInvalid;
   _rotation_set = false;
 }
 
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
index f67210f..41c7fc8 100644
--- a/webrtc/modules/video_coding/frame_buffer.cc
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -164,8 +164,7 @@
     rotation_ = packet.video_header.rotation;
     _rotation_set = true;
     content_type_ = packet.video_header.content_type;
-    if (packet.video_header.video_timing.is_timing_frame) {
-      timing_.is_timing_frame = true;
+    if (packet.video_header.video_timing.flags != TimingFrameFlags::kInvalid) {
       timing_.encode_start_ms =
           ntp_time_ms_ + packet.video_header.video_timing.encode_start_delta_ms;
       timing_.encode_finish_ms =
@@ -182,9 +181,8 @@
       timing_.network2_timestamp_ms =
           ntp_time_ms_ +
           packet.video_header.video_timing.network2_timstamp_delta_ms;
-    } else {
-      timing_.is_timing_frame = false;
     }
+    timing_.flags = packet.video_header.video_timing.flags;
   }
 
   if (packet.is_first_packet_in_frame) {
diff --git a/webrtc/modules/video_coding/frame_object.cc b/webrtc/modules/video_coding/frame_object.cc
index 1d858fc..86bcffb 100644
--- a/webrtc/modules/video_coding/frame_object.cc
+++ b/webrtc/modules/video_coding/frame_object.cc
@@ -32,6 +32,7 @@
     : packet_buffer_(packet_buffer),
       first_seq_num_(first_seq_num),
       last_seq_num_(last_seq_num),
+      timestamp_(0),
       received_time_(received_time),
       times_nacked_(times_nacked) {
   VCMPacket* first_packet = packet_buffer_->GetPacket(first_seq_num);
@@ -113,10 +114,10 @@
   rotation_ = last_packet->video_header.rotation;
   _rotation_set = true;
   content_type_ = last_packet->video_header.content_type;
-  if (last_packet->video_header.video_timing.is_timing_frame) {
+  if (last_packet->video_header.video_timing.flags !=
+      TimingFrameFlags::kInvalid) {
     // ntp_time_ms_ may be -1 if not estimated yet. This is not a problem,
     // as this will be dealt with at the time of reporting.
-    timing_.is_timing_frame = true;
     timing_.encode_start_ms =
         ntp_time_ms_ +
         last_packet->video_header.video_timing.encode_start_delta_ms;
@@ -138,9 +139,8 @@
 
     timing_.receive_start_ms = first_packet->receive_time_ms;
     timing_.receive_finish_ms = last_packet->receive_time_ms;
-  } else {
-    timing_.is_timing_frame = false;
   }
+  timing_.flags = last_packet->video_header.video_timing.flags;
 }
 
 RtpFrameObject::~RtpFrameObject() {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index cc14794..3b2a620 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -92,7 +92,7 @@
                            frameInfo->renderTimeMs);
 
   // Report timing information.
-  if (frameInfo->timing.is_timing_frame) {
+  if (frameInfo->timing.flags != TimingFrameFlags::kInvalid) {
     int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
     // Convert remote timestamps to local time from ntp timestamps.
     frameInfo->timing.encode_start_ms -= ntp_offset_;
@@ -137,6 +137,7 @@
     timing_frame_info.decode_finish_ms = now_ms;
     timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
     timing_frame_info.rtp_timestamp = decodedImage.timestamp();
+    timing_frame_info.flags = frameInfo->timing.flags;
 
     _timing->SetTimingFrameInfo(timing_frame_info);
   }
diff --git a/webrtc/modules/video_coding/generic_encoder.cc b/webrtc/modules/video_coding/generic_encoder.cc
index 78eeef2..cdb244d 100644
--- a/webrtc/modules/video_coding/generic_encoder.cc
+++ b/webrtc/modules/video_coding/generic_encoder.cc
@@ -231,7 +231,7 @@
 
   rtc::Optional<size_t> outlier_frame_size;
   rtc::Optional<int64_t> encode_start_ms;
-  bool is_timing_frame = false;
+  uint8_t timing_flags = TimingFrameFlags::kInvalid;
   {
     rtc::CritScope crit(&timing_params_lock_);
 
@@ -274,14 +274,16 @@
     if (last_timing_frame_time_ms_ == -1 ||
         timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
         timing_frame_delay_ms == 0) {
-      is_timing_frame = true;
+      timing_flags = TimingFrameFlags::kTriggeredByTimer;
       last_timing_frame_time_ms_ = encoded_image.capture_time_ms_;
     }
 
     // Outliers trigger timing frames, but do not affect scheduled timing
     // frames.
     if (outlier_frame_size && encoded_image._length >= *outlier_frame_size) {
-      is_timing_frame = true;
+      if (timing_flags == TimingFrameFlags::kInvalid)
+        timing_flags = 0;
+      timing_flags |= TimingFrameFlags::kTriggeredBySize;
     }
   }
 
@@ -290,8 +292,11 @@
   // drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
   // because to being sent in the network capture time required to be less than
   // all the other timestamps.
-  if (is_timing_frame && encode_start_ms) {
+  if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) {
     encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis());
+    encoded_image.timing_.flags = timing_flags;
+  } else {
+    encoded_image.timing_.flags = TimingFrameFlags::kInvalid;
   }
 
   Result result = post_encode_callback_->OnEncodedImage(
diff --git a/webrtc/modules/video_coding/generic_encoder_unittest.cc b/webrtc/modules/video_coding/generic_encoder_unittest.cc
index eec5b98..2a5aeff 100644
--- a/webrtc/modules/video_coding/generic_encoder_unittest.cc
+++ b/webrtc/modules/video_coding/generic_encoder_unittest.cc
@@ -31,7 +31,8 @@
   Result OnEncodedImage(const EncodedImage& encoded_image,
                         const CodecSpecificInfo* codec_specific_info,
                         const RTPFragmentationHeader* fragmentation) override {
-    last_frame_was_timing_ = encoded_image.timing_.is_timing_frame;
+    last_frame_was_timing_ =
+        encoded_image.timing_.flags != TimingFrameFlags::kInvalid;
     return Result(Result::OK);
   };
 
@@ -164,5 +165,30 @@
   }
 }
 
+TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
+  EncodedImage image;
+  CodecSpecificInfo codec_specific;
+  int64_t timestamp = 1;
+  image._length = 500;
+  image.capture_time_ms_ = timestamp;
+  codec_specific.codecType = kVideoCodecGeneric;
+  codec_specific.codecSpecific.generic.simulcast_idx = 0;
+  FakeEncodedImageCallback sink;
+  VCMEncodedFrameCallback callback(&sink, nullptr);
+  VideoCodec::TimingFrameTriggerThresholds thresholds;
+  thresholds.delay_ms = 1;  // Make all frames timing frames.
+  callback.SetTimingFramesThresholds(thresholds);
+
+  // Verify a single frame works with encode start time set.
+  callback.OnEncodeStarted(timestamp, 0);
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_TRUE(sink.WasTimingFrame());
+
+  // New frame, now skip OnEncodeStarted. Should not result in timing frame.
+  image.capture_time_ms_ = ++timestamp;
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_FALSE(sink.WasTimingFrame());
+}
+
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 8181e3d..efbf2c2 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -1162,7 +1162,7 @@
           (codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
               ? webrtc::VideoContentType::SCREENSHARE
               : webrtc::VideoContentType::UNSPECIFIED;
-      image->timing_.is_timing_frame = false;
+      image->timing_.flags = webrtc::TimingFrameFlags::kInvalid;
       image->_frameType =
           (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
       image->_completeFrame = true;
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
index ac57844..3c39acb 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCEncodedImage.mm
@@ -20,7 +20,7 @@
 @synthesize timeStamp = _timeStamp;
 @synthesize captureTimeMs = _captureTimeMs;
 @synthesize ntpTimeMs = _ntpTimeMs;
-@synthesize isTimingFrame = _isTimingFrame;
+@synthesize flags = _flags;
 @synthesize encodeStartMs = _encodeStartMs;
 @synthesize encodeFinishMs = _encodeFinishMs;
 @synthesize frameType = _frameType;
@@ -40,7 +40,7 @@
     _timeStamp = encodedImage._timeStamp;
     _captureTimeMs = encodedImage.capture_time_ms_;
     _ntpTimeMs = encodedImage.ntp_time_ms_;
-    _isTimingFrame = encodedImage.timing_.is_timing_frame;
+    _flags = encodedImage.timing_.flags;
     _encodeStartMs = encodedImage.timing_.encode_start_ms;
     _encodeFinishMs = encodedImage.timing_.encode_finish_ms;
     _frameType = (RTCFrameType)encodedImage._frameType;
@@ -64,7 +64,7 @@
   encodedImage._timeStamp = _timeStamp;
   encodedImage.capture_time_ms_ = _captureTimeMs;
   encodedImage.ntp_time_ms_ = _ntpTimeMs;
-  encodedImage.timing_.is_timing_frame = _isTimingFrame;
+  encodedImage.timing_.flags = _flags;
   encodedImage.timing_.encode_start_ms = _encodeStartMs;
   encodedImage.timing_.encode_finish_ms = _encodeFinishMs;
   encodedImage._frameType = webrtc::FrameType(_frameType);
diff --git a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
index 4091886..7ac8c18 100644
--- a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
@@ -657,7 +657,7 @@
   frame.rotation = rotation;
   frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare :
                                                                   RTCVideoContentTypeUnspecified;
-  frame.isTimingFrame = NO;
+  frame.flags = webrtc::TimingFrameFlags::kInvalid;
 
   int qp;
   _h264BitstreamParser.ParseBitstream(buffer->data(), buffer->size());
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h
index 7f678b9..c18fa78 100644
--- a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoCodec.h
@@ -40,7 +40,7 @@
 @property(nonatomic, assign) uint32_t timeStamp;
 @property(nonatomic, assign) long captureTimeMs;
 @property(nonatomic, assign) long ntpTimeMs;
-@property(nonatomic, assign) BOOL isTimingFrame;
+@property(nonatomic, assign) uint8_t flags;
 @property(nonatomic, assign) long encodeStartMs;
 @property(nonatomic, assign) long encodeFinishMs;
 @property(nonatomic, assign) RTCFrameType frameType;
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
index 7957451..eabde40 100644
--- a/webrtc/video/payload_router.cc
+++ b/webrtc/video/payload_router.cc
@@ -130,7 +130,7 @@
     CopyCodecSpecific(codec_specific_info, &rtp_video_header);
   rtp_video_header.rotation = encoded_image.rotation_;
   rtp_video_header.content_type = encoded_image.content_type_;
-  if (encoded_image.timing_.is_timing_frame) {
+  if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid) {
     rtp_video_header.video_timing.encode_start_delta_ms =
         VideoSendTiming::GetDeltaCappedMs(
             encoded_image.capture_time_ms_,
@@ -143,10 +143,8 @@
     rtp_video_header.video_timing.pacer_exit_delta_ms = 0;
     rtp_video_header.video_timing.network_timstamp_delta_ms = 0;
     rtp_video_header.video_timing.network2_timstamp_delta_ms = 0;
-    rtp_video_header.video_timing.is_timing_frame = true;
-  } else {
-    rtp_video_header.video_timing.is_timing_frame = false;
   }
+  rtp_video_header.video_timing.flags = encoded_image.timing_.flags;
   rtp_video_header.playout_delay = encoded_image.playout_delay_;
 
   int stream_index = rtp_video_header.simulcastIdx;
diff --git a/webrtc/video/rtp_video_stream_receiver.cc b/webrtc/video/rtp_video_stream_receiver.cc
index d4464ac..6064e69 100644
--- a/webrtc/video/rtp_video_stream_receiver.cc
+++ b/webrtc/video/rtp_video_stream_receiver.cc
@@ -560,7 +560,6 @@
   rtp_header.type.Video.video_timing = {0u, 0u, 0u, 0u, 0u, 0u, false};
   if (header.extension.has_video_timing) {
     rtp_header.type.Video.video_timing = header.extension.video_timing;
-    rtp_header.type.Video.video_timing.is_timing_frame = true;
   }
   rtp_header.type.Video.playout_delay = header.extension.playout_delay;