blob: bc32b226cf868e98e92c42bb68c075a8514ac477 [file] [log] [blame]
/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
11#ifndef MODULE_COMMON_TYPES_H
12#define MODULE_COMMON_TYPES_H
13
pbos@webrtc.org3f45c2e2013-08-05 16:22:53 +000014#include <assert.h>
15#include <string.h> // memcpy
andrew@webrtc.orgd4682362013-01-22 04:44:30 +000016
17#include <algorithm>
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000018
andrew@webrtc.org856edd52012-11-23 19:17:23 +000019#include "webrtc/common_types.h"
20#include "webrtc/system_wrappers/interface/constructor_magic.h"
21#include "webrtc/typedefs.h"
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000022
23#ifdef _WIN32
24 #pragma warning(disable:4351) // remove warning "new behavior: elements of array
25 // 'array' will be default initialized"
26#endif
27
andrew@webrtc.org856edd52012-11-23 19:17:23 +000028namespace webrtc {
29
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000030struct RTPHeaderExtension
31{
pbos@webrtc.org0486a102013-04-10 17:59:17 +000032 int32_t transmissionTimeOffset;
solenberg@webrtc.orga0b00252013-05-16 11:10:31 +000033 uint32_t absoluteSendTime;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000034};
35
stefan@webrtc.org6696fba2013-05-29 12:12:51 +000036struct RTPHeader
37{
38 bool markerBit;
39 uint8_t payloadType;
40 uint16_t sequenceNumber;
41 uint32_t timestamp;
42 uint32_t ssrc;
43 uint8_t numCSRCs;
44 uint32_t arrOfCSRCs[kRtpCsrcSize];
45 uint8_t paddingLength;
46 uint16_t headerLength;
wu@webrtc.org7fc75bb2013-08-15 23:38:54 +000047 int payload_type_frequency;
stefan@webrtc.org6696fba2013-05-29 12:12:51 +000048 RTPHeaderExtension extension;
49};
50
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000051struct RTPAudioHeader
52{
pbos@webrtc.org0486a102013-04-10 17:59:17 +000053 uint8_t numEnergy; // number of valid entries in arrOfEnergy
54 uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000055 bool isCNG; // is this CNG
pbos@webrtc.org0486a102013-04-10 17:59:17 +000056 uint8_t channel; // number of channels 2 = stereo
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000057};
58
// Sentinels marking an optional VP8 payload-descriptor field as "not present".
enum { kNoPictureId = -1 };
enum { kNoTl0PicIdx = -1 };
enum { kNoTemporalIdx = -1 };
enum { kNoKeyIdx = -1 };
enum { kNoSimulcastIdx = 0 };
64
65struct RTPVideoHeaderVP8
66{
67 void InitRTPVideoHeaderVP8()
68 {
69 nonReference = false;
70 pictureId = kNoPictureId;
71 tl0PicIdx = kNoTl0PicIdx;
72 temporalIdx = kNoTemporalIdx;
73 layerSync = false;
74 keyIdx = kNoKeyIdx;
75 partitionId = 0;
76 beginningOfPartition = false;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000077 }
78
79 bool nonReference; // Frame is discardable.
pbos@webrtc.org0486a102013-04-10 17:59:17 +000080 int16_t pictureId; // Picture ID index, 15 bits;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000081 // kNoPictureId if PictureID does not exist.
pbos@webrtc.org0486a102013-04-10 17:59:17 +000082 int16_t tl0PicIdx; // TL0PIC_IDX, 8 bits;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000083 // kNoTl0PicIdx means no value provided.
pbos@webrtc.org0486a102013-04-10 17:59:17 +000084 int8_t temporalIdx; // Temporal layer index, or kNoTemporalIdx.
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000085 bool layerSync; // This frame is a layer sync frame.
86 // Disabled if temporalIdx == kNoTemporalIdx.
87 int keyIdx; // 5 bits; kNoKeyIdx means not used.
88 int partitionId; // VP8 partition ID
89 bool beginningOfPartition; // True if this packet is the first
90 // in a VP8 partition. Otherwise false
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +000091};
92union RTPVideoTypeHeader
93{
94 RTPVideoHeaderVP8 VP8;
95};
96
// Video codec carried in an RTP stream, as seen by the RTP module.
enum RtpVideoCodecTypes
{
    kRtpVideoNone,     // No video payload.
    kRtpVideoGeneric,  // Codec-agnostic packetization.
    kRtpVideoVp8       // VP8-specific packetization.
};
103struct RTPVideoHeader
104{
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000105 uint16_t width; // size
106 uint16_t height;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000107
108 bool isFirstPacket; // first packet in frame
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000109 uint8_t simulcastIdx; // Index if the simulcast encoder creating
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000110 // this frame, 0 if not using simulcast.
wu@webrtc.org7fc75bb2013-08-15 23:38:54 +0000111 RtpVideoCodecTypes codec;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000112 RTPVideoTypeHeader codecHeader;
113};
114union RTPTypeHeader
115{
116 RTPAudioHeader Audio;
117 RTPVideoHeader Video;
118};
119
120struct WebRtcRTPHeader
121{
122 RTPHeader header;
123 FrameType frameType;
124 RTPTypeHeader type;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000125};
126
127class RTPFragmentationHeader
128{
129public:
130 RTPFragmentationHeader() :
131 fragmentationVectorSize(0),
132 fragmentationOffset(NULL),
133 fragmentationLength(NULL),
134 fragmentationTimeDiff(NULL),
135 fragmentationPlType(NULL)
136 {};
137
138 ~RTPFragmentationHeader()
139 {
140 delete [] fragmentationOffset;
141 delete [] fragmentationLength;
142 delete [] fragmentationTimeDiff;
143 delete [] fragmentationPlType;
144 }
145
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000146 void CopyFrom(const RTPFragmentationHeader& src)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000147 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000148 if(this == &src)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000149 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000150 return;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000151 }
152
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000153 if(src.fragmentationVectorSize != fragmentationVectorSize)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000154 {
155 // new size of vectors
156
157 // delete old
158 delete [] fragmentationOffset;
159 fragmentationOffset = NULL;
160 delete [] fragmentationLength;
161 fragmentationLength = NULL;
162 delete [] fragmentationTimeDiff;
163 fragmentationTimeDiff = NULL;
164 delete [] fragmentationPlType;
165 fragmentationPlType = NULL;
166
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000167 if(src.fragmentationVectorSize > 0)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000168 {
169 // allocate new
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000170 if(src.fragmentationOffset)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000171 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000172 fragmentationOffset = new uint32_t[src.fragmentationVectorSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000173 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000174 if(src.fragmentationLength)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000175 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000176 fragmentationLength = new uint32_t[src.fragmentationVectorSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000177 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000178 if(src.fragmentationTimeDiff)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000179 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000180 fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000181 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000182 if(src.fragmentationPlType)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000183 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000184 fragmentationPlType = new uint8_t[src.fragmentationVectorSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000185 }
186 }
187 // set new size
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000188 fragmentationVectorSize = src.fragmentationVectorSize;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000189 }
190
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000191 if(src.fragmentationVectorSize > 0)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000192 {
193 // copy values
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000194 if(src.fragmentationOffset)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000195 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000196 memcpy(fragmentationOffset, src.fragmentationOffset,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000197 src.fragmentationVectorSize * sizeof(uint32_t));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000198 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000199 if(src.fragmentationLength)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000200 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000201 memcpy(fragmentationLength, src.fragmentationLength,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000202 src.fragmentationVectorSize * sizeof(uint32_t));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000203 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000204 if(src.fragmentationTimeDiff)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000205 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000206 memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000207 src.fragmentationVectorSize * sizeof(uint16_t));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000208 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000209 if(src.fragmentationPlType)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000210 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000211 memcpy(fragmentationPlType, src.fragmentationPlType,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000212 src.fragmentationVectorSize * sizeof(uint8_t));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000213 }
214 }
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000215 }
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000216
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000217 void VerifyAndAllocateFragmentationHeader(const uint16_t size)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000218 {
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000219 if(fragmentationVectorSize < size)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000220 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000221 uint16_t oldVectorSize = fragmentationVectorSize;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000222 {
223 // offset
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000224 uint32_t* oldOffsets = fragmentationOffset;
225 fragmentationOffset = new uint32_t[size];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000226 memset(fragmentationOffset+oldVectorSize, 0,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000227 sizeof(uint32_t)*(size-oldVectorSize));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000228 // copy old values
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000229 memcpy(fragmentationOffset,oldOffsets, sizeof(uint32_t) * oldVectorSize);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000230 delete[] oldOffsets;
231 }
232 // length
233 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000234 uint32_t* oldLengths = fragmentationLength;
235 fragmentationLength = new uint32_t[size];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000236 memset(fragmentationLength+oldVectorSize, 0,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000237 sizeof(uint32_t) * (size- oldVectorSize));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000238 memcpy(fragmentationLength, oldLengths,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000239 sizeof(uint32_t) * oldVectorSize);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000240 delete[] oldLengths;
241 }
242 // time diff
243 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000244 uint16_t* oldTimeDiffs = fragmentationTimeDiff;
245 fragmentationTimeDiff = new uint16_t[size];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000246 memset(fragmentationTimeDiff+oldVectorSize, 0,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000247 sizeof(uint16_t) * (size- oldVectorSize));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000248 memcpy(fragmentationTimeDiff, oldTimeDiffs,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000249 sizeof(uint16_t) * oldVectorSize);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000250 delete[] oldTimeDiffs;
251 }
252 // payload type
253 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000254 uint8_t* oldTimePlTypes = fragmentationPlType;
255 fragmentationPlType = new uint8_t[size];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000256 memset(fragmentationPlType+oldVectorSize, 0,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000257 sizeof(uint8_t) * (size- oldVectorSize));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000258 memcpy(fragmentationPlType, oldTimePlTypes,
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000259 sizeof(uint8_t) * oldVectorSize);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000260 delete[] oldTimePlTypes;
261 }
262 fragmentationVectorSize = size;
263 }
264 }
265
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000266 uint16_t fragmentationVectorSize; // Number of fragmentations
267 uint32_t* fragmentationOffset; // Offset of pointer to data for each fragm.
268 uint32_t* fragmentationLength; // Data size for each fragmentation
269 uint16_t* fragmentationTimeDiff; // Timestamp difference relative "now" for
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000270 // each fragmentation
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000271 uint8_t* fragmentationPlType; // Payload type of each fragmentation
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000272
273private:
274 DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000275};
276
// VoIP metrics report block, per RFC 3611 section 4.7.
struct RTCPVoIPMetric
{
    uint8_t lossRate;
    uint8_t discardRate;
    uint8_t burstDensity;
    uint8_t gapDensity;
    uint16_t burstDuration;
    uint16_t gapDuration;
    uint16_t roundTripDelay;
    uint16_t endSystemDelay;
    uint8_t signalLevel;
    uint8_t noiseLevel;
    uint8_t RERL;
    uint8_t Gmin;
    uint8_t Rfactor;
    uint8_t extRfactor;
    uint8_t MOSLQ;
    uint8_t MOSCQ;
    uint8_t RXconfig;
    uint16_t JBnominal;
    uint16_t JBmax;
    uint16_t JBabsMax;
};
301
// Loss model used to pick the FEC packet masks. |kFecMaskRandom| assumes a
// random loss model, |kFecMaskBursty| a bursty/consecutive loss model. The
// masks themselves are defined in
// modules/rtp_rtcp/fec_private_tables_random(bursty).h.
enum FecMaskType {
  kFecMaskRandom,
  kFecMaskBursty,
};
310
311// Struct containing forward error correction settings.
312struct FecProtectionParams {
313 int fec_rate;
314 bool use_uep_protection;
315 int max_fec_frames;
316 FecMaskType fec_mask_type;
317};
318
// Interface used by the CallStats class to distribute call statistics.
// Callbacks start firing once the observer has been registered with a
// CallStats object via RegisterStatsObserver.
class CallStatsObserver {
 public:
  virtual ~CallStatsObserver() {}

  // Invoked with the latest round-trip-time estimate, in milliseconds.
  virtual void OnRttUpdate(uint32_t rtt_ms) = 0;
};
328
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000329// class describing a complete, or parts of an encoded frame.
330class EncodedVideoData
331{
332public:
333 EncodedVideoData() :
334 payloadType(0),
335 timeStamp(0),
336 renderTimeMs(0),
337 encodedWidth(0),
338 encodedHeight(0),
339 completeFrame(false),
340 missingFrame(false),
341 payloadData(NULL),
342 payloadSize(0),
343 bufferSize(0),
344 fragmentationHeader(),
345 frameType(kVideoFrameDelta),
346 codec(kVideoCodecUnknown)
347 {};
348
349 EncodedVideoData(const EncodedVideoData& data)
350 {
351 payloadType = data.payloadType;
352 timeStamp = data.timeStamp;
353 renderTimeMs = data.renderTimeMs;
354 encodedWidth = data.encodedWidth;
355 encodedHeight = data.encodedHeight;
356 completeFrame = data.completeFrame;
357 missingFrame = data.missingFrame;
358 payloadSize = data.payloadSize;
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000359 fragmentationHeader.CopyFrom(data.fragmentationHeader);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000360 frameType = data.frameType;
361 codec = data.codec;
362 if (data.payloadSize > 0)
363 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000364 payloadData = new uint8_t[data.payloadSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000365 memcpy(payloadData, data.payloadData, data.payloadSize);
366 }
367 else
368 {
369 payloadData = NULL;
370 }
371 }
372
373
374 ~EncodedVideoData()
375 {
376 delete [] payloadData;
377 };
378
379 EncodedVideoData& operator=(const EncodedVideoData& data)
380 {
381 if (this == &data)
382 {
383 return *this;
384 }
385 payloadType = data.payloadType;
386 timeStamp = data.timeStamp;
387 renderTimeMs = data.renderTimeMs;
388 encodedWidth = data.encodedWidth;
389 encodedHeight = data.encodedHeight;
390 completeFrame = data.completeFrame;
391 missingFrame = data.missingFrame;
392 payloadSize = data.payloadSize;
andrew@webrtc.org856edd52012-11-23 19:17:23 +0000393 fragmentationHeader.CopyFrom(data.fragmentationHeader);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000394 frameType = data.frameType;
395 codec = data.codec;
396 if (data.payloadSize > 0)
397 {
398 delete [] payloadData;
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000399 payloadData = new uint8_t[data.payloadSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000400 memcpy(payloadData, data.payloadData, data.payloadSize);
401 bufferSize = data.payloadSize;
402 }
403 return *this;
404 };
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000405 void VerifyAndAllocate( const uint32_t size)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000406 {
407 if (bufferSize < size)
408 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000409 uint8_t* oldPayload = payloadData;
410 payloadData = new uint8_t[size];
411 memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000412
413 bufferSize = size;
414 delete[] oldPayload;
415 }
416 }
417
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000418 uint8_t payloadType;
419 uint32_t timeStamp;
420 int64_t renderTimeMs;
421 uint32_t encodedWidth;
422 uint32_t encodedHeight;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000423 bool completeFrame;
424 bool missingFrame;
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000425 uint8_t* payloadData;
426 uint32_t payloadSize;
427 uint32_t bufferSize;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000428 RTPFragmentationHeader fragmentationHeader;
429 FrameType frameType;
430 VideoCodecType codec;
431};
432
// Simple content-analysis metrics computed for a video frame.
struct VideoContentMetrics {
  VideoContentMetrics() { Reset(); }

  // Restores every metric to its initial (zero) state.
  void Reset() {
    motion_magnitude = 0.0f;
    spatial_pred_err = 0.0f;
    spatial_pred_err_h = 0.0f;
    spatial_pred_err_v = 0.0f;
  }

  float motion_magnitude;    // Magnitude of estimated motion.
  float spatial_pred_err;    // Overall spatial prediction error.
  float spatial_pred_err_h;  // Horizontal component.
  float spatial_pred_err_v;  // Vertical component.
};
452
/*************************************************
 *
 * VideoFrame class
 *
 * Stores and handles a single video frame buffer
 * together with its size/timestamp metadata.
 *
 *************************************************/
class VideoFrame
{
public:
    VideoFrame();
    ~VideoFrame();

    /**
     * Ensures the allocated buffer holds at least |minimumSize| bytes. If the
     * current buffer is smaller, a new allocation is made and the old buffer
     * data is copied over; the recorded buffer size becomes |minimumSize|.
     */
    int32_t VerifyAndAllocate(const uint32_t minimumSize);

    /**
     * Updates the valid-data length; fails if |newLength| exceeds the
     * current allocation.
     */
    int32_t SetLength(const uint32_t newLength);

    /*
     * Exchanges buffer, length and size with the caller's values.
     */
    int32_t Swap(uint8_t*& newMemory,
                 uint32_t& newLength,
                 uint32_t& newSize);

    /*
     * Exchanges buffers and all frame metadata with |videoFrame|.
     */
    int32_t SwapFrame(VideoFrame& videoFrame);

    /**
     * Copies |videoFrame|'s buffer and metadata; the buffer is grown if the
     * incoming length exceeds the current allocation.
     */
    int32_t CopyFrame(const VideoFrame& videoFrame);

    /**
     * Copies |length| bytes from |sourceBuffer|; the buffer is grown if
     * |length| exceeds the current allocation.
     */
    int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer);

    /**
     * Releases the buffer and resets all members to zero.
     */
    void Free();

    /**
     * Frame timestamp, in 90 kHz RTP units.
     */
    void SetTimeStamp(const uint32_t timeStamp) {_timeStamp = timeStamp;}
    uint32_t TimeStamp() const {return _timeStamp;}

    /**
     * Raw buffer access (const and mutable-reference flavors).
     */
    uint8_t* Buffer() const {return _buffer;}
    uint8_t*& Buffer() {return _buffer;}

    /**
     * Allocated buffer size, in bytes.
     */
    uint32_t Size() const {return _bufferSize;}

    /**
     * Length of valid frame data, in bytes.
     */
    uint32_t Length() const {return _bufferLength;}

    /**
     * Frame dimensions.
     */
    uint32_t Width() const {return _width;}
    uint32_t Height() const {return _height;}
    void SetWidth(const uint32_t width) {_width = width;}
    void SetHeight(const uint32_t height) {_height = height;}

    /**
     * Render time, in milliseconds.
     */
    void SetRenderTime(const int64_t renderTimeMs) {_renderTimeMs = renderTimeMs;}
    int64_t RenderTimeMs() const {return _renderTimeMs;}

private:
    void Set(uint8_t* buffer,
             uint32_t size,
             uint32_t length,
             uint32_t timeStamp);

    uint8_t* _buffer;        // Pointer to frame buffer; NULL when unallocated.
    uint32_t _bufferSize;    // Allocated buffer size, in bytes.
    uint32_t _bufferLength;  // Length (in bytes) of valid data in the buffer.
    uint32_t _timeStamp;     // Timestamp of frame (90 kHz).
    uint32_t _width;
    uint32_t _height;
    int64_t _renderTimeMs;
}; // end of VideoFrame class declaration
565
566// inline implementation of VideoFrame class:
567inline
568VideoFrame::VideoFrame():
569 _buffer(0),
570 _bufferSize(0),
571 _bufferLength(0),
572 _timeStamp(0),
573 _width(0),
574 _height(0),
575 _renderTimeMs(0)
576{
577 //
578}
579inline
580VideoFrame::~VideoFrame()
581{
582 if(_buffer)
583 {
584 delete [] _buffer;
585 _buffer = NULL;
586 }
587}
588
589
590inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000591int32_t
592VideoFrame::VerifyAndAllocate(const uint32_t minimumSize)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000593{
594 if (minimumSize < 1)
595 {
596 return -1;
597 }
598 if(minimumSize > _bufferSize)
599 {
600 // create buffer of sufficient size
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000601 uint8_t* newBufferBuffer = new uint8_t[minimumSize];
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000602 if(_buffer)
603 {
604 // copy old data
605 memcpy(newBufferBuffer, _buffer, _bufferSize);
606 delete [] _buffer;
607 }
608 else
609 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000610 memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t));
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000611 }
612 _buffer = newBufferBuffer;
613 _bufferSize = minimumSize;
614 }
615 return 0;
616}
617
618inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000619int32_t
620VideoFrame::SetLength(const uint32_t newLength)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000621{
622 if (newLength >_bufferSize )
623 { // can't accomodate new value
624 return -1;
625 }
626 _bufferLength = newLength;
627 return 0;
628}
629
630inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000631int32_t
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000632VideoFrame::SwapFrame(VideoFrame& videoFrame)
633{
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000634 uint32_t tmpTimeStamp = _timeStamp;
635 uint32_t tmpWidth = _width;
636 uint32_t tmpHeight = _height;
637 int64_t tmpRenderTime = _renderTimeMs;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000638
639 _timeStamp = videoFrame._timeStamp;
640 _width = videoFrame._width;
641 _height = videoFrame._height;
642 _renderTimeMs = videoFrame._renderTimeMs;
643
644 videoFrame._timeStamp = tmpTimeStamp;
645 videoFrame._width = tmpWidth;
646 videoFrame._height = tmpHeight;
647 videoFrame._renderTimeMs = tmpRenderTime;
648
649 return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize);
650}
651
652inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000653int32_t
654VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000655{
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000656 uint8_t* tmpBuffer = _buffer;
657 uint32_t tmpLength = _bufferLength;
658 uint32_t tmpSize = _bufferSize;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000659 _buffer = newMemory;
660 _bufferLength = newLength;
661 _bufferSize = newSize;
662 newMemory = tmpBuffer;
663 newLength = tmpLength;
664 newSize = tmpSize;
665 return 0;
666}
667
668inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000669int32_t
670VideoFrame::CopyFrame(uint32_t length, const uint8_t* sourceBuffer)
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000671{
672 if (length > _bufferSize)
673 {
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000674 int32_t ret = VerifyAndAllocate(length);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000675 if (ret < 0)
676 {
677 return ret;
678 }
679 }
680 memcpy(_buffer, sourceBuffer, length);
681 _bufferLength = length;
682 return 0;
683}
684
685inline
pbos@webrtc.org0486a102013-04-10 17:59:17 +0000686int32_t
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000687VideoFrame::CopyFrame(const VideoFrame& videoFrame)
688{
689 if(CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0)
690 {
691 return -1;
692 }
693 _timeStamp = videoFrame._timeStamp;
694 _width = videoFrame._width;
695 _height = videoFrame._height;
696 _renderTimeMs = videoFrame._renderTimeMs;
697 return 0;
698}
699
700inline
701void
702VideoFrame::Free()
703{
704 _timeStamp = 0;
705 _bufferLength = 0;
706 _bufferSize = 0;
707 _height = 0;
708 _width = 0;
709 _renderTimeMs = 0;
710
711 if(_buffer)
712 {
713 delete [] _buffer;
714 _buffer = NULL;
715 }
716}
717
718
719/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
720 * allows for adding and subtracting frames while keeping track of the resulting
721 * states.
722 *
723 * Notes
724 * - The total number of samples in |data_| is
725 * samples_per_channel_ * num_channels_
726 *
727 * - Stereo data is interleaved starting with the left channel.
728 *
729 * - The +operator assume that you would never add exactly opposite frames when
730 * deciding the resulting state. To do this use the -operator.
731 */
732class AudioFrame
733{
734public:
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000735 // Stereo, 32 kHz, 60 ms (2 * 32 * 60)
736 static const int kMaxDataSizeSamples = 3840;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000737
738 enum VADActivity
739 {
740 kVadActive = 0,
741 kVadPassive = 1,
742 kVadUnknown = 2
743 };
744 enum SpeechType
745 {
746 kNormalSpeech = 0,
747 kPLC = 1,
748 kCNG = 2,
749 kPLCCNG = 3,
750 kUndefined = 4
751 };
752
753 AudioFrame();
754 virtual ~AudioFrame();
755
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000756 void UpdateFrame(
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000757 int id,
758 uint32_t timestamp,
759 const int16_t* data,
760 int samples_per_channel,
761 int sample_rate_hz,
762 SpeechType speech_type,
763 VADActivity vad_activity,
764 int num_channels = 1,
765 uint32_t energy = -1);
766
767 AudioFrame& Append(const AudioFrame& rhs);
768
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000769 void CopyFrom(const AudioFrame& src);
770
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000771 void Mute();
772
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000773 AudioFrame& operator>>=(const int rhs);
774 AudioFrame& operator+=(const AudioFrame& rhs);
775 AudioFrame& operator-=(const AudioFrame& rhs);
776
777 int id_;
778 uint32_t timestamp_;
779 int16_t data_[kMaxDataSizeSamples];
780 int samples_per_channel_;
781 int sample_rate_hz_;
782 int num_channels_;
783 SpeechType speech_type_;
784 VADActivity vad_activity_;
785 uint32_t energy_;
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000786
787private:
788 DISALLOW_COPY_AND_ASSIGN(AudioFrame);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000789};
790
791inline
792AudioFrame::AudioFrame()
793 :
794 id_(-1),
795 timestamp_(0),
796 data_(),
797 samples_per_channel_(0),
798 sample_rate_hz_(0),
799 num_channels_(1),
800 speech_type_(kUndefined),
801 vad_activity_(kVadUnknown),
802 energy_(0xffffffff)
803{
804}
805
806inline
807AudioFrame::~AudioFrame()
808{
809}
810
811inline
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000812void
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000813AudioFrame::UpdateFrame(
814 int id,
815 uint32_t timestamp,
816 const int16_t* data,
817 int samples_per_channel,
818 int sample_rate_hz,
819 SpeechType speech_type,
820 VADActivity vad_activity,
821 int num_channels,
822 uint32_t energy)
823{
824 id_ = id;
825 timestamp_ = timestamp;
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000826 samples_per_channel_ = samples_per_channel;
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000827 sample_rate_hz_ = sample_rate_hz;
828 speech_type_ = speech_type;
829 vad_activity_ = vad_activity;
830 num_channels_ = num_channels;
831 energy_ = energy;
832
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000833 const int length = samples_per_channel * num_channels;
834 assert(length <= kMaxDataSizeSamples && length >= 0);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000835 if(data != NULL)
836 {
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000837 memcpy(data_, data, sizeof(int16_t) * length);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000838 }
839 else
840 {
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000841 memset(data_, 0, sizeof(int16_t) * length);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000842 }
andrew@webrtc.orgd4682362013-01-22 04:44:30 +0000843}
844
845inline void AudioFrame::CopyFrom(const AudioFrame& src)
846{
847 if(this == &src)
848 {
849 return;
850 }
851 id_ = src.id_;
852 timestamp_ = src.timestamp_;
853 samples_per_channel_ = src.samples_per_channel_;
854 sample_rate_hz_ = src.sample_rate_hz_;
855 speech_type_ = src.speech_type_;
856 vad_activity_ = src.vad_activity_;
857 num_channels_ = src.num_channels_;
858 energy_ = src.energy_;
859
860 const int length = samples_per_channel_ * num_channels_;
861 assert(length <= kMaxDataSizeSamples && length >= 0);
862 memcpy(data_, src.data_, sizeof(int16_t) * length);
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000863}
864
865inline
866void
867AudioFrame::Mute()
868{
869 memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
870}
871
872inline
873AudioFrame&
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +0000874AudioFrame::operator>>=(const int rhs)
875{
876 assert((num_channels_ > 0) && (num_channels_ < 3));
877 if((num_channels_ > 2) ||
878 (num_channels_ < 1))
879 {
880 return *this;
881 }
882 for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
883 {
884 data_[i] = static_cast<int16_t>(data_[i] >> rhs);
885 }
886 return *this;
887}
888
889inline
890AudioFrame&
891AudioFrame::Append(const AudioFrame& rhs)
892{
893 // Sanity check
894 assert((num_channels_ > 0) && (num_channels_ < 3));
895 if((num_channels_ > 2) ||
896 (num_channels_ < 1))
897 {
898 return *this;
899 }
900 if(num_channels_ != rhs.num_channels_)
901 {
902 return *this;
903 }
904 if((vad_activity_ == kVadActive) ||
905 rhs.vad_activity_ == kVadActive)
906 {
907 vad_activity_ = kVadActive;
908 }
909 else if((vad_activity_ == kVadUnknown) ||
910 rhs.vad_activity_ == kVadUnknown)
911 {
912 vad_activity_ = kVadUnknown;
913 }
914 if(speech_type_ != rhs.speech_type_)
915 {
916 speech_type_ = kUndefined;
917 }
918
919 int offset = samples_per_channel_ * num_channels_;
920 for(int i = 0;
921 i < rhs.samples_per_channel_ * rhs.num_channels_;
922 i++)
923 {
924 data_[offset+i] = rhs.data_[i];
925 }
926 samples_per_channel_ += rhs.samples_per_channel_;
927 return *this;
928}
929
930// merge vectors
931inline
932AudioFrame&
933AudioFrame::operator+=(const AudioFrame& rhs)
934{
935 // Sanity check
936 assert((num_channels_ > 0) && (num_channels_ < 3));
937 if((num_channels_ > 2) ||
938 (num_channels_ < 1))
939 {
940 return *this;
941 }
942 if(num_channels_ != rhs.num_channels_)
943 {
944 return *this;
945 }
946 bool noPrevData = false;
947 if(samples_per_channel_ != rhs.samples_per_channel_)
948 {
949 if(samples_per_channel_ == 0)
950 {
951 // special case we have no data to start with
952 samples_per_channel_ = rhs.samples_per_channel_;
953 noPrevData = true;
954 } else
955 {
956 return *this;
957 }
958 }
959
960 if((vad_activity_ == kVadActive) ||
961 rhs.vad_activity_ == kVadActive)
962 {
963 vad_activity_ = kVadActive;
964 }
965 else if((vad_activity_ == kVadUnknown) ||
966 rhs.vad_activity_ == kVadUnknown)
967 {
968 vad_activity_ = kVadUnknown;
969 }
970
971 if(speech_type_ != rhs.speech_type_)
972 {
973 speech_type_ = kUndefined;
974 }
975
976 if(noPrevData)
977 {
978 memcpy(data_, rhs.data_,
979 sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
980 } else
981 {
982 // IMPROVEMENT this can be done very fast in assembly
983 for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
984 {
985 int32_t wrapGuard = static_cast<int32_t>(data_[i]) +
986 static_cast<int32_t>(rhs.data_[i]);
987 if(wrapGuard < -32768)
988 {
989 data_[i] = -32768;
990 }else if(wrapGuard > 32767)
991 {
992 data_[i] = 32767;
993 }else
994 {
995 data_[i] = (int16_t)wrapGuard;
996 }
997 }
998 }
999 energy_ = 0xffffffff;
1000 return *this;
1001}
1002
1003inline
1004AudioFrame&
1005AudioFrame::operator-=(const AudioFrame& rhs)
1006{
1007 // Sanity check
1008 assert((num_channels_ > 0) && (num_channels_ < 3));
1009 if((num_channels_ > 2)||
1010 (num_channels_ < 1))
1011 {
1012 return *this;
1013 }
1014 if((samples_per_channel_ != rhs.samples_per_channel_) ||
1015 (num_channels_ != rhs.num_channels_))
1016 {
1017 return *this;
1018 }
1019 if((vad_activity_ != kVadPassive) ||
1020 rhs.vad_activity_ != kVadPassive)
1021 {
1022 vad_activity_ = kVadUnknown;
1023 }
1024 speech_type_ = kUndefined;
1025
1026 for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
1027 {
1028 int32_t wrapGuard = static_cast<int32_t>(data_[i]) -
1029 static_cast<int32_t>(rhs.data_[i]);
1030 if(wrapGuard < -32768)
1031 {
1032 data_[i] = -32768;
1033 }
1034 else if(wrapGuard > 32767)
1035 {
1036 data_[i] = 32767;
1037 }
1038 else
1039 {
1040 data_[i] = (int16_t)wrapGuard;
1041 }
1042 }
1043 energy_ = 0xffffffff;
1044 return *this;
1045}
1046
// True iff |sequence_number| is ahead of |prev_sequence_number| in 16-bit
// wraparound (RFC 3550 serial) order: the forward distance modulo 2^16 is
// nonzero and less than half the number space. Values exactly 0x8000 apart
// compare "not newer" in both directions.
inline bool IsNewerSequenceNumber(uint16_t sequence_number,
                                  uint16_t prev_sequence_number) {
  const uint16_t forward_distance =
      static_cast<uint16_t>(sequence_number - prev_sequence_number);
  return forward_distance != 0 && forward_distance < 0x8000;
}
1052
// True iff |timestamp| is ahead of |prev_timestamp| in 32-bit wraparound
// order: the forward distance modulo 2^32 is nonzero and less than half the
// number space. Values exactly 0x80000000 apart compare "not newer" in both
// directions.
inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
  const uint32_t forward_distance = timestamp - prev_timestamp;
  return forward_distance != 0 && forward_distance < 0x80000000;
}
1057
1058inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
1059 uint16_t sequence_number2) {
1060 return IsNewerSequenceNumber(sequence_number1, sequence_number2) ?
1061 sequence_number1 : sequence_number2;
1062}
1063
1064inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
1065 return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 :
1066 timestamp2;
1067}
1068
pbos@webrtc.org3b89e102013-07-03 15:12:26 +00001069} // namespace webrtc
andrew@webrtc.orgb015cbe2012-10-22 18:19:23 +00001070
1071#endif // MODULE_COMMON_TYPES_H