/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
| 10 | |
| 11 | #include "video_capture_impl.h" |
| 12 | |
| 13 | #include "common_video/libyuv/include/webrtc_libyuv.h" |
| 14 | #include "critical_section_wrapper.h" |
| 15 | #include "module_common_types.h" |
| 16 | #include "ref_count.h" |
| 17 | #include "tick_util.h" |
| 18 | #include "trace.h" |
| 19 | #include "video_capture_config.h" |
| 20 | |
| 21 | #include <stdlib.h> |
| 22 | |
| 23 | namespace webrtc |
| 24 | { |
| 25 | namespace videocapturemodule |
| 26 | { |
| 27 | VideoCaptureModule* VideoCaptureImpl::Create( |
| 28 | const WebRtc_Word32 id, |
| 29 | VideoCaptureExternal*& externalCapture) |
| 30 | { |
| 31 | RefCountImpl<VideoCaptureImpl>* implementation = |
| 32 | new RefCountImpl<VideoCaptureImpl>(id); |
| 33 | externalCapture = implementation; |
| 34 | return implementation; |
| 35 | } |
| 36 | |
| 37 | const char* VideoCaptureImpl::CurrentDeviceName() const |
| 38 | { |
| 39 | return _deviceUniqueId; |
| 40 | } |
| 41 | |
| 42 | WebRtc_Word32 VideoCaptureImpl::ChangeUniqueId(const WebRtc_Word32 id) |
| 43 | { |
| 44 | _id = id; |
| 45 | return 0; |
| 46 | } |
| 47 | |
| 48 | // returns the number of milliseconds until the module want a worker thread to call Process |
| 49 | WebRtc_Word32 VideoCaptureImpl::TimeUntilNextProcess() |
| 50 | { |
| 51 | CriticalSectionScoped cs(&_callBackCs); |
| 52 | |
| 53 | WebRtc_Word32 timeToNormalProcess = kProcessInterval |
| 54 | - (WebRtc_Word32)((TickTime::Now() - _lastProcessTime).Milliseconds()); |
| 55 | |
| 56 | return timeToNormalProcess; |
| 57 | } |
| 58 | |
| 59 | // Process any pending tasks such as timeouts |
| 60 | WebRtc_Word32 VideoCaptureImpl::Process() |
| 61 | { |
| 62 | CriticalSectionScoped cs(&_callBackCs); |
| 63 | |
| 64 | const TickTime now = TickTime::Now(); |
| 65 | _lastProcessTime = TickTime::Now(); |
| 66 | |
| 67 | // Handle No picture alarm |
| 68 | |
| 69 | if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() && |
| 70 | _captureAlarm != Raised) |
| 71 | { |
| 72 | if (_noPictureAlarmCallBack && _captureCallBack) |
| 73 | { |
| 74 | _captureAlarm = Raised; |
| 75 | _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm); |
| 76 | } |
| 77 | } |
| 78 | else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() && |
| 79 | _captureAlarm != Cleared) |
| 80 | { |
| 81 | if (_noPictureAlarmCallBack && _captureCallBack) |
| 82 | { |
| 83 | _captureAlarm = Cleared; |
| 84 | _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm); |
| 85 | |
| 86 | } |
| 87 | } |
| 88 | |
| 89 | // Handle frame rate callback |
| 90 | if ((now - _lastFrameRateCallbackTime).Milliseconds() |
| 91 | > kFrameRateCallbackInterval) |
| 92 | { |
| 93 | if (_frameRateCallBack && _captureCallBack) |
| 94 | { |
| 95 | const WebRtc_UWord32 frameRate = CalculateFrameRate(now); |
| 96 | _captureCallBack->OnCaptureFrameRate(_id, frameRate); |
| 97 | } |
| 98 | _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback |
| 99 | |
| 100 | } |
| 101 | |
| 102 | _lastProcessFrameCount = _incomingFrameTimes[0]; |
| 103 | |
| 104 | return 0; |
| 105 | } |
| 106 | |
// Constructor. Initializes bookkeeping state only; no capture device is
// opened here. The two critical sections are heap-allocated and held as
// references, matching the 'delete &' calls in the destructor.
VideoCaptureImpl::VideoCaptureImpl(const WebRtc_Word32 id)
    : _id(id), _deviceUniqueId(NULL), _apiCs(*CriticalSectionWrapper::CreateCriticalSection()),
    _captureDelay(0), _requestedCapability(),
    _callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
    _lastProcessTime(TickTime::Now()),
    _lastFrameRateCallbackTime(TickTime::Now()), _frameRateCallBack(false),
    _noPictureAlarmCallBack(false), _captureAlarm(Cleared), _setCaptureDelay(0),
    _dataCallBack(NULL), _captureCallBack(NULL),
    _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kRotateNone),
    last_capture_time_(TickTime::MillisecondTimestamp())

{
    // Default requested capability until a caller supplies one.
    _requestedCapability.width = kDefaultWidth;
    _requestedCapability.height = kDefaultHeight;
    _requestedCapability.maxFPS = 30;
    _requestedCapability.rawType = kVideoI420;
    _requestedCapability.codecType = kVideoCodecUnknown;
    // Zero the timestamp history used by CalculateFrameRate().
    // NOTE(review): memset assumes TickTime is trivially copyable - confirm.
    memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
}
| 126 | |
| 127 | VideoCaptureImpl::~VideoCaptureImpl() |
| 128 | { |
| 129 | DeRegisterCaptureDataCallback(); |
| 130 | DeRegisterCaptureCallback(); |
| 131 | delete &_callBackCs; |
| 132 | delete &_apiCs; |
| 133 | |
| 134 | if (_deviceUniqueId) |
| 135 | delete[] _deviceUniqueId; |
| 136 | } |
| 137 | |
| 138 | WebRtc_Word32 VideoCaptureImpl::RegisterCaptureDataCallback( |
| 139 | VideoCaptureDataCallback& dataCallBack) |
| 140 | { |
| 141 | CriticalSectionScoped cs(&_apiCs); |
| 142 | CriticalSectionScoped cs2(&_callBackCs); |
| 143 | _dataCallBack = &dataCallBack; |
| 144 | |
| 145 | return 0; |
| 146 | } |
| 147 | |
| 148 | WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureDataCallback() |
| 149 | { |
| 150 | CriticalSectionScoped cs(&_apiCs); |
| 151 | CriticalSectionScoped cs2(&_callBackCs); |
| 152 | _dataCallBack = NULL; |
| 153 | return 0; |
| 154 | } |
| 155 | WebRtc_Word32 VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) |
| 156 | { |
| 157 | |
| 158 | CriticalSectionScoped cs(&_apiCs); |
| 159 | CriticalSectionScoped cs2(&_callBackCs); |
| 160 | _captureCallBack = &callBack; |
| 161 | return 0; |
| 162 | } |
| 163 | WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureCallback() |
| 164 | { |
| 165 | |
| 166 | CriticalSectionScoped cs(&_apiCs); |
| 167 | CriticalSectionScoped cs2(&_callBackCs); |
| 168 | _captureCallBack = NULL; |
| 169 | return 0; |
| 170 | |
| 171 | } |
| 172 | WebRtc_Word32 VideoCaptureImpl::SetCaptureDelay(WebRtc_Word32 delayMS) |
| 173 | { |
| 174 | CriticalSectionScoped cs(&_apiCs); |
| 175 | _captureDelay = delayMS; |
| 176 | return 0; |
| 177 | } |
| 178 | WebRtc_Word32 VideoCaptureImpl::CaptureDelay() |
| 179 | { |
| 180 | CriticalSectionScoped cs(&_apiCs); |
| 181 | return _setCaptureDelay; |
| 182 | } |
| 183 | |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 184 | WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& |
mikhal@webrtc.org | eb4840f | 2012-10-29 15:59:40 +0000 | [diff] [blame] | 185 | captureFrame, WebRtc_Word64 capture_time) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 186 | UpdateFrameCount(); // frame count used for local frame rate callback. |
| 187 | |
| 188 | const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay; |
| 189 | // Capture delay changed |
| 190 | if (_setCaptureDelay != _captureDelay) { |
| 191 | _setCaptureDelay = _captureDelay; |
| 192 | } |
| 193 | |
| 194 | // Set the capture time |
| 195 | if (capture_time != 0) { |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 196 | captureFrame.set_render_time_ms(capture_time); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 197 | } |
| 198 | else { |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 199 | captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp()); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 200 | } |
| 201 | |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 202 | if (captureFrame.render_time_ms() == last_capture_time_) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 203 | // We don't allow the same capture time for two frames, drop this one. |
| 204 | return -1; |
| 205 | } |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 206 | last_capture_time_ = captureFrame.render_time_ms(); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 207 | |
| 208 | if (_dataCallBack) { |
| 209 | if (callOnCaptureDelayChanged) { |
| 210 | _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay); |
| 211 | } |
mikhal@webrtc.org | eb4840f | 2012-10-29 15:59:40 +0000 | [diff] [blame] | 212 | _dataCallBack->OnIncomingCapturedFrame(_id, captureFrame); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 213 | } |
| 214 | |
| 215 | return 0; |
| 216 | } |
| 217 | |
| 218 | WebRtc_Word32 VideoCaptureImpl::DeliverEncodedCapturedFrame( |
mikhal@webrtc.org | eb4840f | 2012-10-29 15:59:40 +0000 | [diff] [blame] | 219 | VideoFrame& captureFrame, WebRtc_Word64 capture_time) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 220 | UpdateFrameCount(); // frame count used for local frame rate callback. |
| 221 | |
| 222 | const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay; |
| 223 | // Capture delay changed |
| 224 | if (_setCaptureDelay != _captureDelay) { |
| 225 | _setCaptureDelay = _captureDelay; |
| 226 | } |
| 227 | |
| 228 | // Set the capture time |
| 229 | if (capture_time != 0) { |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 230 | captureFrame.SetRenderTime(capture_time); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 231 | } |
| 232 | else { |
| 233 | captureFrame.SetRenderTime(TickTime::MillisecondTimestamp()); |
| 234 | } |
| 235 | |
| 236 | if (captureFrame.RenderTimeMs() == last_capture_time_) { |
| 237 | // We don't allow the same capture time for two frames, drop this one. |
| 238 | return -1; |
| 239 | } |
| 240 | last_capture_time_ = captureFrame.RenderTimeMs(); |
| 241 | |
| 242 | if (_dataCallBack) { |
| 243 | if (callOnCaptureDelayChanged) { |
| 244 | _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay); |
| 245 | } |
mikhal@webrtc.org | eb4840f | 2012-10-29 15:59:40 +0000 | [diff] [blame] | 246 | _dataCallBack->OnIncomingCapturedEncodedFrame(_id, captureFrame); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 247 | } |
| 248 | |
| 249 | return 0; |
| 250 | } |
| 251 | |
// Entry point for platform capture implementations delivering a raw or
// encoded frame buffer.
//   videoFrame       - pointer to the captured pixel/bitstream data.
//   videoFrameLength - length of that buffer in bytes.
//   frameInfo        - dimensions, raw pixel type and optional codec type.
//   captureTime      - capture timestamp in ms, or 0 to stamp with "now".
// Returns 0 on success, -1 on a malformed or unconvertible raw frame.
WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
    WebRtc_UWord8* videoFrame,
    WebRtc_Word32 videoFrameLength,
    const VideoCaptureCapability& frameInfo,
    WebRtc_Word64 captureTime/*=0*/)
{
    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
               "IncomingFrame width %d, height %d", (int) frameInfo.width,
               (int) frameInfo.height);

    TickTime startProcessTime = TickTime::Now();

    CriticalSectionScoped cs(&_callBackCs);

    const WebRtc_Word32 width = frameInfo.width;
    const WebRtc_Word32 height = frameInfo.height;

    if (frameInfo.codecType == kVideoCodecUnknown)
    {
        // Not encoded, convert to I420.
        const VideoType commonVideoType =
                  RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);

        // MJPEG frames have variable length, so the exact-size check only
        // applies to the fixed-size raw formats.
        if (frameInfo.rawType != kVideoMJPEG &&
            CalcBufferSize(commonVideoType, width,
                           abs(height)) != videoFrameLength)
        {
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                         "Wrong incoming frame length.");
            return -1;
        }

        // Setting absolute height (in case it was negative).
        // In Windows, the image starts bottom left, instead of top left.
        // Setting a negative source height, inverts the image (within LibYuv).
        int stride_y = 0;
        int stride_uv = 0;
        Calc16ByteAlignedStride(width, &stride_y, &stride_uv);
        int ret = _captureFrame.CreateEmptyFrame(width, abs(height),
                                                 stride_y,
                                                 stride_uv, stride_uv);
        if (ret < 0)
        {
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                       "Failed to allocate I420 frame.");
            return -1;
        }
        const int conversionResult = ConvertToI420(commonVideoType,
                                                   videoFrame,
                                                   0, 0,  // No cropping
                                                   width, height,
                                                   videoFrameLength,
                                                   _rotateFrame,
                                                   &_captureFrame);
        if (conversionResult < 0)
        {
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                       "Failed to convert capture frame from type %d to I420",
                       frameInfo.rawType);
            return -1;
        }
        DeliverCapturedFrame(_captureFrame, captureTime);
    }
    else // Encoded format
    {
        // NOTE(review): on CopyFrame failure only a trace is emitted and the
        // (possibly stale) _capture_encoded_frame is still delivered below -
        // confirm whether an early return is intended here.
        if (_capture_encoded_frame.CopyFrame(videoFrameLength, videoFrame) != 0)
        {
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                         "Failed to copy captured frame of length %d",
                         static_cast<int>(videoFrameLength));
        }
        DeliverEncodedCapturedFrame(_capture_encoded_frame, captureTime);
    }

    // Warn when processing a single frame took too long; MJPG capture
    // cannot keep up if delivery costs more than ~10 ms per frame.
    const WebRtc_UWord32 processTime =
        (WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds();
    if (processTime > 10) // If the process time is too long MJPG will not work well.
    {
        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                     "Too long processing time of Incoming frame: %ums",
                     (unsigned int) processTime);
    }

    return 0;
}
| 337 | |
| 338 | WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420( |
| 339 | const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) { |
| 340 | |
| 341 | CriticalSectionScoped cs(&_callBackCs); |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 342 | int size_y = video_frame.height * video_frame.y_pitch; |
| 343 | int size_u = video_frame.u_pitch * (video_frame.height + 1) / 2; |
| 344 | int size_v = video_frame.v_pitch * (video_frame.height + 1) / 2; |
| 345 | // TODO(mikhal): Can we use Swap here? This will do a memcpy. |
| 346 | int ret = _captureFrame.CreateFrame(size_y, video_frame.y_plane, |
| 347 | size_u, video_frame.u_plane, |
| 348 | size_v, video_frame.v_plane, |
| 349 | video_frame.width, video_frame.height, |
| 350 | video_frame.y_pitch, video_frame.u_pitch, |
| 351 | video_frame.v_pitch); |
| 352 | if (ret < 0) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 353 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 354 | "Failed to create I420VideoFrame"); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 355 | return -1; |
| 356 | } |
| 357 | |
mikhal@webrtc.org | eb4840f | 2012-10-29 15:59:40 +0000 | [diff] [blame] | 358 | DeliverCapturedFrame(_captureFrame, captureTime); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 359 | |
| 360 | return 0; |
| 361 | } |
| 362 | |
| 363 | WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) |
| 364 | { |
| 365 | CriticalSectionScoped cs(&_apiCs); |
| 366 | CriticalSectionScoped cs2(&_callBackCs); |
| 367 | switch (rotation) |
| 368 | { |
| 369 | case kCameraRotate0: |
| 370 | _rotateFrame = kRotateNone; |
| 371 | break; |
| 372 | case kCameraRotate90: |
| 373 | _rotateFrame = kRotate90; |
| 374 | break; |
| 375 | case kCameraRotate180: |
| 376 | _rotateFrame = kRotate180; |
| 377 | break; |
| 378 | case kCameraRotate270: |
| 379 | _rotateFrame = kRotate270; |
| 380 | break; |
| 381 | } |
| 382 | return 0; |
| 383 | } |
| 384 | |
| 385 | WebRtc_Word32 VideoCaptureImpl::EnableFrameRateCallback(const bool enable) |
| 386 | { |
| 387 | CriticalSectionScoped cs(&_apiCs); |
| 388 | CriticalSectionScoped cs2(&_callBackCs); |
| 389 | _frameRateCallBack = enable; |
| 390 | if (enable) |
| 391 | { |
| 392 | _lastFrameRateCallbackTime = TickTime::Now(); |
| 393 | } |
| 394 | return 0; |
| 395 | } |
| 396 | |
| 397 | WebRtc_Word32 VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) |
| 398 | { |
| 399 | CriticalSectionScoped cs(&_apiCs); |
| 400 | CriticalSectionScoped cs2(&_callBackCs); |
| 401 | _noPictureAlarmCallBack = enable; |
| 402 | return 0; |
| 403 | } |
| 404 | |
| 405 | void VideoCaptureImpl::UpdateFrameCount() |
| 406 | { |
| 407 | if (_incomingFrameTimes[0].MicrosecondTimestamp() == 0) |
| 408 | { |
| 409 | // first no shift |
| 410 | } |
| 411 | else |
| 412 | { |
| 413 | // shift |
| 414 | for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--) |
| 415 | { |
| 416 | _incomingFrameTimes[i + 1] = _incomingFrameTimes[i]; |
| 417 | } |
| 418 | } |
| 419 | _incomingFrameTimes[0] = TickTime::Now(); |
| 420 | } |
| 421 | |
// Estimates the current capture rate (frames per second, rounded) from the
// timestamp history maintained by UpdateFrameCount(). Only timestamps
// within kFrameRateHistoryWindowMs of 'now' are counted.
WebRtc_UWord32 VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
{
    WebRtc_Word32 num = 0;
    WebRtc_Word32 nrOfFrames = 0;
    // Walk from the second-newest entry (index 0 is the current frame)
    // until an empty slot or a timestamp outside the window.
    // NOTE(review): the bound (kFrameRateCountHistorySize - 1) means the
    // last history slot is never examined - possible off-by-one; confirm.
    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
    {
        if (_incomingFrameTimes[num].Ticks() <= 0
            || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
        {
            break;
        }
        else
        {
            nrOfFrames++;
        }
    }
    if (num > 1)
    {
        // Frames counted divided by the span they cover, rounded to nearest.
        WebRtc_Word64 diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
        if (diff > 0)
        {
            return WebRtc_UWord32((nrOfFrames * 1000.0f / diff) + 0.5f);
        }
    }

    return nrOfFrames;
}
| 449 | } // namespace videocapturemodule |
| 450 | } // namespace webrtc |