andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
| 11 | #include <stdio.h> |
| 12 | |
andrew@webrtc.org | 12e6574 | 2012-10-22 21:51:58 +0000 | [diff] [blame] | 13 | #include "gtest/gtest.h" |
| 14 | #include "modules/utility/interface/process_thread.h" |
andrew@webrtc.org | 5f6856f | 2012-10-30 21:58:00 +0000 | [diff] [blame^] | 15 | #include "webrtc/modules/video_capture/include/video_capture.h" |
| 16 | #include "webrtc/modules/video_capture/include/video_capture_factory.h" |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 17 | #include "common_video/interface/i420_video_frame.h" |
| 18 | #include "common_video/libyuv/include/webrtc_libyuv.h" |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 19 | #include "system_wrappers/interface/critical_section_wrapper.h" |
| 20 | #include "system_wrappers/interface/scoped_ptr.h" |
| 21 | #include "system_wrappers/interface/scoped_refptr.h" |
| 22 | #include "system_wrappers/interface/sleep.h" |
| 23 | #include "system_wrappers/interface/tick_util.h" |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 24 | |
| 25 | using webrtc::CriticalSectionWrapper; |
| 26 | using webrtc::CriticalSectionScoped; |
| 27 | using webrtc::scoped_ptr; |
| 28 | using webrtc::SleepMs; |
| 29 | using webrtc::TickTime; |
| 30 | using webrtc::VideoCaptureAlarm; |
| 31 | using webrtc::VideoCaptureCapability; |
| 32 | using webrtc::VideoCaptureDataCallback; |
| 33 | using webrtc::VideoCaptureFactory; |
| 34 | using webrtc::VideoCaptureFeedBack; |
| 35 | using webrtc::VideoCaptureModule; |
| 36 | |
| 37 | |
// Polls |ex| every 5 ms until it becomes true or |timeout| milliseconds have
// elapsed; the last evaluation of |ex| is left in |res|. Internal helper for
// EXPECT_TRUE_WAIT.
// NOTE: the original macro ended in "} while (0);" (with a stray trailing
// line-continuation); the semicolon defeats the do-while(0) idiom and breaks
// use inside an unbraced if/else. Callers supply the semicolon themselves.
#define WAIT_(ex, timeout, res) \
  do { \
    res = (ex); \
    WebRtc_Word64 start = TickTime::MillisecondTimestamp(); \
    while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
      SleepMs(5); \
      res = (ex); \
    } \
  } while (0)

// Waits up to |timeout| ms for |ex| to become true; on timeout, re-evaluates
// |ex| inside EXPECT_TRUE so gtest reports the failing expression.
#define EXPECT_TRUE_WAIT(ex, timeout) \
  do { \
    bool res; \
    WAIT_(ex, timeout, res); \
    if (!res) EXPECT_TRUE(ex); \
  } while (0)
| 54 | |
| 55 | |
// Maximum time (ms) the tests wait for an asynchronous condition such as
// frame delivery.
static const int kTimeOut = 5000;
// Default capture format used by the tests: CIF resolution at 30 fps.
static const int kTestHeight = 288;
static const int kTestWidth = 352;
static const int kTestFramerate = 30;
| 60 | |
| 61 | // Compares the content of two video frames. |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 62 | static bool CompareFrames(const webrtc::I420VideoFrame& frame1, |
| 63 | const webrtc::I420VideoFrame& frame2) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 64 | bool result = |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 65 | (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) && |
| 66 | (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) && |
| 67 | (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) && |
| 68 | (frame1.width() == frame2.width()) && |
| 69 | (frame1.height() == frame2.height()); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 70 | |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 71 | if (!result) |
| 72 | return false; |
| 73 | for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) { |
| 74 | webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane); |
| 75 | int allocated_size1 = frame1.allocated_size(plane_type); |
| 76 | int allocated_size2 = frame2.allocated_size(plane_type); |
| 77 | if (allocated_size1 != allocated_size2) |
| 78 | return false; |
| 79 | const uint8_t* plane_buffer1 = frame1.buffer(plane_type); |
| 80 | const uint8_t* plane_buffer2 = frame2.buffer(plane_type); |
| 81 | if (memcmp(plane_buffer1, plane_buffer2, allocated_size1)) |
| 82 | return false; |
| 83 | } |
| 84 | return true; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 85 | } |
| 86 | |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 87 | // Compares the content of a I420 frame in planar form and the new video frame. |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 88 | static bool CompareFrames(const webrtc::VideoFrameI420& frame1, |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 89 | const webrtc::I420VideoFrame& frame2) { |
| 90 | if (frame1.width != frame2.width() || |
| 91 | frame1.height != frame2.height()) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 92 | return false; |
| 93 | } |
| 94 | |
| 95 | // Compare Y |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 96 | const unsigned char* y_plane = frame1.y_plane; |
| 97 | const unsigned char* y_plane2 = frame2.buffer(webrtc::kYPlane); |
| 98 | for (int i = 0; i < frame2.height(); ++i) { |
| 99 | for (int j = 0; j < frame2.width(); ++j) { |
| 100 | if (*y_plane != *y_plane2) |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 101 | return false; |
| 102 | ++y_plane; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 103 | ++y_plane2; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 104 | } |
| 105 | y_plane += frame1.y_pitch - frame1.width; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 106 | y_plane2 += frame2.stride(webrtc::kYPlane) - frame2.width(); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 107 | } |
| 108 | |
| 109 | // Compare U |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 110 | const unsigned char* u_plane = frame1.u_plane; |
| 111 | const unsigned char* u_plane2 = frame2.buffer(webrtc::kUPlane); |
| 112 | for (int i = 0; i < (frame2.height() + 1) / 2; ++i) { |
| 113 | for (int j = 0; j < (frame2.width() + 1) / 2; ++j) { |
| 114 | if (*u_plane != *u_plane2) |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 115 | return false; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 116 | ++u_plane; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 117 | ++u_plane2; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 118 | } |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 119 | u_plane += frame1.u_pitch - (frame1.width + 1) / 2; |
| 120 | u_plane2+= frame2.stride(webrtc::kUPlane) - (frame2.width() + 1) / 2; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 121 | } |
| 122 | |
| 123 | // Compare V |
| 124 | unsigned char* v_plane = frame1.v_plane; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 125 | const unsigned char* v_plane2 = frame2.buffer(webrtc::kVPlane); |
| 126 | for (int i = 0; i < frame2.height() /2; ++i) { |
| 127 | for (int j = 0; j < frame2.width() /2; ++j) { |
| 128 | if (*u_plane != *u_plane2) { |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 129 | return false; |
| 130 | } |
| 131 | ++v_plane; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 132 | ++v_plane2; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 133 | } |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 134 | v_plane += frame1.v_pitch - (frame1.width + 1) / 2; |
| 135 | u_plane2+= frame2.stride(webrtc::kVPlane) - (frame2.width() + 1) / 2; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 136 | } |
| 137 | return true; |
| 138 | } |
| 139 | |
| 140 | |
// Capture data callback used by the tests. Records the number of delivered
// frames, the reported capture delay, the last delivered frame and how often
// the inter-frame timing deviated more than +/-10% from the expected frame
// period. All state is guarded by |capture_cs_| because frames arrive on the
// capture module's thread while the test thread polls the accessors.
class TestVideoCaptureCallback : public VideoCaptureDataCallback {
 public:
  TestVideoCaptureCallback()
      : capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
        capture_delay_(0),
        last_render_time_ms_(0),
        incoming_frames_(0),
        timing_warnings_(0) {
  }

  ~TestVideoCaptureCallback() {
    if (timing_warnings_ > 0)
      printf("No of timing warnings %d\n", timing_warnings_);
  }

  // Called by the capture module for every delivered frame. Verifies the
  // frame matches the expected capability and that its render timestamp is
  // "now", then stores a copy for later comparison.
  virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
                                       webrtc::I420VideoFrame& videoFrame) {
    CriticalSectionScoped cs(capture_cs_.get());

    int height = videoFrame.height();
    int width = videoFrame.width();
    EXPECT_EQ(height, capability_.height);
    EXPECT_EQ(width, capability_.width);
    // The render timestamp should be the time now (within 30 ms).
    EXPECT_TRUE(
        videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
        videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());

    // Count a timing warning when the gap since the previous frame is more
    // than 10% off the nominal 1000/maxFPS period (skipped for the first
    // frame, when last_render_time_ms_ is still 0).
    if ((videoFrame.render_time_ms() >
            last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
            last_render_time_ms_ > 0) ||
        (videoFrame.render_time_ms() <
            last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
            last_render_time_ms_ > 0)) {
      timing_warnings_++;
    }

    incoming_frames_++;
    last_render_time_ms_ = videoFrame.render_time_ms();
    last_frame_.CopyFrame(videoFrame);
  }
  // Encoded-frame delivery is not exercised by these tests.
  virtual void OnIncomingCapturedEncodedFrame(const WebRtc_Word32 id,
                                              webrtc::VideoFrame& videoFrame)
  {
    assert(!"NOTIMPLEMENTED");
  }

  virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
                                     const WebRtc_Word32 delay) {
    CriticalSectionScoped cs(capture_cs_.get());
    capture_delay_ = delay;
  }

  // Sets the capability incoming frames are checked against and resets all
  // per-run counters.
  void SetExpectedCapability(VideoCaptureCapability capability) {
    CriticalSectionScoped cs(capture_cs_.get());
    capability_= capability;
    incoming_frames_ = 0;
    last_render_time_ms_ = 0;
    capture_delay_ = 0;
  }
  int incoming_frames() {
    CriticalSectionScoped cs(capture_cs_.get());
    return incoming_frames_;
  }

  int capture_delay() {
    CriticalSectionScoped cs(capture_cs_.get());
    return capture_delay_;
  }
  int timing_warnings() {
    CriticalSectionScoped cs(capture_cs_.get());
    return timing_warnings_;
  }
  VideoCaptureCapability capability() {
    CriticalSectionScoped cs(capture_cs_.get());
    return capability_;
  }

  bool CompareLastFrame(const webrtc::I420VideoFrame& frame) {
    CriticalSectionScoped cs(capture_cs_.get());
    return CompareFrames(last_frame_, frame);
  }

  bool CompareLastFrame(const webrtc::VideoFrameI420& frame) {
    CriticalSectionScoped cs(capture_cs_.get());
    return CompareFrames(frame, last_frame_);
  }

 private:
  scoped_ptr<CriticalSectionWrapper> capture_cs_;
  VideoCaptureCapability capability_;
  int capture_delay_;
  WebRtc_Word64 last_render_time_ms_;
  int incoming_frames_;
  int timing_warnings_;
  webrtc::I420VideoFrame last_frame_;
};
| 238 | |
| 239 | class TestVideoCaptureFeedBack : public VideoCaptureFeedBack { |
| 240 | public: |
| 241 | TestVideoCaptureFeedBack() : |
| 242 | capture_cs_(CriticalSectionWrapper::CreateCriticalSection()), |
| 243 | frame_rate_(0), |
| 244 | alarm_(webrtc::Cleared) { |
| 245 | } |
| 246 | |
| 247 | virtual void OnCaptureFrameRate(const WebRtc_Word32 id, |
| 248 | const WebRtc_UWord32 frameRate) { |
| 249 | CriticalSectionScoped cs(capture_cs_.get()); |
| 250 | frame_rate_ = frameRate; |
| 251 | } |
| 252 | |
| 253 | virtual void OnNoPictureAlarm(const WebRtc_Word32 id, |
| 254 | const VideoCaptureAlarm reported_alarm) { |
| 255 | CriticalSectionScoped cs(capture_cs_.get()); |
| 256 | alarm_ = reported_alarm; |
| 257 | } |
| 258 | int frame_rate() { |
| 259 | CriticalSectionScoped cs(capture_cs_.get()); |
| 260 | return frame_rate_; |
| 261 | |
| 262 | } |
| 263 | VideoCaptureAlarm alarm() { |
| 264 | CriticalSectionScoped cs(capture_cs_.get()); |
| 265 | return alarm_; |
| 266 | } |
| 267 | |
| 268 | private: |
| 269 | scoped_ptr<CriticalSectionWrapper> capture_cs_; |
| 270 | unsigned int frame_rate_; |
| 271 | VideoCaptureAlarm alarm_; |
| 272 | }; |
| 273 | |
// Fixture for tests that exercise real capture devices. Enumerates the
// available devices in SetUp() and provides helpers for opening a device and
// starting capture with a given capability.
class VideoCaptureTest : public testing::Test {
 public:
  VideoCaptureTest() : number_of_devices_(0) {}

  void SetUp() {
    device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(5));
    number_of_devices_ = device_info_->NumberOfDevices();
    // These tests require at least one physical capture device.
    ASSERT_GT(number_of_devices_, 0u);
  }

  // Opens capture device |device| and registers |callback| for its frames.
  // Returns NULL if the module could not be created.
  webrtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
      unsigned int device,
      VideoCaptureDataCallback* callback) {
    char device_name[256];
    char unique_name[256];

    EXPECT_EQ(0, device_info_->GetDeviceName(
        device, device_name, 256, unique_name, 256));

    webrtc::scoped_refptr<VideoCaptureModule> module(
        VideoCaptureFactory::Create(device, unique_name));
    if (module.get() == NULL)
      return NULL;

    EXPECT_FALSE(module->CaptureStarted());

    EXPECT_EQ(0, module->RegisterCaptureDataCallback(*callback));
    return module;
  }

  // Starts capture on |capture_module| and verifies the module reports the
  // requested width/height back.
  void StartCapture(VideoCaptureModule* capture_module,
                    VideoCaptureCapability capability) {
    EXPECT_EQ(0, capture_module->StartCapture(capability));
    EXPECT_TRUE(capture_module->CaptureStarted());

    VideoCaptureCapability resulting_capability;
    EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
    EXPECT_EQ(capability.width, resulting_capability.width);
    EXPECT_EQ(capability.height, resulting_capability.height);
  }

  scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
  unsigned int number_of_devices_;
};
| 318 | |
// Repeatedly creates a capture module, starts it, verifies frame delivery and
// stops it, checking that start-up and shut-down stay within time bounds.
TEST_F(VideoCaptureTest, CreateDelete) {
  for (int i = 0; i < 5; ++i) {
    WebRtc_Word64 start_time = TickTime::MillisecondTimestamp();
    TestVideoCaptureCallback capture_observer;
    webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
        0, &capture_observer));
    ASSERT_TRUE(module.get() != NULL);

    VideoCaptureCapability capability;
#ifndef WEBRTC_MAC
    device_info_->GetCapability(module->CurrentDeviceName(), 0, capability);
#else
    // Capability enumeration is not supported on Mac; use the test defaults.
    capability.width = kTestWidth;
    capability.height = kTestHeight;
    capability.maxFPS = kTestFramerate;
    capability.rawType = webrtc::kVideoUnknown;
#endif
    capture_observer.SetExpectedCapability(capability);
    StartCapture(module.get(), capability);

    // Less than 4s to start the camera.
    EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);

    // Make sure 5 frames are captured.
    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);

    EXPECT_GT(capture_observer.capture_delay(), 0);

    WebRtc_Word64 stop_time = TickTime::MillisecondTimestamp();
    EXPECT_EQ(0, module->StopCapture());
    EXPECT_FALSE(module->CaptureStarted());

    // Less than 3s to stop the camera.
    EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
  }
}
| 355 | |
// Starts capture once for every capability the device advertises and checks
// that frames are delivered for each.
TEST_F(VideoCaptureTest, Capabilities) {
#ifdef WEBRTC_MAC
  printf("Video capture capabilities are not supported on Mac.\n");
  return;
#endif

  TestVideoCaptureCallback capture_observer;

  webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
      0, &capture_observer));
  ASSERT_TRUE(module.get() != NULL);

  int number_of_capabilities = device_info_->NumberOfCapabilities(
      module->CurrentDeviceName());
  EXPECT_GT(number_of_capabilities, 0);
  for (int i = 0; i < number_of_capabilities; ++i) {
    VideoCaptureCapability capability;
    EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i,
                                             capability));
    capture_observer.SetExpectedCapability(capability);
    StartCapture(module.get(), capability);
    // Make sure 5 frames are captured.
    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);

    EXPECT_EQ(0, module->StopCapture());
  }
}
| 383 | |
| 384 | // NOTE: flaky, crashes sometimes. |
| 385 | // http://code.google.com/p/webrtc/issues/detail?id=777 |
| 386 | TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { |
| 387 | if (number_of_devices_ < 2) { |
| 388 | printf("There are not two cameras available. Aborting test. \n"); |
| 389 | return; |
| 390 | } |
| 391 | |
| 392 | TestVideoCaptureCallback capture_observer1; |
| 393 | webrtc::scoped_refptr<VideoCaptureModule> module1(OpenVideoCaptureDevice( |
| 394 | 0, &capture_observer1)); |
| 395 | ASSERT_TRUE(module1.get() != NULL); |
| 396 | VideoCaptureCapability capability1; |
| 397 | #ifndef WEBRTC_MAC |
| 398 | device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability1); |
| 399 | #else |
| 400 | capability1.width = kTestWidth; |
| 401 | capability1.height = kTestHeight; |
| 402 | capability1.maxFPS = kTestFramerate; |
| 403 | capability1.rawType = webrtc::kVideoUnknown; |
| 404 | #endif |
| 405 | capture_observer1.SetExpectedCapability(capability1); |
| 406 | |
| 407 | TestVideoCaptureCallback capture_observer2; |
| 408 | webrtc::scoped_refptr<VideoCaptureModule> module2(OpenVideoCaptureDevice( |
| 409 | 1, &capture_observer2)); |
| 410 | ASSERT_TRUE(module1.get() != NULL); |
| 411 | |
| 412 | |
| 413 | VideoCaptureCapability capability2; |
| 414 | #ifndef WEBRTC_MAC |
| 415 | device_info_->GetCapability(module2->CurrentDeviceName(), 0, capability2); |
| 416 | #else |
| 417 | capability2.width = kTestWidth; |
| 418 | capability2.height = kTestHeight; |
| 419 | capability2.maxFPS = kTestFramerate; |
| 420 | capability2.rawType = webrtc::kVideoUnknown; |
| 421 | #endif |
| 422 | capture_observer2.SetExpectedCapability(capability2); |
| 423 | |
| 424 | StartCapture(module1.get(), capability1); |
| 425 | StartCapture(module2.get(), capability2); |
| 426 | EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut); |
| 427 | EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut); |
| 428 | } |
| 429 | |
// Test class for testing external capture and capture feedback information
// such as frame rate and picture alarm.
class VideoCaptureExternalTest : public testing::Test {
 public:
  void SetUp() {
    // Device id 0 with an external-capture out-parameter: Create() returns an
    // external-capture module and sets capture_input_interface_ to the
    // interface frames are pushed through.
    capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
    process_module_ = webrtc::ProcessThread::CreateProcessThread();
    process_module_->Start();
    process_module_->RegisterModule(capture_module_);

    VideoCaptureCapability capability;
    capability.width = kTestWidth;
    capability.height = kTestHeight;
    capability.rawType = webrtc::kVideoYV12;
    capability.maxFPS = kTestFramerate;
    capture_callback_.SetExpectedCapability(capability);

    // Mid-gray (127) test frame with stride equal to width (no padding).
    test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
                                 ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
    SleepMs(1);  // Wait 1ms so that two tests can't have the same timestamp.
    memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight);
    memset(test_frame_.buffer(webrtc::kUPlane), 127,
           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
    memset(test_frame_.buffer(webrtc::kVPlane), 127,
           ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));

    EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback(
        capture_callback_));
    EXPECT_EQ(0, capture_module_->RegisterCaptureCallback(capture_feedback_));
    EXPECT_EQ(0, capture_module_->EnableFrameRateCallback(true));
    EXPECT_EQ(0, capture_module_->EnableNoPictureAlarm(true));
  }

  void TearDown() {
    process_module_->Stop();
    webrtc::ProcessThread::DestroyProcessThread(process_module_);
  }

  // Non-owning; points into capture_module_ (set by Create() in SetUp).
  webrtc::VideoCaptureExternal* capture_input_interface_;
  webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
  webrtc::ProcessThread* process_module_;
  webrtc::I420VideoFrame test_frame_;
  TestVideoCaptureCallback capture_callback_;
  TestVideoCaptureFeedBack capture_feedback_;
};
| 475 | |
// Test input of external video frames. Serializes the test frame to a raw
// I420 buffer, pushes it through the external-capture interface and checks
// that the callback received an identical frame.
TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
  unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
                                               test_frame_.width(),
                                               test_frame_.height());
  webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
  EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
      length, capture_callback_.capability(), 0));
  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
}
| 487 | |
| 488 | // Test input of planar I420 frames. |
| 489 | // NOTE: flaky, sometimes fails on the last CompareLastFrame. |
| 490 | // http://code.google.com/p/webrtc/issues/detail?id=777 |
| 491 | TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) { |
| 492 | webrtc::VideoFrameI420 frame_i420; |
| 493 | frame_i420.width = kTestWidth; |
| 494 | frame_i420.height = kTestHeight; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 495 | frame_i420.y_plane = test_frame_.buffer(webrtc::kYPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 496 | frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight); |
| 497 | frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2); |
| 498 | frame_i420.y_pitch = kTestWidth; |
| 499 | frame_i420.u_pitch = kTestWidth / 2; |
| 500 | frame_i420.v_pitch = kTestWidth / 2; |
| 501 | EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0)); |
| 502 | EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420)); |
| 503 | |
| 504 | // Test with a frame with pitch not equal to width |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 505 | memset(test_frame_.buffer(webrtc::kYPlane), 0xAA, |
| 506 | test_frame_.allocated_size(webrtc::kYPlane)); |
| 507 | memset(test_frame_.buffer(webrtc::kUPlane), 0xAA, |
| 508 | test_frame_.allocated_size(webrtc::kUPlane)); |
| 509 | memset(test_frame_.buffer(webrtc::kVPlane), 0xAA, |
| 510 | test_frame_.allocated_size(webrtc::kVPlane)); |
| 511 | webrtc::I420VideoFrame aligned_test_frame; |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 512 | int y_pitch = kTestWidth + 2; |
| 513 | int u_pitch = kTestWidth / 2 + 1; |
| 514 | int v_pitch = u_pitch; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 515 | aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight, |
| 516 | y_pitch, u_pitch, v_pitch); |
| 517 | memset(aligned_test_frame.buffer(webrtc::kYPlane), 0, |
| 518 | kTestWidth * kTestHeight); |
| 519 | memset(aligned_test_frame.buffer(webrtc::kUPlane), 0, |
| 520 | (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); |
| 521 | memset(aligned_test_frame.buffer(webrtc::kVPlane), 0, |
| 522 | (kTestWidth + 1) / 2 * (kTestHeight + 1) / 2); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 523 | // Copy the test_frame_ to aligned_test_frame. |
| 524 | int y_width = kTestWidth; |
| 525 | int uv_width = kTestWidth / 2; |
| 526 | int y_rows = kTestHeight; |
| 527 | int uv_rows = kTestHeight / 2; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 528 | unsigned char* y_plane = test_frame_.buffer(webrtc::kYPlane); |
| 529 | unsigned char* u_plane = test_frame_.buffer(webrtc::kUPlane); |
| 530 | unsigned char* v_plane = test_frame_.buffer(webrtc::kVPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 531 | // Copy Y |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 532 | unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 533 | for (int i = 0; i < y_rows; ++i) { |
| 534 | memcpy(current_pointer, y_plane, y_width); |
| 535 | // Remove the alignment which ViE doesn't support. |
| 536 | current_pointer += y_pitch; |
| 537 | y_plane += y_width; |
| 538 | } |
| 539 | // Copy U |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 540 | current_pointer = aligned_test_frame.buffer(webrtc::kUPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 541 | for (int i = 0; i < uv_rows; ++i) { |
| 542 | memcpy(current_pointer, u_plane, uv_width); |
| 543 | // Remove the alignment which ViE doesn't support. |
| 544 | current_pointer += u_pitch; |
| 545 | u_plane += uv_width; |
| 546 | } |
| 547 | // Copy V |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 548 | current_pointer = aligned_test_frame.buffer(webrtc::kVPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 549 | for (int i = 0; i < uv_rows; ++i) { |
| 550 | memcpy(current_pointer, v_plane, uv_width); |
| 551 | // Remove the alignment which ViE doesn't support. |
| 552 | current_pointer += v_pitch; |
| 553 | v_plane += uv_width; |
| 554 | } |
| 555 | frame_i420.width = kTestWidth; |
| 556 | frame_i420.height = kTestHeight; |
mikhal@webrtc.org | dc7e6cf | 2012-10-24 18:33:04 +0000 | [diff] [blame] | 557 | frame_i420.y_plane = aligned_test_frame.buffer(webrtc::kYPlane); |
| 558 | frame_i420.u_plane = aligned_test_frame.buffer(webrtc::kYPlane); |
| 559 | frame_i420.v_plane = aligned_test_frame.buffer(webrtc::kVPlane); |
andrew@webrtc.org | a7b57da | 2012-10-22 18:19:23 +0000 | [diff] [blame] | 560 | frame_i420.y_pitch = y_pitch; |
| 561 | frame_i420.u_pitch = u_pitch; |
| 562 | frame_i420.v_pitch = v_pitch; |
| 563 | |
| 564 | EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0)); |
| 565 | EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_)); |
| 566 | } |
| 567 | |
// Test frame rate and no picture alarm. Feeds frames at ~10 fps and then
// ~30 fps, checking the reported frame rate each time; the 500 ms pause in
// between must raise the no-picture alarm, which the second burst clears.
TEST_F(VideoCaptureExternalTest , FrameRate) {
  WebRtc_Word64 testTime = 3;
  TickTime startTime = TickTime::Now();

  // Feed frames every 100 ms (~10 fps) for |testTime| seconds.
  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
    unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
                                                 test_frame_.width(),
                                                 test_frame_.height());
    webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
      length, capture_callback_.capability(), 0));
    SleepMs(100);
  }
  EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
              capture_feedback_.frame_rate() <= 10);
  // Stop feeding frames; the module should raise the no-picture alarm.
  SleepMs(500);
  EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm());

  // Feed frames at ~30 fps; the alarm should clear.
  startTime = TickTime::Now();
  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
    unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
                                                 test_frame_.width(),
                                                 test_frame_.height());
    webrtc::scoped_array<uint8_t> test_buffer(new uint8_t[length]);
    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
      length, capture_callback_.capability(), 0));
    SleepMs(1000 / 30);
  }
  EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm());
  // Frame rate might be less than 33 since we have paused providing
  // frames for a while.
  EXPECT_TRUE(capture_feedback_.frame_rate() >= 25 &&
              capture_feedback_.frame_rate() <= 33);
}