wuchengli@chromium.org | 1bdf186 | 2014-05-28 07:00:51 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
| 11 | // This file includes unit tests for ViECapturer. |
| 12 | |
#include "webrtc/video_engine/vie_capturer.h"

#include <string.h>

#include <vector>

#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common.h"
#include "webrtc/common_video/interface/native_handle.h"
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/scoped_vector.h"
#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
| 30 | |
| 31 | using ::testing::_; |
| 32 | using ::testing::Invoke; |
| 33 | using ::testing::NiceMock; |
| 34 | using ::testing::Return; |
| 35 | using ::testing::WithArg; |
| 36 | |
| 37 | // If an output frame does not arrive in 500ms, the test will fail. |
| 38 | #define FRAME_TIMEOUT_MS 500 |
| 39 | |
| 40 | namespace webrtc { |
| 41 | |
| 42 | bool EqualFrames(const I420VideoFrame& frame1, |
| 43 | const I420VideoFrame& frame2); |
| 44 | bool EqualTextureFrames(const I420VideoFrame& frame1, |
| 45 | const I420VideoFrame& frame2); |
| 46 | bool EqualBufferFrames(const I420VideoFrame& frame1, |
| 47 | const I420VideoFrame& frame2); |
| 48 | bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1, |
| 49 | const ScopedVector<I420VideoFrame>& frames2); |
| 50 | I420VideoFrame* CreateI420VideoFrame(uint8_t length); |
| 51 | |
// Minimal NativeHandle stub used to construct texture frames in these tests.
// The handle value is never dereferenced by the code under test, so
// returning NULL is sufficient.
class FakeNativeHandle : public NativeHandle {
 public:
  FakeNativeHandle() {}
  virtual ~FakeNativeHandle() {}
  virtual void* GetHandle() { return NULL; }
};
| 58 | |
// Test fixture that wires a ViECapturer to a mocked capture module, process
// thread and frame callback. Tests inject frames through the captured
// VideoCaptureDataCallback and observe what the capturer delivers.
class ViECapturerTest : public ::testing::Test {
 protected:
  ViECapturerTest()
      : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
        mock_process_thread_(new NiceMock<MockProcessThread>),
        mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
        data_callback_(NULL),
        output_frame_event_(EventWrapper::Create()) {
  }

  virtual void SetUp() {
    // Grab the data callback that ViECapturer registers on the capture
    // module, so tests can push input frames into the capturer, and record
    // every frame the capturer delivers downstream. These expectations must
    // be in place before CreateViECapture runs.
    EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
        .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
    EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
        .WillRepeatedly(
            WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));

    Config config;
    vie_capturer_.reset(
        ViECapturer::CreateViECapture(
            0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
    vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
  }

  virtual void TearDown() {
    // ViECapturer accesses |mock_process_thread_| in destructor and should
    // be deleted first.
    vie_capturer_.reset();
  }

  // Invoked by the RegisterCaptureDataCallback expectation above.
  void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
    data_callback_ = &data_callback;
  }

  // Pushes |frame| into the capturer as if it came from the capture device.
  void AddInputFrame(I420VideoFrame* frame) {
    data_callback_->OnIncomingCapturedFrame(0, *frame);
  }

  // Invoked by the DeliverFrame expectation above for every delivered frame.
  void AddOutputFrame(I420VideoFrame* frame) {
    // Only buffer-backed frames have a Y plane worth tracking; texture
    // frames carry a native handle instead.
    if (frame->native_handle() == NULL)
      output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
    // Clone the frames because ViECapturer owns the frames.
    output_frames_.push_back(frame->CloneFrame());
    output_frame_event_->Set();
  }

  // Blocks until AddOutputFrame signals; fails the test after
  // FRAME_TIMEOUT_MS.
  void WaitOutputFrame() {
    EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
  }

  scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
  scoped_ptr<MockProcessThread> mock_process_thread_;
  scoped_ptr<MockViEFrameCallback> mock_frame_callback_;

  // Used to send input capture frames to ViECapturer.
  VideoCaptureDataCallback* data_callback_;

  scoped_ptr<ViECapturer> vie_capturer_;

  // Input capture frames of ViECapturer.
  ScopedVector<I420VideoFrame> input_frames_;

  // Indicate an output frame has arrived.
  scoped_ptr<EventWrapper> output_frame_event_;

  // Output delivered frames of ViECaptuer.
  ScopedVector<I420VideoFrame> output_frames_;

  // The pointers of Y plane buffers of output frames. This is used to verify
  // the frame are swapped and not copied.
  std::vector<uint8_t*> output_frame_ybuffers_;
};
| 131 | |
| 132 | TEST_F(ViECapturerTest, TestTextureFrames) { |
| 133 | const int kNumFrame = 3; |
| 134 | for (int i = 0 ; i < kNumFrame; ++i) { |
| 135 | webrtc::RefCountImpl<FakeNativeHandle>* handle = |
| 136 | new webrtc::RefCountImpl<FakeNativeHandle>(); |
| 137 | input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i)); |
| 138 | AddInputFrame(input_frames_[i]); |
| 139 | WaitOutputFrame(); |
| 140 | } |
| 141 | |
| 142 | EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 143 | } |
| 144 | |
| 145 | TEST_F(ViECapturerTest, TestI420Frames) { |
| 146 | const int kNumFrame = 4; |
| 147 | ScopedVector<I420VideoFrame> copied_input_frames; |
| 148 | std::vector<uint8_t*> ybuffer_pointers; |
| 149 | for (int i = 0; i < kNumFrame; ++i) { |
| 150 | input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1))); |
| 151 | ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane)); |
| 152 | // Copy input frames because the buffer data will be swapped. |
| 153 | copied_input_frames.push_back(input_frames_[i]->CloneFrame()); |
| 154 | AddInputFrame(input_frames_[i]); |
| 155 | WaitOutputFrame(); |
| 156 | } |
| 157 | |
| 158 | EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_)); |
| 159 | // Make sure the buffer is swapped and not copied. |
| 160 | for (int i = 0; i < kNumFrame; ++i) |
| 161 | EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]); |
| 162 | // The pipeline should be filled with frames with allocated buffers. Check |
| 163 | // the last input frame has the same allocated size after swapping. |
| 164 | EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane), |
| 165 | copied_input_frames.back()->allocated_size(kYPlane)); |
| 166 | } |
| 167 | |
| 168 | TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) { |
| 169 | webrtc::RefCountImpl<FakeNativeHandle>* handle = |
| 170 | new webrtc::RefCountImpl<FakeNativeHandle>(); |
| 171 | input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1)); |
| 172 | AddInputFrame(input_frames_[0]); |
| 173 | WaitOutputFrame(); |
| 174 | |
| 175 | input_frames_.push_back(CreateI420VideoFrame(1)); |
| 176 | scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame()); |
| 177 | AddInputFrame(copied_input_frame.get()); |
| 178 | WaitOutputFrame(); |
| 179 | |
| 180 | EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 181 | } |
| 182 | |
| 183 | TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) { |
| 184 | input_frames_.push_back(CreateI420VideoFrame(1)); |
| 185 | scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame()); |
| 186 | AddInputFrame(copied_input_frame.get()); |
| 187 | WaitOutputFrame(); |
| 188 | |
| 189 | webrtc::RefCountImpl<FakeNativeHandle>* handle = |
| 190 | new webrtc::RefCountImpl<FakeNativeHandle>(); |
| 191 | input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1)); |
| 192 | AddInputFrame(input_frames_[1]); |
| 193 | WaitOutputFrame(); |
| 194 | |
| 195 | EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); |
| 196 | } |
| 197 | |
| 198 | bool EqualFrames(const I420VideoFrame& frame1, |
| 199 | const I420VideoFrame& frame2) { |
| 200 | if (frame1.native_handle() != NULL || frame2.native_handle() != NULL) |
| 201 | return EqualTextureFrames(frame1, frame2); |
| 202 | return EqualBufferFrames(frame1, frame2); |
| 203 | } |
| 204 | |
| 205 | bool EqualTextureFrames(const I420VideoFrame& frame1, |
| 206 | const I420VideoFrame& frame2) { |
| 207 | return ((frame1.native_handle() == frame2.native_handle()) && |
| 208 | (frame1.width() == frame2.width()) && |
| 209 | (frame1.height() == frame2.height()) && |
| 210 | (frame1.timestamp() == frame2.timestamp()) && |
| 211 | (frame1.render_time_ms() == frame2.render_time_ms())); |
| 212 | } |
| 213 | |
| 214 | bool EqualBufferFrames(const I420VideoFrame& frame1, |
| 215 | const I420VideoFrame& frame2) { |
| 216 | return ((frame1.width() == frame2.width()) && |
| 217 | (frame1.height() == frame2.height()) && |
| 218 | (frame1.stride(kYPlane) == frame2.stride(kYPlane)) && |
| 219 | (frame1.stride(kUPlane) == frame2.stride(kUPlane)) && |
| 220 | (frame1.stride(kVPlane) == frame2.stride(kVPlane)) && |
| 221 | (frame1.timestamp() == frame2.timestamp()) && |
| 222 | (frame1.ntp_time_ms() == frame2.ntp_time_ms()) && |
| 223 | (frame1.render_time_ms() == frame2.render_time_ms()) && |
| 224 | (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) && |
| 225 | (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) && |
| 226 | (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) && |
| 227 | (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), |
| 228 | frame1.allocated_size(kYPlane)) == 0) && |
| 229 | (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), |
| 230 | frame1.allocated_size(kUPlane)) == 0) && |
| 231 | (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), |
| 232 | frame1.allocated_size(kVPlane)) == 0)); |
| 233 | } |
| 234 | |
| 235 | bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1, |
| 236 | const ScopedVector<I420VideoFrame>& frames2) { |
| 237 | if (frames1.size() != frames2.size()) |
| 238 | return false; |
| 239 | for (size_t i = 0; i < frames1.size(); ++i) { |
| 240 | if (!EqualFrames(*frames1[i], *frames2[i])) |
| 241 | return false; |
| 242 | } |
| 243 | return true; |
| 244 | } |
| 245 | |
| 246 | I420VideoFrame* CreateI420VideoFrame(uint8_t data) { |
| 247 | I420VideoFrame* frame = new I420VideoFrame(); |
| 248 | const int width = 36; |
| 249 | const int height = 24; |
| 250 | const int kSizeY = width * height * 2; |
| 251 | const int kSizeUV = width * height; |
| 252 | uint8_t buffer[kSizeY]; |
| 253 | memset(buffer, data, kSizeY); |
| 254 | frame->CreateFrame( |
| 255 | kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width, |
| 256 | width / 2, width / 2); |
| 257 | frame->set_timestamp(data); |
| 258 | frame->set_ntp_time_ms(data); |
| 259 | frame->set_render_time_ms(data); |
| 260 | return frame; |
| 261 | } |
| 262 | |
| 263 | } // namespace webrtc |