/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/video_frame.h"

#include <math.h>
#include <string.h>

#include <algorithm>
#include <tuple>
#include <utility>

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "rtc_base/bind.h"
#include "rtc_base/time_utils.h"
#include "test/fake_texture_frame.h"
#include "test/frame_utils.h"
#include "test/gtest.h"

namespace webrtc {

namespace {

// Helper class to delegate calls to the appropriate buffer container.
class PlanarYuvBufferFactory {
 public:
  static rtc::scoped_refptr<PlanarYuvBuffer> Create(VideoFrameBuffer::Type type,
                                                    int width,
                                                    int height) {
    switch (type) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Create(width, height);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Create(width, height);
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

  static rtc::scoped_refptr<PlanarYuvBuffer> Copy(const VideoFrameBuffer& src) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Copy(src);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Copy(*src.GetI010());
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

  static rtc::scoped_refptr<PlanarYuvBuffer> Rotate(const VideoFrameBuffer& src,
                                                    VideoRotation rotation) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420:
        return I420Buffer::Rotate(src, rotation);
      case VideoFrameBuffer::Type::kI010:
        return I010Buffer::Rotate(*src.GetI010(), rotation);
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

  static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
      const VideoFrameBuffer& src,
      int offset_x,
      int offset_y,
      int crop_width,
      int crop_height) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420: {
        rtc::scoped_refptr<I420Buffer> buffer =
            I420Buffer::Create(crop_width, crop_height);
        buffer->CropAndScaleFrom(*src.GetI420(), offset_x, offset_y, crop_width,
                                 crop_height);
        return buffer;
      }
      case VideoFrameBuffer::Type::kI010: {
        rtc::scoped_refptr<I010Buffer> buffer =
            I010Buffer::Create(crop_width, crop_height);
        buffer->CropAndScaleFrom(*src.GetI010(), offset_x, offset_y, crop_width,
                                 crop_height);
        return buffer;
      }
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }

  // Crops the largest centered region matching the crop_width:crop_height
  // aspect ratio; the output keeps the resolution of the cropped region.
  static rtc::scoped_refptr<PlanarYuvBuffer> CropAndScaleFrom(
      const VideoFrameBuffer& src,
      int crop_width,
      int crop_height) {
    const int out_width =
        std::min(src.width(), crop_width * src.height() / crop_height);
    const int out_height =
        std::min(src.height(), crop_height * src.width() / crop_width);
    return CropAndScaleFrom(src, (src.width() - out_width) / 2,
                            (src.height() - out_height) / 2, out_width,
                            out_height);
  }

  static rtc::scoped_refptr<PlanarYuvBuffer>
  ScaleFrom(const VideoFrameBuffer& src, int crop_width, int crop_height) {
    switch (src.type()) {
      case VideoFrameBuffer::Type::kI420: {
        rtc::scoped_refptr<I420Buffer> buffer =
            I420Buffer::Create(crop_width, crop_height);
        buffer->ScaleFrom(*src.GetI420());
        return buffer;
      }
      case VideoFrameBuffer::Type::kI010: {
        rtc::scoped_refptr<I010Buffer> buffer =
            I010Buffer::Create(crop_width, crop_height);
        buffer->ScaleFrom(*src.GetI010());
        return buffer;
      }
      default:
        RTC_NOTREACHED();
    }
    return nullptr;
  }
};

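// Creates a buffer of the requested type filled with a diagonal luma gradient
// and horizontal/vertical chroma ramps. The corner values produced here are
// relied upon by CheckCrop() and CheckRotate() below.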
rtc::scoped_refptr<PlanarYuvBuffer> CreateGradient(VideoFrameBuffer::Type type,
                                                   int width,
                                                   int height) {
  rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
  // Initialize with a gradient: Y = 128 * (x/w + y/h), U ramps 0..255 from
  // left to right, V ramps 0..255 from top to bottom.
  for (int x = 0; x < width; x++) {
    for (int y = 0; y < height; y++) {
      buffer->MutableDataY()[x + y * width] =
          128 * (x * height + y * width) / (width * height);
    }
  }
  int chroma_width = buffer->ChromaWidth();
  int chroma_height = buffer->ChromaHeight();
  for (int x = 0; x < chroma_width; x++) {
    for (int y = 0; y < chroma_height; y++) {
      buffer->MutableDataU()[x + y * chroma_width] =
          255 * x / (chroma_width - 1);
      buffer->MutableDataV()[x + y * chroma_width] =
          255 * y / (chroma_height - 1);
    }
  }
  if (type == VideoFrameBuffer::Type::kI420)
    return buffer;

  RTC_DCHECK(type == VideoFrameBuffer::Type::kI010);
  return I010Buffer::Copy(*buffer);
}

// The offsets and sizes describe the rectangle extracted from the
// original (gradient) frame, in relative coordinates where the
// original frame corresponds to the unit square, 0.0 <= x, y < 1.0.
void CheckCrop(const webrtc::I420BufferInterface& frame,
               double offset_x,
               double offset_y,
               double rel_width,
               double rel_height) {
  int width = frame.width();
  int height = frame.height();
  // Check that pixel values in the corners match the gradient used
  // for initialization.
  for (int i = 0; i < 2; i++) {
    for (int j = 0; j < 2; j++) {
      // Pixel coordinates of the corner.
      int x = i * (width - 1);
      int y = j * (height - 1);
      // Relative coordinates, where the range 0.0 - 1.0 corresponds to the
      // size of the uncropped input frame.
      double orig_x = offset_x + i * rel_width;
      double orig_y = offset_y + j * rel_height;

      EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0,
                  (orig_x + orig_y) / 2, 0.02);
      EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0,
                  orig_x, 0.02);
      EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0,
                  orig_y, 0.02);
    }
  }
}

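// Checks that a rotated gradient frame has the expected dimensions and that
// the four corner samples ended up where the given rotation should place them.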
void CheckRotate(int width,
                 int height,
                 webrtc::VideoRotation rotation,
                 const webrtc::I420BufferInterface& rotated) {
  int rotated_width = width;
  int rotated_height = height;

  if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }
  EXPECT_EQ(rotated_width, rotated.width());
  EXPECT_EQ(rotated_height, rotated.height());

  // Clockwise order (with 0,0 at top-left).
  const struct {
    int x;
    int y;
  } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
  // Corresponding corner colors of the frame produced by CreateGradient.
  const struct {
    int y;
    int u;
    int v;
  } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}};
  int corner_offset = static_cast<int>(rotation) / 90;

  for (int i = 0; i < 4; i++) {
    int j = (i + corner_offset) % 4;
    int x = corners[j].x * (rotated_width - 1);
    int y = corners[j].y * (rotated_height - 1);
    EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]);
    EXPECT_EQ(colors[i].u,
              rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]);
    EXPECT_EQ(colors[i].v,
              rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]);
  }
}

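// Convenience accessors that read a single sample from either an I420 or an
// I010 buffer using full-resolution (luma) coordinates; the chroma planes are
// subsampled by two in both dimensions.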
int GetU(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()
        ->DataU()[row / 2 * buf->GetI420()->StrideU() + col / 2];
  } else {
    return buf->GetI010()
        ->DataU()[row / 2 * buf->GetI010()->StrideU() + col / 2];
  }
}

int GetV(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()
        ->DataV()[row / 2 * buf->GetI420()->StrideV() + col / 2];
  } else {
    return buf->GetI010()
        ->DataV()[row / 2 * buf->GetI010()->StrideV() + col / 2];
  }
}

int GetY(rtc::scoped_refptr<PlanarYuvBuffer> buf, int col, int row) {
  if (buf->type() == VideoFrameBuffer::Type::kI420) {
    return buf->GetI420()->DataY()[row * buf->GetI420()->StrideY() + col];
  } else {
    return buf->GetI010()->DataY()[row * buf->GetI010()->StrideY() + col];
  }
}

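// Pastes |picture| into |canvas| at the given offset, dispatching on the
// concrete buffer type.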
void PasteFromBuffer(PlanarYuvBuffer* canvas,
                     const PlanarYuvBuffer& picture,
                     int offset_col,
                     int offset_row) {
  if (canvas->type() == VideoFrameBuffer::Type::kI420) {
    I420Buffer* buf = static_cast<I420Buffer*>(canvas);
    buf->PasteFrom(*picture.GetI420(), offset_col, offset_row);
  } else {
    I010Buffer* buf = static_cast<I010Buffer*>(canvas);
    buf->PasteFrom(*picture.GetI010(), offset_col, offset_row);
  }
}

}  // namespace

TEST(TestVideoFrame, WidthHeightValues) {
  VideoFrame frame =
      VideoFrame::Builder()
          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
          .set_rotation(webrtc::kVideoRotation_0)
          .set_timestamp_ms(789)
          .build();
  const int valid_value = 10;
  EXPECT_EQ(valid_value, frame.width());
  EXPECT_EQ(valid_value, frame.height());
  frame.set_timestamp(123u);
  EXPECT_EQ(123u, frame.timestamp());
  frame.set_ntp_time_ms(456);
  EXPECT_EQ(456, frame.ntp_time_ms());
  EXPECT_EQ(789, frame.render_time_ms());
}

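// Copy-constructing a VideoFrame is a shallow copy: both frames share the same
// underlying pixel buffer, while metadata (timestamps, rotation) is copied and
// can be changed independently.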
TEST(TestVideoFrame, ShallowCopy) {
  uint32_t timestamp = 1;
  int64_t ntp_time_ms = 2;
  int64_t timestamp_us = 3;
  int stride_y = 15;
  int stride_u = 10;
  int stride_v = 10;
  int width = 15;
  int height = 15;

  const int kSizeY = 400;
  const int kSizeU = 100;
  const int kSizeV = 100;
  const VideoRotation kRotation = kVideoRotation_270;
  uint8_t buffer_y[kSizeY];
  uint8_t buffer_u[kSizeU];
  uint8_t buffer_v[kSizeV];
  memset(buffer_y, 16, kSizeY);
  memset(buffer_u, 8, kSizeU);
  memset(buffer_v, 4, kSizeV);

  VideoFrame frame1 = VideoFrame::Builder()
                          .set_video_frame_buffer(I420Buffer::Copy(
                              width, height, buffer_y, stride_y, buffer_u,
                              stride_u, buffer_v, stride_v))
                          .set_rotation(kRotation)
                          .set_timestamp_us(0)
                          .build();
  frame1.set_timestamp(timestamp);
  frame1.set_ntp_time_ms(ntp_time_ms);
  frame1.set_timestamp_us(timestamp_us);
  VideoFrame frame2(frame1);

  EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
  const webrtc::I420BufferInterface* yuv1 =
      frame1.video_frame_buffer()->GetI420();
  const webrtc::I420BufferInterface* yuv2 =
      frame2.video_frame_buffer()->GetI420();
  EXPECT_EQ(yuv1->DataY(), yuv2->DataY());
  EXPECT_EQ(yuv1->DataU(), yuv2->DataU());
  EXPECT_EQ(yuv1->DataV(), yuv2->DataV());

  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_EQ(frame2.rotation(), frame1.rotation());

  frame2.set_timestamp(timestamp + 1);
  frame2.set_ntp_time_ms(ntp_time_ms + 1);
  frame2.set_timestamp_us(timestamp_us + 1);
  frame2.set_rotation(kVideoRotation_90);

  EXPECT_NE(frame2.timestamp(), frame1.timestamp());
  EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
  EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us());
  EXPECT_NE(frame2.rotation(), frame1.rotation());
}

TEST(TestVideoFrame, TextureInitialValues) {
  VideoFrame frame = test::FakeNativeBuffer::CreateFrame(
      640, 480, 100, 10, webrtc::kVideoRotation_0);
  EXPECT_EQ(640, frame.width());
  EXPECT_EQ(480, frame.height());
  EXPECT_EQ(100u, frame.timestamp());
  EXPECT_EQ(10, frame.render_time_ms());
  ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
  EXPECT_TRUE(frame.video_frame_buffer()->type() ==
              VideoFrameBuffer::Type::kNative);

  frame.set_timestamp(200);
  EXPECT_EQ(200u, frame.timestamp());
  frame.set_timestamp_us(20);
  EXPECT_EQ(20, frame.timestamp_us());
}

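// The following tests are parameterized on the buffer type and run once for
// 8-bit I420 and once for 10-bit I010 buffers.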
class TestPlanarYuvBuffer
    : public ::testing::TestWithParam<VideoFrameBuffer::Type> {};

rtc::scoped_refptr<I420Buffer> CreateAndFillBuffer() {
  auto buf = I420Buffer::Create(20, 10);
  memset(buf->MutableDataY(), 1, 200);
  memset(buf->MutableDataU(), 2, 50);
  memset(buf->MutableDataV(), 3, 50);
  return buf;
}

TEST_P(TestPlanarYuvBuffer, Copy) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf1;
  switch (GetParam()) {
    case VideoFrameBuffer::Type::kI420: {
      buf1 = CreateAndFillBuffer();
      break;
    }
    case VideoFrameBuffer::Type::kI010: {
      buf1 = I010Buffer::Copy(*CreateAndFillBuffer());
      break;
    }
    default:
      RTC_NOTREACHED();
  }

  rtc::scoped_refptr<PlanarYuvBuffer> buf2 =
      PlanarYuvBufferFactory::Copy(*buf1);
  EXPECT_TRUE(test::FrameBufsEqual(buf1->ToI420(), buf2->ToI420()));
}

TEST_P(TestPlanarYuvBuffer, Scale) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure scaling, no cropping.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::ScaleFrom(*buf, 150, 75);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.0, 1.0, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropXCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 50, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.25, 0.0, 0.5, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropXNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 200, 100);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 25, 0, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.125, 0.0, 0.5, 1.0);
}

TEST_P(TestPlanarYuvBuffer, CropYCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Pure center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 50, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.25, 1.0, 0.5);
}

TEST_P(TestPlanarYuvBuffer, CropYNotCenter) {
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), 100, 200);

  // Non-center cropping, no scaling.
  rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
      PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 25, 100, 100);
  CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.5);
}

445
446TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) {
447 rtc::scoped_refptr<PlanarYuvBuffer> buf =
448 CreateGradient(GetParam(), 640, 480);
449
450 // Pure center cropping, no scaling.
451 rtc::scoped_refptr<PlanarYuvBuffer> scaled_buffer =
452 PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 320, 180);
453 CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.75);
454}
455
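// Pastes a small gradient picture into a larger gradient canvas and verifies
// that pixels inside the pasted region come from the picture while pixels
// outside it are left untouched.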
TEST_P(TestPlanarYuvBuffer, PastesIntoBuffer) {
  const int kOffsetx = 20;
  const int kOffsety = 30;
  const int kPicSize = 20;
  const int kWidth = 160;
  const int kHeight = 80;
  rtc::scoped_refptr<PlanarYuvBuffer> buf =
      CreateGradient(GetParam(), kWidth, kHeight);

  rtc::scoped_refptr<PlanarYuvBuffer> original =
      CreateGradient(GetParam(), kWidth, kHeight);

  rtc::scoped_refptr<PlanarYuvBuffer> picture =
      CreateGradient(GetParam(), kPicSize, kPicSize);

  rtc::scoped_refptr<PlanarYuvBuffer> odd_picture =
      CreateGradient(GetParam(), kPicSize + 1, kPicSize - 1);

  PasteFromBuffer(buf.get(), *picture, kOffsetx, kOffsety);

  for (int i = 0; i < kWidth; ++i) {
    for (int j = 0; j < kHeight; ++j) {
      bool is_inside = i >= kOffsetx && i < kOffsetx + kPicSize &&
                       j >= kOffsety && j < kOffsety + kPicSize;
      if (!is_inside) {
        EXPECT_EQ(GetU(original, i, j), GetU(buf, i, j));
        EXPECT_EQ(GetV(original, i, j), GetV(buf, i, j));
        EXPECT_EQ(GetY(original, i, j), GetY(buf, i, j));
      } else {
        EXPECT_EQ(GetU(picture, i - kOffsetx, j - kOffsety), GetU(buf, i, j));
        EXPECT_EQ(GetV(picture, i - kOffsetx, j - kOffsety), GetV(buf, i, j));
        EXPECT_EQ(GetY(picture, i - kOffsetx, j - kOffsety), GetY(buf, i, j));
      }
    }
  }
}

INSTANTIATE_TEST_SUITE_P(All,
                         TestPlanarYuvBuffer,
                         ::testing::Values(VideoFrameBuffer::Type::kI420,
                                           VideoFrameBuffer::Type::kI010));

class TestPlanarYuvBufferRotate
    : public ::testing::TestWithParam<
          std::tuple<webrtc::VideoRotation, VideoFrameBuffer::Type>> {};

TEST_P(TestPlanarYuvBufferRotate, Rotates) {
  const webrtc::VideoRotation rotation = std::get<0>(GetParam());
  const VideoFrameBuffer::Type type = std::get<1>(GetParam());
  rtc::scoped_refptr<PlanarYuvBuffer> buffer = CreateGradient(type, 640, 480);
  rtc::scoped_refptr<PlanarYuvBuffer> rotated_buffer =
      PlanarYuvBufferFactory::Rotate(*buffer, rotation);
  CheckRotate(640, 480, rotation, *rotated_buffer->ToI420());
}

INSTANTIATE_TEST_SUITE_P(
    Rotate,
    TestPlanarYuvBufferRotate,
    ::testing::Combine(::testing::Values(kVideoRotation_0,
                                         kVideoRotation_90,
                                         kVideoRotation_180,
                                         kVideoRotation_270),
                       ::testing::Values(VideoFrameBuffer::Type::kI420,
                                         VideoFrameBuffer::Type::kI010)));

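// VideoFrame::UpdateRect describes the changed region of a frame as
// {offset_x, offset_y, width, height}.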
TEST(TestUpdateRect, CanCompare) {
  VideoFrame::UpdateRect a = {0, 0, 100, 200};
  VideoFrame::UpdateRect b = {0, 0, 100, 200};
  VideoFrame::UpdateRect c = {1, 0, 100, 200};
  VideoFrame::UpdateRect d = {0, 1, 100, 200};
  EXPECT_TRUE(a == b);
  EXPECT_FALSE(a == c);
  EXPECT_FALSE(a == d);
}

TEST(TestUpdateRect, ComputesIsEmpty) {
  VideoFrame::UpdateRect a = {0, 0, 0, 0};
  VideoFrame::UpdateRect b = {0, 0, 100, 200};
  VideoFrame::UpdateRect c = {1, 100, 0, 0};
  VideoFrame::UpdateRect d = {1, 100, 100, 200};
  EXPECT_TRUE(a.IsEmpty());
  EXPECT_FALSE(b.IsEmpty());
  EXPECT_TRUE(c.IsEmpty());
  EXPECT_FALSE(d.IsEmpty());
}

541
542TEST(TestUpdateRectUnion, NonIntersecting) {
543 VideoFrame::UpdateRect a = {0, 0, 10, 20};
544 VideoFrame::UpdateRect b = {100, 200, 10, 20};
545 a.Union(b);
546 EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 110, 220}));
547}
548
549TEST(TestUpdateRectUnion, Intersecting) {
550 VideoFrame::UpdateRect a = {0, 0, 10, 10};
551 VideoFrame::UpdateRect b = {5, 5, 30, 20};
552 a.Union(b);
553 EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 35, 25}));
554}
555
556TEST(TestUpdateRectUnion, OneInsideAnother) {
557 VideoFrame::UpdateRect a = {0, 0, 100, 100};
558 VideoFrame::UpdateRect b = {5, 5, 30, 20};
559 a.Union(b);
560 EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 100, 100}));
561}
562
563TEST(TestUpdateRectIntersect, NonIntersecting) {
564 VideoFrame::UpdateRect a = {0, 0, 10, 20};
565 VideoFrame::UpdateRect b = {100, 200, 10, 20};
566 a.Intersect(b);
567 EXPECT_EQ(a, VideoFrame::UpdateRect({0, 0, 0, 0}));
568}
569
570TEST(TestUpdateRectIntersect, Intersecting) {
571 VideoFrame::UpdateRect a = {0, 0, 10, 10};
572 VideoFrame::UpdateRect b = {5, 5, 30, 20};
573 a.Intersect(b);
574 EXPECT_EQ(a, VideoFrame::UpdateRect({5, 5, 5, 5}));
575}
576
577TEST(TestUpdateRectIntersect, OneInsideAnother) {
578 VideoFrame::UpdateRect a = {0, 0, 100, 100};
579 VideoFrame::UpdateRect b = {5, 5, 30, 20};
580 a.Intersect(b);
581 EXPECT_EQ(a, VideoFrame::UpdateRect({5, 5, 30, 20}));
582}
583
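// UpdateRect::ScaleWithFrame() maps an update rect from the original frame
// onto a frame that has been cropped and/or scaled. Its arguments are the
// original frame size, the crop rectangle (x, y, width, height) and the
// scaled output size. The mapped rect is expanded outward (to even
// coordinates where needed) so it always covers the original update region,
// which is why some expectations below are a few pixels larger than a pure
// rescale would give.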
TEST(TestUpdateRectScale, NoScale) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled =
      a.ScaleWithFrame(width, height, 0, 0, width, height, width, height);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({100, 50, 100, 200}));
}

TEST(TestUpdateRectScale, CropOnly) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 10, 10, width - 20, height - 20, width - 20, height - 20);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({90, 40, 100, 200}));
}

TEST(TestUpdateRectScale, CropOnlyToOddOffset) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 50, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 5, 5, width - 10, height - 10, width - 10, height - 10);
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({94, 44, 102, 202}));
}

TEST(TestUpdateRectScale, ScaleByHalf) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, 0, 0, width, height, width / 2, height / 2);
  // Scaled by half and expanded by 2 pixels in all directions.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({48, 28, 54, 104}));
}

TEST(TestUpdateRectScale, CropToUnchangedRegionBelowUpdateRect) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // The update rect lies outside the cropped frame.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 0, 0}));
}

TEST(TestUpdateRectScale, CropToUnchangedRegionAboveUpdateRect) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {600, 400, 10, 10};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // The update rect lies outside the cropped frame.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 0, 0}));
}

TEST(TestUpdateRectScale, CropInsideUpdate) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {300, 200, 100, 100};
  VideoFrame::UpdateRect scaled = a.ScaleWithFrame(
      width, height, (width - 10) / 2, (height - 10) / 2, 10, 10, 10, 10);
  // The cropped frame lies entirely inside the update rect.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({0, 0, 10, 10}));
}

TEST(TestUpdateRectScale, CropAndScaleByHalf) {
  const int width = 640;
  const int height = 480;
  VideoFrame::UpdateRect a = {100, 60, 100, 200};
  VideoFrame::UpdateRect scaled =
      a.ScaleWithFrame(width, height, 10, 10, width - 20, height - 20,
                       (width - 20) / 2, (height - 20) / 2);
  // Scaled by half and expanded by 3 pixels in all directions because of the
  // odd offset after cropping and scaling.
  EXPECT_EQ(scaled, VideoFrame::UpdateRect({42, 22, 56, 106}));
}

}  // namespace webrtc