/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include <string>
#include <vector>

#include "rtc_base/flags.h"
#include "rtc_base/stringencode.h"
#include "system_wrappers/include/field_trial_default.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/run_test.h"
#include "video/video_quality_test.h"

namespace webrtc {
namespace flags {

// Flags for video.
DEFINE_int(vwidth, 640, "Video width.");
size_t VideoWidth() {
  return static_cast<size_t>(FLAG_vwidth);
}

DEFINE_int(vheight, 480, "Video height.");
size_t VideoHeight() {
  return static_cast<size_t>(FLAG_vheight);
}

DEFINE_int(vfps, 30, "Video frames per second.");
int VideoFps() {
  return static_cast<int>(FLAG_vfps);
}

DEFINE_int(capture_device_index,
           0,
           "Capture device to select for video stream");
size_t GetCaptureDevice() {
  return static_cast<size_t>(FLAG_capture_device_index);
}

DEFINE_int(vtarget_bitrate, 400, "Video stream target bitrate in kbps.");
int VideoTargetBitrateKbps() {
  return static_cast<int>(FLAG_vtarget_bitrate);
}

DEFINE_int(vmin_bitrate, 100, "Video stream min bitrate in kbps.");
int VideoMinBitrateKbps() {
  return static_cast<int>(FLAG_vmin_bitrate);
}

DEFINE_int(vmax_bitrate, 2000, "Video stream max bitrate in kbps.");
int VideoMaxBitrateKbps() {
  return static_cast<int>(FLAG_vmax_bitrate);
}

DEFINE_bool(suspend_below_min_bitrate,
            false,
            "Suspends video below the configured min bitrate.");

DEFINE_int(vnum_temporal_layers,
           1,
           "Number of temporal layers for video. Set to 1-4 to override.");
int VideoNumTemporalLayers() {
  return static_cast<int>(FLAG_vnum_temporal_layers);
}

DEFINE_int(vnum_streams, 0, "Number of video streams to show or analyze.");
int VideoNumStreams() {
  return static_cast<int>(FLAG_vnum_streams);
}

DEFINE_int(vnum_spatial_layers, 1, "Number of video spatial layers to use.");
int VideoNumSpatialLayers() {
  return static_cast<int>(FLAG_vnum_spatial_layers);
}

DEFINE_string(
    vstream0,
    "",
    "Comma separated values describing VideoStream for video stream #0.");
std::string VideoStream0() {
  return static_cast<std::string>(FLAG_vstream0);
}

DEFINE_string(
    vstream1,
    "",
    "Comma separated values describing VideoStream for video stream #1.");
std::string VideoStream1() {
  return static_cast<std::string>(FLAG_vstream1);
}

DEFINE_string(
    vsl0,
    "",
    "Comma separated values describing SpatialLayer for video layer #0.");
std::string VideoSL0() {
  return static_cast<std::string>(FLAG_vsl0);
}

DEFINE_string(
    vsl1,
    "",
    "Comma separated values describing SpatialLayer for video layer #1.");
std::string VideoSL1() {
  return static_cast<std::string>(FLAG_vsl1);
}

DEFINE_int(vselected_tl,
           -1,
           "Temporal layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedTL() {
  return static_cast<int>(FLAG_vselected_tl);
}

DEFINE_int(vselected_stream,
           0,
           "ID of the stream to show or analyze for video. "
           "Set to the number of streams to show them all.");
int VideoSelectedStream() {
  return static_cast<int>(FLAG_vselected_stream);
}

DEFINE_int(vselected_sl,
           -1,
           "Spatial layer to show or analyze for video. -1 to disable "
           "filtering.");
int VideoSelectedSL() {
  return static_cast<int>(FLAG_vselected_sl);
}

// Flags for screenshare.
DEFINE_int(min_transmit_bitrate,
           400,
           "Min transmit bitrate incl. padding for screenshare.");
int ScreenshareMinTransmitBitrateKbps() {
  return FLAG_min_transmit_bitrate;
}

DEFINE_int(swidth, 1850, "Screenshare width (crops source).");
size_t ScreenshareWidth() {
  return static_cast<size_t>(FLAG_swidth);
}

DEFINE_int(sheight, 1110, "Screenshare height (crops source).");
size_t ScreenshareHeight() {
  return static_cast<size_t>(FLAG_sheight);
}

DEFINE_int(sfps, 5, "Frames per second for screenshare.");
int ScreenshareFps() {
  return static_cast<int>(FLAG_sfps);
}

DEFINE_int(starget_bitrate, 100, "Screenshare stream target bitrate in kbps.");
int ScreenshareTargetBitrateKbps() {
  return static_cast<int>(FLAG_starget_bitrate);
}

DEFINE_int(smin_bitrate, 100, "Screenshare stream min bitrate in kbps.");
int ScreenshareMinBitrateKbps() {
  return static_cast<int>(FLAG_smin_bitrate);
}

DEFINE_int(smax_bitrate, 2000, "Screenshare stream max bitrate in kbps.");
int ScreenshareMaxBitrateKbps() {
  return static_cast<int>(FLAG_smax_bitrate);
}

DEFINE_int(snum_temporal_layers,
           2,
           "Number of temporal layers to use in screenshare.");
int ScreenshareNumTemporalLayers() {
  return static_cast<int>(FLAG_snum_temporal_layers);
}

DEFINE_int(snum_streams,
           0,
           "Number of screenshare streams to show or analyze.");
int ScreenshareNumStreams() {
  return static_cast<int>(FLAG_snum_streams);
}

DEFINE_int(snum_spatial_layers,
           1,
           "Number of screenshare spatial layers to use.");
int ScreenshareNumSpatialLayers() {
  return static_cast<int>(FLAG_snum_spatial_layers);
}

DEFINE_string(
    sstream0,
    "",
    "Comma separated values describing VideoStream for screenshare stream #0.");
std::string ScreenshareStream0() {
  return static_cast<std::string>(FLAG_sstream0);
}

DEFINE_string(
    sstream1,
    "",
    "Comma separated values describing VideoStream for screenshare stream #1.");
std::string ScreenshareStream1() {
  return static_cast<std::string>(FLAG_sstream1);
}

DEFINE_string(
    ssl0,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #0.");
std::string ScreenshareSL0() {
  return static_cast<std::string>(FLAG_ssl0);
}

DEFINE_string(
    ssl1,
    "",
    "Comma separated values describing SpatialLayer for screenshare layer #1.");
std::string ScreenshareSL1() {
  return static_cast<std::string>(FLAG_ssl1);
}

DEFINE_int(sselected_tl,
           -1,
           "Temporal layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedTL() {
  return static_cast<int>(FLAG_sselected_tl);
}

DEFINE_int(sselected_stream,
           0,
           "ID of the stream to show or analyze for screenshare. "
           "Set to the number of streams to show them all.");
int ScreenshareSelectedStream() {
  return static_cast<int>(FLAG_sselected_stream);
}

DEFINE_int(sselected_sl,
           -1,
           "Spatial layer to show or analyze for screenshare. -1 to disable "
           "filtering.");
int ScreenshareSelectedSL() {
  return static_cast<int>(FLAG_sselected_sl);
}

DEFINE_bool(
    generate_slides,
    false,
    "Whether to use randomly generated slides or read them from files.");
bool GenerateSlides() {
  return static_cast<bool>(FLAG_generate_slides);
}

DEFINE_int(slide_change_interval,
           10,
           "Interval (in seconds) between simulated slide changes.");
int SlideChangeInterval() {
  return static_cast<int>(FLAG_slide_change_interval);
}

DEFINE_int(
    scroll_duration,
    0,
    "Duration (in seconds) during which a slide will be scrolled into place.");
int ScrollDuration() {
  return static_cast<int>(FLAG_scroll_duration);
}

DEFINE_string(slides,
              "",
              "Comma-separated list of *.yuv files to display as slides.");
std::vector<std::string> Slides() {
  std::vector<std::string> slides;
  std::string slides_list = FLAG_slides;
  rtc::tokenize(slides_list, ',', &slides);
  return slides;
}

// Flags common with screenshare and video loopback, with equal default values.
DEFINE_int(start_bitrate, 600, "Call start bitrate in kbps.");
int StartBitrateKbps() {
  return static_cast<int>(FLAG_start_bitrate);
}

DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
  return static_cast<std::string>(FLAG_codec);
}

DEFINE_bool(analyze_video,
            false,
            "Analyze video stream (if --duration is present)");
bool AnalyzeVideo() {
  return static_cast<bool>(FLAG_analyze_video);
}

DEFINE_bool(analyze_screenshare,
            false,
            "Analyze screenshare stream (if --duration is present)");
bool AnalyzeScreenshare() {
  return static_cast<bool>(FLAG_analyze_screenshare);
}

DEFINE_int(
    duration,
    0,
    "Duration of the test in seconds. If 0, the streams are rendered "
    "instead of being analyzed.");
int DurationSecs() {
  return static_cast<int>(FLAG_duration);
}

DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
  return static_cast<std::string>(FLAG_output_filename);
}

DEFINE_string(graph_title,
              "",
              "If empty, title will be generated automatically.");
std::string GraphTitle() {
  return static_cast<std::string>(FLAG_graph_title);
}

DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
  return static_cast<int>(FLAG_loss_percent);
}

DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
int AvgBurstLossLength() {
  return static_cast<int>(FLAG_avg_burst_loss_length);
}

DEFINE_int(link_capacity,
           0,
           "Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
  return static_cast<int>(FLAG_link_capacity);
}

DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
  return static_cast<int>(FLAG_queue_size);
}

DEFINE_int(avg_propagation_delay_ms,
           0,
           "Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
  return static_cast<int>(FLAG_avg_propagation_delay_ms);
}

DEFINE_string(rtc_event_log_name,
              "",
              "Filename for rtc event log. Two files "
              "with \"_send\" and \"_recv\" suffixes will be created. "
              "Works only when --duration is set.");
std::string RtcEventLogName() {
  return static_cast<std::string>(FLAG_rtc_event_log_name);
}

DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
std::string RtpDumpName() {
  return static_cast<std::string>(FLAG_rtp_dump_name);
}

DEFINE_int(std_propagation_delay_ms,
           0,
           "Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
  return static_cast<int>(FLAG_std_propagation_delay_ms);
}

DEFINE_string(encoded_frame_path,
              "",
              "The base path for encoded frame logs. Created files will have "
              "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
  return static_cast<std::string>(FLAG_encoded_frame_path);
}

DEFINE_bool(logs, false, "print logs to stderr");

DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");

DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");

DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");

DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");

DEFINE_bool(audio, false, "Add audio stream");

DEFINE_bool(audio_video_sync,
            false,
            "Sync audio and video stream (no effect if"
            " audio is false)");

DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");

DEFINE_bool(video, true, "Add video stream");

DEFINE_string(
    force_fieldtrials,
    "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
    "trials are separated by \"/\"");

// Video-specific flags.
DEFINE_string(vclip,
              "",
              "Name of the clip to show. If empty, the camera is used. Use "
              "\"Generator\" for chroma generator.");
std::string VideoClip() {
  return static_cast<std::string>(FLAG_vclip);
}

DEFINE_bool(help, false, "prints this message");

}  // namespace flags

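// Runs a loopback call with two simultaneous send streams: a camera (or clip)
// video stream and a screenshare stream, both routed through a simulated
// network. Depending on --duration, the selected stream is either analyzed or
// simply rendered.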
void Loopback() {
  int camera_idx, screenshare_idx;
  RTC_CHECK(!(flags::AnalyzeScreenshare() && flags::AnalyzeVideo()))
      << "Select only one of video or screenshare.";
  RTC_CHECK(!flags::DurationSecs() || flags::AnalyzeScreenshare() ||
            flags::AnalyzeVideo())
      << "If duration is set, exactly one of analyze_* flags should be set.";
  // Default: camera feed first, if nothing selected.
  if (flags::AnalyzeVideo() || !flags::AnalyzeScreenshare()) {
    camera_idx = 0;
    screenshare_idx = 1;
  } else {
    camera_idx = 1;
    screenshare_idx = 0;
  }

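  // Simulated network conditions between sender and receiver (loss, link
  // capacity, queuing and propagation delay), all taken from the flags above.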
  FakeNetworkPipe::Config pipe_config;
  pipe_config.loss_percent = flags::LossPercent();
  pipe_config.avg_burst_loss_length = flags::AvgBurstLossLength();
  pipe_config.link_capacity_kbps = flags::LinkCapacityKbps();
  pipe_config.queue_length_packets = flags::QueueSize();
  pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
  pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
  pipe_config.allow_reordering = flags::FLAG_allow_reordering;

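  // Call-level bitrate constraints: min and max are the sums of the per-stream
  // video and screenshare limits, since both streams share the same call.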
  BitrateConstraints call_bitrate_config;
  call_bitrate_config.min_bitrate_bps =
      (flags::ScreenshareMinBitrateKbps() + flags::VideoMinBitrateKbps()) *
      1000;
  call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
  call_bitrate_config.max_bitrate_bps =
      (flags::ScreenshareMaxBitrateKbps() + flags::VideoMaxBitrateKbps()) *
      1000;

  VideoQualityTest::Params params, camera_params, screenshare_params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
  params.call.dual_video = true;
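  // Two video send streams: params.video[screenshare_idx] describes the
  // screenshare stream and params.video[camera_idx] the camera/clip stream.
  // The index assignment above puts the stream selected for analysis (or the
  // camera, by default) at position 0.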
  params.video[screenshare_idx] = {
      true,
      flags::ScreenshareWidth(),
      flags::ScreenshareHeight(),
      flags::ScreenshareFps(),
      flags::ScreenshareMinBitrateKbps() * 1000,
      flags::ScreenshareTargetBitrateKbps() * 1000,
      flags::ScreenshareMaxBitrateKbps() * 1000,
      false,
      flags::Codec(),
      flags::ScreenshareNumTemporalLayers(),
      flags::ScreenshareSelectedTL(),
      flags::ScreenshareMinTransmitBitrateKbps() * 1000,
      false,  // ULPFEC disabled.
      false,  // FlexFEC disabled.
      ""};
  params.video[camera_idx] = {flags::FLAG_video,
                              flags::VideoWidth(),
                              flags::VideoHeight(),
                              flags::VideoFps(),
                              flags::VideoMinBitrateKbps() * 1000,
                              flags::VideoTargetBitrateKbps() * 1000,
                              flags::VideoMaxBitrateKbps() * 1000,
                              flags::FLAG_suspend_below_min_bitrate,
                              flags::Codec(),
                              flags::VideoNumTemporalLayers(),
                              flags::VideoSelectedTL(),
                              0,  // No min transmit bitrate.
                              flags::FLAG_use_ulpfec,
                              flags::FLAG_use_flexfec,
                              flags::VideoClip(),
                              flags::GetCaptureDevice()};
  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
                  flags::FLAG_audio_dtx};
  params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                    flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
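  // Analyzer settings: test label, quality thresholds (left at 0.0 here),
  // test duration, and the graph output file/title used when --duration is
  // set.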
  params.analyzer = {"dual_streams",
                     0.0,
                     0.0,
                     flags::DurationSecs(),
                     flags::OutputFilename(),
                     flags::GraphTitle()};
  params.pipe = pipe_config;

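  // Screenshare content settings apply only to the screenshare slot: slide
  // generation, slide-change interval, scrolling and the optional .yuv slides.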
  params.screenshare[camera_idx].enabled = false;
  params.screenshare[screenshare_idx] = {
      true, flags::GenerateSlides(), flags::SlideChangeInterval(),
      flags::ScrollDuration(), flags::Slides()};

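  // If multiple streams were requested but none were described explicitly,
  // let the test infer the per-stream settings.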
  if (flags::VideoNumStreams() > 1 && flags::VideoStream0().empty() &&
      flags::VideoStream1().empty()) {
    params.ss[camera_idx].infer_streams = true;
  }

  if (flags::ScreenshareNumStreams() > 1 &&
      flags::ScreenshareStream0().empty() &&
      flags::ScreenshareStream1().empty()) {
    params.ss[screenshare_idx].infer_streams = true;
  }

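  // Fill in simulcast/spatial-layer settings from the descriptor flags, first
  // for the screenshare stream and then for the camera stream.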
  std::vector<std::string> stream_descriptors;
  stream_descriptors.push_back(flags::ScreenshareStream0());
  stream_descriptors.push_back(flags::ScreenshareStream1());
  std::vector<std::string> SL_descriptors;
  SL_descriptors.push_back(flags::ScreenshareSL0());
  SL_descriptors.push_back(flags::ScreenshareSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, screenshare_idx, stream_descriptors,
      flags::ScreenshareNumStreams(), flags::ScreenshareSelectedStream(),
      flags::ScreenshareNumSpatialLayers(), flags::ScreenshareSelectedSL(),
      SL_descriptors);

  stream_descriptors.clear();
  stream_descriptors.push_back(flags::VideoStream0());
  stream_descriptors.push_back(flags::VideoStream1());
  SL_descriptors.clear();
  SL_descriptors.push_back(flags::VideoSL0());
  SL_descriptors.push_back(flags::VideoSL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, camera_idx, stream_descriptors, flags::VideoNumStreams(),
      flags::VideoSelectedStream(), flags::VideoNumSpatialLayers(),
      flags::VideoSelectedSL(), SL_descriptors);

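  // Run the quality analyzer when a duration is given; otherwise run with
  // local renderers.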
  VideoQualityTest test;
  if (flags::DurationSecs()) {
    test.RunWithAnalyzer(params);
  } else {
    test.RunWithRenderers(params);
  }
}
}  // namespace webrtc

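// Example invocation (illustrative only; the binary name depends on the build
// target):
//   sv_loopback --duration=60 --analyze_video --codec=VP9 \
//       --loss_percent=2 --link_capacity=2000 --output_filename=graph.txt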
int main(int argc, char* argv[]) {
  ::testing::InitGoogleTest(&argc, argv);
  if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) != 0) {
    // Fail on unrecognized flags.
    return 1;
  }
  if (webrtc::flags::FLAG_help) {
    rtc::FlagList::Print(nullptr, false);
    return 0;
  }

  webrtc::test::ValidateFieldTrialsStringOrDie(
      webrtc::flags::FLAG_force_fieldtrials);
  // InitFieldTrialsFromString stores the char*, so the char array must outlive
  // the application.
  webrtc::field_trial::InitFieldTrialsFromString(
      webrtc::flags::FLAG_force_fieldtrials);

  webrtc::test::RunTest(webrtc::Loopback);
  return 0;
}