blob: 233179babaefeb62373f31bf4a1d78dd211b69f7 [file] [log] [blame]
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +00001/*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#include "video_engine/vie_capturer.h"
12
mikhal@webrtc.orgdc7e6cf2012-10-24 18:33:04 +000013#include "common_video/libyuv/include/webrtc_libyuv.h"
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +000014#include "modules/interface/module_common_types.h"
15#include "modules/utility/interface/process_thread.h"
andrew@webrtc.org5f6856f2012-10-30 21:58:00 +000016#include "webrtc/modules/video_capture/include/video_capture_factory.h"
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +000017#include "modules/video_processing/main/interface/video_processing.h"
18#include "modules/video_render/main/interface/video_render_defines.h"
19#include "system_wrappers/interface/critical_section_wrapper.h"
20#include "system_wrappers/interface/event_wrapper.h"
21#include "system_wrappers/interface/thread_wrapper.h"
22#include "system_wrappers/interface/trace.h"
23#include "video_engine/include/vie_image_process.h"
24#include "video_engine/vie_defines.h"
25#include "video_engine/vie_encoder.h"
26
27namespace webrtc {
28
// Max time (ms) the capture thread blocks on capture_event_ before
// re-checking whether it should keep running.
const int kThreadWaitTimeMs = 100;
// Max time (ms) to block waiting for the previous encoded frame to be
// delivered before overwriting it with a new one.
const int kMaxDeliverWaitTime = 500;
31
// Constructs a capturer bound to |capture_id|/|engine_id| and immediately
// starts the dedicated capture/deliver thread. Module wiring (the actual
// VideoCaptureModule) happens later in Init().
ViECapturer::ViECapturer(int capture_id,
                         int engine_id,
                         ProcessThread& module_process_thread)
    : ViEFrameProviderBase(capture_id, engine_id),
      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_module_(NULL),
      external_capture_module_(NULL),
      module_process_thread_(module_process_thread),
      capture_id_(capture_id),
      // Thread and events are heap objects held by reference; the destructor
      // deletes them explicitly (see ~ViECapturer).
      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
                                                   this, kHighPriority,
                                                   "ViECaptureThread")),
      capture_event_(*EventWrapper::Create()),
      deliver_event_(*EventWrapper::Create()),
      effect_filter_(NULL),
      image_proc_module_(NULL),
      image_proc_module_ref_counter_(0),
      deflicker_frame_stats_(NULL),
      brightness_frame_stats_(NULL),
      current_brightness_level_(Normal),
      reported_brightness_level_(Normal),
      denoising_enabled_(false),
      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      observer_(NULL),
      encoding_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_encoder_(NULL),
      encode_complete_callback_(NULL),
      vie_encoder_(NULL),
      vcm_(NULL),
      decoder_initialized_(false) {
  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
               "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
               capture_id, engine_id);
  unsigned int t_id = 0;
  if (capture_thread_.Start(t_id)) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
                 "%s: thread started: %u", __FUNCTION__, t_id);
  } else {
    // Thread start failure is considered unrecoverable here.
    assert(false);
  }
}
74
// Tears down in strict order: stop the capture thread, detach from any
// encoder, release the capture module, then free processing-module state.
ViECapturer::~ViECapturer() {
  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
               "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
               capture_id_, engine_id_);

  // Stop the thread. Take both locks so the thread can't be mid-delivery
  // when we mark it dead, then wake it so it observes SetNotAlive().
  deliver_cs_->Enter();
  capture_cs_->Enter();
  capture_thread_.SetNotAlive();
  capture_event_.Set();
  capture_cs_->Leave();
  deliver_cs_->Leave();

  provider_cs_->Enter();
  if (vie_encoder_) {
    // Detach this device from the encoder it was feeding pre-encoded frames.
    vie_encoder_->DeRegisterExternalEncoder(codec_.plType);
  }
  provider_cs_->Leave();

  // Stop the camera input.
  if (capture_module_) {
    module_process_thread_.DeRegisterModule(capture_module_);
    capture_module_->DeRegisterCaptureDataCallback();
    capture_module_->Release();
    capture_module_ = NULL;
  }
  if (capture_thread_.Stop()) {
    // Thread stopped; safe to free the thread and its events.
    delete &capture_thread_;
    delete &capture_event_;
    delete &deliver_event_;
  } else {
    // Deliberately leak rather than free objects a live thread may touch.
    assert(false);
    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
                 ViEId(engine_id_, capture_id_),
                 "%s: Not able to stop capture thread for device %d, leaking",
                 __FUNCTION__, capture_id_);
  }

  if (image_proc_module_) {
    VideoProcessingModule::Destroy(image_proc_module_);
  }
  if (deflicker_frame_stats_) {
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  // delete on NULL is a no-op, so no guard is needed here.
  delete brightness_frame_stats_;
  if (vcm_) {
    delete vcm_;
  }
}
126
127ViECapturer* ViECapturer::CreateViECapture(
128 int capture_id,
129 int engine_id,
130 VideoCaptureModule* capture_module,
131 ProcessThread& module_process_thread) {
132 ViECapturer* capture = new ViECapturer(capture_id, engine_id,
133 module_process_thread);
134 if (!capture || capture->Init(capture_module) != 0) {
135 delete capture;
136 capture = NULL;
137 }
138 return capture;
139}
140
// Adopts an externally created capture module: registers this object as the
// frame sink, takes a reference, and attaches the module to the process
// thread. Returns 0 on success, -1 on failure.
WebRtc_Word32 ViECapturer::Init(VideoCaptureModule* capture_module) {
  assert(capture_module_ == NULL);
  capture_module_ = capture_module;
  capture_module_->RegisterCaptureDataCallback(*this);
  capture_module_->AddRef();
  // NOTE(review): on this failure path the module keeps its ref and callback
  // registration; the destructor's cleanup is assumed to handle it — confirm.
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}
152
153ViECapturer* ViECapturer::CreateViECapture(
154 int capture_id,
155 int engine_id,
156 const char* device_unique_idUTF8,
157 const WebRtc_UWord32 device_unique_idUTF8Length,
158 ProcessThread& module_process_thread) {
159 ViECapturer* capture = new ViECapturer(capture_id, engine_id,
160 module_process_thread);
161 if (!capture ||
162 capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
163 delete capture;
164 capture = NULL;
165 }
166 return capture;
167}
168
// Creates the underlying capture module. A NULL |device_unique_idUTF8|
// creates an external-capture module (frames pushed via IncomingFrame*);
// otherwise a real device module is created for that id.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 ViECapturer::Init(
    const char* device_unique_idUTF8,
    const WebRtc_UWord32 device_unique_idUTF8Length) {
  assert(capture_module_ == NULL);
  if (device_unique_idUTF8 == NULL) {
    // External capture: the factory also hands back the external interface.
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
  } else {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
  }
  if (!capture_module_) {
    return -1;
  }
  capture_module_->AddRef();
  capture_module_->RegisterCaptureDataCallback(*this);
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}
191
// Called when the set of frame consumers changes. If the camera is running,
// is not feeding an encoder, and was not started with a fixed capability,
// restart it when the consumers' preferred format differs from the current
// capture settings. Always returns 0.
int ViECapturer::FrameCallbackChanged() {
  if (Started() && !EncoderActive() && !CaptureCapabilityFixed()) {
    // Reconfigure the camera if a new size is required and the capture device
    // does not provide encoded frames.
    int best_width;
    int best_height;
    int best_frame_rate;
    VideoCaptureCapability capture_settings;
    capture_module_->CaptureSettings(capture_settings);
    GetBestFormat(&best_width, &best_height, &best_frame_rate);
    if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
      if (best_width != capture_settings.width ||
          best_height != capture_settings.height ||
          best_frame_rate != capture_settings.maxFPS ||
          capture_settings.codecType != kVideoCodecUnknown) {
        Stop();
        Start(requested_capability_);
      }
    }
  }
  return 0;
}
214
// Starts capturing. The effective capability is chosen from, in priority
// order: the active encoder's codec settings, the observers' best format
// (with defaults for unset fields), or the caller-fixed capability.
// Returns the capture module's StartCapture() result.
WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
               __FUNCTION__);
  int width;
  int height;
  int frame_rate;
  VideoCaptureCapability capability;
  requested_capability_ = capture_capability;
  if (EncoderActive()) {
    // Camera must produce what the encoder was configured for.
    CriticalSectionScoped cs(encoding_cs_.get());
    capability.width = codec_.width;
    capability.height = codec_.height;
    capability.maxFPS = codec_.maxFramerate;
    capability.codecType = codec_.codecType;
    capability.rawType = kVideoI420;

  } else if (!CaptureCapabilityFixed()) {
    // Ask the observers for best size.
    // NOTE(review): width/height/frame_rate are assumed to be written by
    // GetBestFormat() (0 meaning "no preference") — confirm in the base class.
    GetBestFormat(&width, &height, &frame_rate);
    if (width == 0) {
      width = kViECaptureDefaultWidth;
    }
    if (height == 0) {
      height = kViECaptureDefaultHeight;
    }
    if (frame_rate == 0) {
      frame_rate = kViECaptureDefaultFramerate;
    }
    capability.height = height;
    capability.width = width;
    capability.maxFPS = frame_rate;
    capability.rawType = kVideoI420;
    capability.codecType = kVideoCodecUnknown;
  } else {
    // Width, height and type specified with call to Start, not set by
    // observers.
    capability.width = requested_capability_.width;
    capability.height = requested_capability_.height;
    capability.maxFPS = requested_capability_.maxFPS;
    capability.rawType = requested_capability_.rawType;
    capability.interlaced = requested_capability_.interlaced;
  }
  return capture_module_->StartCapture(capability);
}
259
// Stops capturing and forgets any caller-fixed capability, so the next
// Start() falls back to encoder/observer/default selection.
WebRtc_Word32 ViECapturer::Stop() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
               __FUNCTION__);
  requested_capability_ = CaptureCapability();
  return capture_module_->StopCapture();
}

// Returns true while the capture module is actively capturing.
bool ViECapturer::Started() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
               __FUNCTION__);
  return capture_module_->CaptureStarted();
}

// Returns the capture module's current device name (module-owned storage).
const char* ViECapturer::CurrentDeviceName() const {
  return capture_module_->CurrentDeviceName();
}

// Forwards the externally measured capture delay to the capture module.
WebRtc_Word32 ViECapturer::SetCaptureDelay(WebRtc_Word32 delay_ms) {
  return capture_module_->SetCaptureDelay(delay_ms);
}
280
281WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
282 const RotateCapturedFrame rotation) {
283 VideoCaptureRotation converted_rotation = kCameraRotate0;
284 switch (rotation) {
285 case RotateCapturedFrame_0:
286 converted_rotation = kCameraRotate0;
287 break;
288 case RotateCapturedFrame_90:
289 converted_rotation = kCameraRotate90;
290 break;
291 case RotateCapturedFrame_180:
292 converted_rotation = kCameraRotate180;
293 break;
294 case RotateCapturedFrame_270:
295 converted_rotation = kCameraRotate270;
296 break;
297 }
298 return capture_module_->SetCaptureRotation(converted_rotation);
299}
300
// External-capture entry point: pushes a raw frame buffer into the external
// capture module. Only valid when this capturer was created without a device
// id (external capture). Returns -1 if there is no external module.
int ViECapturer::IncomingFrame(unsigned char* video_frame,
                               unsigned int video_frame_length,
                               uint16_t width,
                               uint16_t height,
                               RawVideoType video_type,
                               unsigned long long capture_time) {  // NOLINT
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "ExternalCapture::IncomingFrame width %d, height %d, "
               "capture_time %u", width, height, capture_time);

  if (!external_capture_module_) {
    return -1;
  }
  // Describe the incoming buffer so the module can convert/copy it.
  VideoCaptureCapability capability;
  capability.width = width;
  capability.height = height;
  capability.rawType = video_type;
  return external_capture_module_->IncomingFrame(video_frame,
                                                 video_frame_length,
                                                 capability, capture_time);
}
322
323int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
324 unsigned long long capture_time) { // NOLINT
325 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
326 "ExternalCapture::IncomingFrame width %d, height %d, "
327 " capture_time %u", video_frame.width, video_frame.height,
328 capture_time);
329
330 if (!external_capture_module_) {
331 return -1;
332 }
333
334 VideoFrameI420 frame;
335 frame.width = video_frame.width;
336 frame.height = video_frame.height;
337 frame.y_plane = video_frame.y_plane;
338 frame.u_plane = video_frame.u_plane;
339 frame.v_plane = video_frame.v_plane;
340 frame.y_pitch = video_frame.y_pitch;
341 frame.u_pitch = video_frame.u_pitch;
342 frame.v_pitch = video_frame.v_pitch;
343
344 return external_capture_module_->IncomingFrameI420(frame, capture_time);
345}
346
// VideoCaptureModule callback (capture module's thread): stashes the new raw
// frame and wakes the capture thread, which performs the actual delivery.
void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
                                          I420VideoFrame& video_frame) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_id: %d)", __FUNCTION__, capture_id);
  CriticalSectionScoped cs(capture_cs_.get());
  // Make sure we render this frame earlier since we know the render time set
  // is slightly off since it's being set when the frame has been received from
  // the camera, and not when the camera actually captured the frame.
  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
  // Swap (not copy) to avoid a frame-sized memcpy on the capture path.
  captured_frame_.SwapFrame(&video_frame);
  capture_event_.Set();
  return;
}
360
361void ViECapturer::OnIncomingCapturedEncodedFrame(const WebRtc_Word32 capture_id,
mikhal@webrtc.orgeb4840f2012-10-29 15:59:40 +0000362 VideoFrame& video_frame) {
mikhal@webrtc.orgdc7e6cf2012-10-24 18:33:04 +0000363 WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
364 "%s(capture_id: %d)", __FUNCTION__, capture_id);
365 CriticalSectionScoped cs(capture_cs_.get());
366 // Make sure we render this frame earlier since we know the render time set
367 // is slightly off since it's being set when the frame has been received from
368 // the camera, and not when the camera actually captured the frame.
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +0000369 video_frame.SetRenderTime(video_frame.RenderTimeMs() - FrameDelay());
mikhal@webrtc.orgeb4840f2012-10-29 15:59:40 +0000370 if (encoded_frame_.Length() != 0) {
371 // The last encoded frame has not been sent yet. Need to wait.
372 deliver_event_.Reset();
373 WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, capture_id_),
374 "%s(capture_id: %d) Last encoded frame not yet delivered.",
375 __FUNCTION__, capture_id);
376 capture_cs_->Leave();
377 // Wait for the coded frame to be sent before unblocking this.
378 deliver_event_.Wait(kMaxDeliverWaitTime);
379 assert(encoded_frame_.Length() == 0);
380 capture_cs_->Enter();
381 } else {
382 assert(false);
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +0000383 }
mikhal@webrtc.orgeb4840f2012-10-29 15:59:40 +0000384 encoded_frame_.SwapFrame(video_frame);
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +0000385 capture_event_.Set();
386 return;
387}
388
// VideoCaptureModule callback: records the new capture delay so frame render
// times can be compensated, and informs the attached encoder, if any.
void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
                                        const WebRtc_Word32 delay) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
               delay);

  // Deliver the network delay to all registered callbacks.
  ViEFrameProviderBase::SetFrameDelay(delay);
  CriticalSectionScoped cs(encoding_cs_.get());
  if (vie_encoder_) {
    vie_encoder_->DelayChanged(id, delay);
  }
}
402
// Registers (non-NULL) or deregisters (NULL) the single effect filter applied
// to every delivered I420 frame. Fails with -1 on double-register or on
// deregistering when nothing is registered.
WebRtc_Word32 ViECapturer::RegisterEffectFilter(
    ViEEffectFilter* effect_filter) {
  // deliver_cs_ guards effect_filter_ against concurrent DeliverI420Frame().
  CriticalSectionScoped cs(deliver_cs_.get());

  if (!effect_filter) {
    if (!effect_filter_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: no effect filter added for capture device %d",
                   __FUNCTION__, capture_id_);
      return -1;
    }
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                 "%s: deregister effect filter for device %d", __FUNCTION__,
                 capture_id_);
  } else {
    if (effect_filter_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: effect filter already added for capture device %d",
                   __FUNCTION__, capture_id_);
      return -1;
    }
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                 "%s: register effect filter for device %d", __FUNCTION__,
                 capture_id_);
  }
  effect_filter_ = effect_filter;
  return 0;
}
431
// Lazily creates the shared VideoProcessingModule on first use and bumps its
// reference count. Deflickering, denoising and brightness detection all
// share one module instance. Returns -1 if creation fails.
WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
  if (!image_proc_module_) {
    assert(image_proc_module_ref_counter_ == 0);
    image_proc_module_ = VideoProcessingModule::Create(
        ViEModuleId(engine_id_, capture_id_));
    if (!image_proc_module_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: could not create video processing module",
                   __FUNCTION__);
      return -1;
    }
  }
  image_proc_module_ref_counter_++;
  return 0;
}

// Drops one reference to the shared VideoProcessingModule and destroys it
// when the last user goes away. Callers must keep Inc/Dec calls balanced.
WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
  image_proc_module_ref_counter_--;
  if (image_proc_module_ref_counter_ == 0) {
    // Destroy module.
    VideoProcessingModule::Destroy(image_proc_module_);
    image_proc_module_ = NULL;
  }
  return 0;
}
457
// Enables/disables denoising on delivered frames. Idempotent: enabling when
// already enabled (or disabling when disabled) is a successful no-op.
// Returns -1 only if the processing module cannot be created.
WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
               capture_id_, enable);

  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (denoising_enabled_) {
      // Already enabled, nothing need to be done.
      return 0;
    }
    denoising_enabled_ = true;
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
  } else {
    if (denoising_enabled_ == false) {
      // Already disabled, nothing need to be done.
      return 0;
    }
    denoising_enabled_ = false;
    DecImageProcRefCount();
  }

  return 0;
}

// Enables/disables deflickering on delivered frames. Unlike denoising, this
// is NOT idempotent: enabling twice or disabling when not enabled fails with
// -1. Frame statistics are (re)allocated on enable and freed on disable.
WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
               capture_id_, enable);

  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (deflicker_frame_stats_) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: deflickering already enabled", __FUNCTION__);
      return -1;
    }
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
    deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
  } else {
    if (deflicker_frame_stats_ == NULL) {
      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: deflickering not enabled", __FUNCTION__);
      return -1;
    }
    DecImageProcRefCount();
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  return 0;
}
513
514WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) {
515 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
516 "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
517 capture_id_, enable);
518
519 CriticalSectionScoped cs(deliver_cs_.get());
520 if (enable) {
521 if (brightness_frame_stats_) {
522 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
523 "%s: BrightnessAlarm already enabled", __FUNCTION__);
524 return -1;
525 }
526 if (IncImageProcRefCount() != 0) {
527 return -1;
528 }
529 brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
530 } else {
531 DecImageProcRefCount();
532 if (brightness_frame_stats_ == NULL) {
533 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
534 "%s: deflickering not enabled", __FUNCTION__);
535 return -1;
536 }
537 delete brightness_frame_stats_;
538 brightness_frame_stats_ = NULL;
539 }
540 return 0;
541}
542
543bool ViECapturer::ViECaptureThreadFunction(void* obj) {
544 return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
545}
546
// Capture-thread loop body; returning true keeps the thread running. Waits
// for capture_event_, then delivers any pending raw and/or encoded frame and
// reports brightness-level changes to the observer.
bool ViECapturer::ViECaptureProcess() {
  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
    deliver_cs_->Enter();
    if (!captured_frame_.IsZeroSize()) {
      // New I420 frame.
      // Swap the pending frame out under capture_cs_ so the capture
      // callback can't write concurrently; deliver outside that lock.
      capture_cs_->Enter();
      deliver_frame_.SwapFrame(&captured_frame_);
      captured_frame_.ResetSize();
      capture_cs_->Leave();
      DeliverI420Frame(&deliver_frame_);
    }
    if (encoded_frame_.Length() > 0) {
      capture_cs_->Enter();
      deliver_encoded_frame_.SwapFrame(encoded_frame_);
      encoded_frame_.SetLength(0);
      // Unblock OnIncomingCapturedEncodedFrame(): the slot is free again.
      deliver_event_.Set();
      capture_cs_->Leave();
      DeliverCodedFrame(&deliver_encoded_frame_);
    }
    deliver_cs_->Leave();
    if (current_brightness_level_ != reported_brightness_level_) {
      CriticalSectionScoped cs(observer_cs_.get());
      if (observer_) {
        observer_->BrightnessAlarm(id_, current_brightness_level_);
        reported_brightness_level_ = current_brightness_level_;
      }
    }
  }
  // We're done!
  return true;
}
578
// Runs the enabled processing steps (deflickering, denoising, brightness
// detection, effect filter) on |video_frame| in place, then fans it out to
// all registered frame consumers. Called on the capture thread (and from
// FrameToRender) with deliver_cs_ held.
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
  // Apply image enhancement and effect filter.
  if (deflicker_frame_stats_) {
    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                          *video_frame) == 0) {
      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
    } else {
      WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
                   "%s: could not get frame stats for captured frame",
                   __FUNCTION__);
    }
  }
  if (denoising_enabled_) {
    image_proc_module_->Denoising(video_frame);
  }
  if (brightness_frame_stats_) {
    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                          *video_frame) == 0) {
      WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
          *video_frame, *brightness_frame_stats_);

      // Only record the level here; the capture loop notifies the observer.
      switch (brightness) {
        case VideoProcessingModule::kNoWarning:
          current_brightness_level_ = Normal;
          break;
        case VideoProcessingModule::kDarkWarning:
          current_brightness_level_ = Dark;
          break;
        case VideoProcessingModule::kBrightWarning:
          current_brightness_level_ = Bright;
          break;
        default:
          WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                       "%s: Brightness detection failed", __FUNCTION__);
      }
    }
  }
  if (effect_filter_) {
    // The filter API wants a contiguous I420 buffer, so copy the planes out.
    // NOTE(review): changes the filter makes to video_buffer are not copied
    // back into video_frame here — confirm that is intended.
    unsigned int length = CalcBufferSize(kI420,
                                         video_frame->width(),
                                         video_frame->height());
    scoped_array<uint8_t> video_buffer(new uint8_t[length]);
    ExtractBuffer(*video_frame, length, video_buffer.get());
    effect_filter_->Transform(length, video_buffer.get(),
                              video_frame->timestamp(), video_frame->width(),
                              video_frame->height());
  }
  // Deliver the captured frame to all observers (channels, renderer or file).
  ViEFrameProviderBase::DeliverFrame(video_frame);
}
629
// Delivers a pre-encoded frame: hands it to the registered encode-complete
// callback, and, if local consumers exist and the decoder is initialized,
// also routes it through the VCM for decoding (so renderers/files get I420).
void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
  if (encode_complete_callback_) {
    EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
                               video_frame->Size());
    // Convert the ms render time to a 90 kHz RTP timestamp.
    encoded_image._timeStamp =
        90 * static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
    encode_complete_callback_->Encoded(encoded_image);
  }

  if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
    // Swap the payload into the reusable decode buffer (no copy).
    video_frame->Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
                      decode_buffer_.payloadSize);
    decode_buffer_.encodedHeight = video_frame->Height();
    decode_buffer_.encodedWidth = video_frame->Width();
    decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
    const int kMsToRtpTimestamp = 90;
    decode_buffer_.timeStamp = kMsToRtpTimestamp *
        static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
    decode_buffer_.payloadType = codec_.plType;
    vcm_->DecodeFromStorage(decode_buffer_);
  }
}
652
// Removes a frame consumer. If the consumer is the encoder this device feeds
// pre-encoded frames, detach it with careful lock ordering; otherwise defer
// to the base class.
int ViECapturer::DeregisterFrameCallback(
    const ViEFrameCallback* callbackObject) {
  provider_cs_->Enter();
  if (callbackObject == vie_encoder_) {
    // Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
    ViEEncoder* vie_encoder = NULL;
    vie_encoder = vie_encoder_;
    vie_encoder_ = NULL;
    // Release provider_cs_ before calling into the encoder (lock ordering).
    provider_cs_->Leave();

    // Need to take this here in order to avoid deadlock with VCM. The reason is
    // that VCM will call ::Release and a deadlock can occur.
    deliver_cs_->Enter();
    vie_encoder->DeRegisterExternalEncoder(codec_.plType);
    deliver_cs_->Leave();
    return 0;
  }
  provider_cs_->Leave();
  return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
}
673
674bool ViECapturer::IsFrameCallbackRegistered(
675 const ViEFrameCallback* callbackObject) {
676 CriticalSectionScoped cs(provider_cs_.get());
677 if (callbackObject == vie_encoder_) {
678 return true;
679 }
680 return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
681}
682
// Attaches |vie_encoder| to this capture device's hardware encoder, so the
// camera itself produces the encoded stream for |codec|. Fails with -1 if a
// different encoder is already attached, the device cannot encode this codec,
// or encoder registration/configuration fails.
WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
                                                 ViEEncoder& vie_encoder,
                                                 WebRtc_Word32 vie_encoder_id) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  if (vie_encoder_ && &vie_encoder != vie_encoder_) {
    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                 "%s(capture_device_id: %d Capture device already encoding)",
                 __FUNCTION__, capture_id_);
    return -1;
  }

  CriticalSectionScoped cs(encoding_cs_.get());
  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder =
      capture_module_->GetEncodeInterface(codec);
  if (!capture_encoder) {
    // Encoding not supported?
    return -1;
  }
  capture_encoder_ = capture_encoder;

  // Create VCM module used for decoding frames if needed.
  // (Local renderers/files still need I420, see DeliverCodedFrame().)
  if (!vcm_) {
    vcm_ = VideoCodingModule::Create(capture_id_);
  }

  if (vie_encoder.RegisterExternalEncoder(this, codec.plType) != 0) {
    return -1;
  }
  if (vie_encoder.SetEncoder(codec) != 0) {
    // Roll back the registration made just above.
    vie_encoder.DeRegisterExternalEncoder(codec.plType);
    return -1;
  }

  // Make sure the encoder is not an I420 observer.
  ViEFrameProviderBase::DeregisterFrameCallback(&vie_encoder);
  // Store the vie_encoder using this capture device.
  vie_encoder_ = &vie_encoder;
  vie_encoder_id_ = vie_encoder_id;
  memcpy(&codec_, &codec, sizeof(VideoCodec));
  return 0;
}
725
726bool ViECapturer::EncoderActive() {
727 return vie_encoder_ != NULL;
728}
729
730bool ViECapturer::CaptureCapabilityFixed() {
731 return requested_capability_.width != 0 &&
732 requested_capability_.height != 0 &&
733 requested_capability_.maxFPS != 0;
734}
735
736WebRtc_Word32 ViECapturer::Version(char* version,
737 WebRtc_Word32 length) const {
738 return 0;
739}
740
// VideoEncoder interface: configures the capture device's hardware encoder
// with |codec_settings|, and best-effort initializes the VCM receiver so
// pre-encoded frames can also be decoded for local consumers. A VCM setup
// failure is non-fatal (decoder_initialized_ simply stays false).
WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
                                      WebRtc_Word32 number_of_cores,
                                      WebRtc_UWord32 max_payload_size) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);

  CriticalSectionScoped cs(encoding_cs_.get());
  if (!capture_encoder_ || !codec_settings) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (vcm_) {
    // Initialize VCM to be able to decode frames if needed.
    if (vcm_->InitializeReceiver() == 0) {
      if (vcm_->RegisterReceiveCallback(this) == 0) {
        if (vcm_->RegisterReceiveCodec(codec_settings, number_of_cores,
                                       false) == 0) {
          decoder_initialized_ = true;
          WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
                       "%s(capture_device_id: %d) VCM Decoder initialized",
                       __FUNCTION__, capture_id_);
        }
      }
    }
  }
  return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
}
768
// VideoEncoder interface: the camera encodes on its own, so |input_image| is
// ignored; only the requested frame type is forwarded to the hardware
// encoder (delta when no types are given, key, or empty/skip).
WebRtc_Word32 ViECapturer::Encode(
    const I420VideoFrame& input_image,
    const CodecSpecificInfo* codec_specific_info,
    const std::vector<VideoFrameType>* frame_types) {
  CriticalSectionScoped cs(encoding_cs_.get());
  if (!capture_encoder_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (frame_types == NULL) {
    return capture_encoder_->EncodeFrameType(kVideoFrameDelta);
  } else if ((*frame_types)[0] == kKeyFrame) {
    return capture_encoder_->EncodeFrameType(kVideoFrameKey);
  } else if ((*frame_types)[0] == kSkipFrame) {
    return capture_encoder_->EncodeFrameType(kFrameEmpty);
  }
  return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
786
// VideoEncoder interface: stores the sink that receives encoded images from
// DeliverCodedFrame(). Requires a configured capture encoder.
WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);

  // deliver_cs_ guards encode_complete_callback_ against the capture thread.
  CriticalSectionScoped cs(deliver_cs_.get());
  if (!capture_encoder_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  encode_complete_callback_ = callback;
  return 0;
}
799
// VideoEncoder interface: detaches the encoder from this capture device,
// resets the camera to raw I420 output, and re-registers the encoder as a
// plain I420 frame consumer.
WebRtc_Word32 ViECapturer::Release() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  {
    CriticalSectionScoped cs(deliver_cs_.get());
    encode_complete_callback_ = NULL;
  }

  {
    CriticalSectionScoped cs(encoding_cs_.get());

    decoder_initialized_ = false;
    codec_.codecType = kVideoCodecUnknown;
    // Reset the camera to output I420.
    capture_encoder_->ConfigureEncoder(codec_, 0);

    if (vie_encoder_) {
      // Need to add the encoder as an observer of I420.
      ViEFrameProviderBase::RegisterFrameCallback(vie_encoder_id_,
                                                  vie_encoder_);
    }
    vie_encoder_ = NULL;
  }
  return 0;
}
825
826// Should reset the capture device to the state it was in after the InitEncode
827// function. Current implementation do nothing.
828WebRtc_Word32 ViECapturer::Reset() {
829 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
830 "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
831 return 0;
832}
833
834WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
835 int rtt) {
836 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
837 "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
838
839 CriticalSectionScoped cs(encoding_cs_.get());
840 if (!capture_encoder_) {
841 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
842 }
843 return capture_encoder_->SetChannelParameters(packet_loss, rtt);
844}
845
846WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
847 WebRtc_UWord32 frame_rate) {
848 WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
849 "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
850
851 CriticalSectionScoped cs(encoding_cs_.get());
852 if (!capture_encoder_) {
853 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
854 }
855 return capture_encoder_->SetRates(new_bit_rate, frame_rate);
856}
857
mikhal@webrtc.orgdc7e6cf2012-10-24 18:33:04 +0000858WebRtc_Word32 ViECapturer::FrameToRender(
859 I420VideoFrame& video_frame) { //NOLINT
andrew@webrtc.orga7b57da2012-10-22 18:19:23 +0000860 deliver_cs_->Enter();
861 DeliverI420Frame(&video_frame);
862 deliver_cs_->Leave();
863 return 0;
864}
865
866WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
867 if (observer_) {
868 WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
869 "%s Observer already registered", __FUNCTION__, capture_id_);
870 return -1;
871 }
872 if (capture_module_->RegisterCaptureCallback(*this) != 0) {
873 return -1;
874 }
875 capture_module_->EnableFrameRateCallback(true);
876 capture_module_->EnableNoPictureAlarm(true);
877 observer_ = observer;
878 return 0;
879}
880
// Removes the capture observer and disables the module callbacks that fed
// it. Fails with -1 if no observer is registered.
WebRtc_Word32 ViECapturer::DeRegisterObserver() {
  CriticalSectionScoped cs(observer_cs_.get());
  if (!observer_) {
    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
                 "%s No observer registered", __FUNCTION__, capture_id_);
    return -1;
  }
  capture_module_->EnableFrameRateCallback(false);
  capture_module_->EnableNoPictureAlarm(false);
  capture_module_->DeRegisterCaptureCallback();
  observer_ = NULL;
  return 0;
}
894
895bool ViECapturer::IsObserverRegistered() {
896 CriticalSectionScoped cs(observer_cs_.get());
897 return observer_ != NULL;
898}
899
// VideoCaptureModule callback: forwards the measured capture frame rate to
// the observer. observer_ is dereferenced without a NULL check — assumes the
// module only invokes this while a callback (and thus an observer) is
// registered; DeRegisterObserver() removes both under observer_cs_.
void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
                                     const WebRtc_UWord32 frame_rate) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "OnCaptureFrameRate %d", frame_rate);

  CriticalSectionScoped cs(observer_cs_.get());
  observer_->CapturedFrameRate(id_, (WebRtc_UWord8) frame_rate);
}

// VideoCaptureModule callback: translates the module's alarm into the ViE
// alarm type and forwards it to the observer (same non-NULL assumption as
// OnCaptureFrameRate above).
void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
                                   const VideoCaptureAlarm alarm) {
  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
               "OnNoPictureAlarm %d", alarm);

  CriticalSectionScoped cs(observer_cs_.get());
  CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
  observer_->NoPictureAlarm(id, vie_alarm);
}
918
919} // namespace webrtc