/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
| 10 | |
| 11 | #include <sys/ioctl.h> |
| 12 | #include <unistd.h> |
| 13 | #include <sys/stat.h> |
| 14 | #include <fcntl.h> |
| 15 | #include <linux/videodev2.h> |
| 16 | #include <errno.h> |
| 17 | #include <stdio.h> |
| 18 | #include <sys/mman.h> |
| 19 | #include <string.h> |
| 20 | |
| 21 | #include <iostream> |
| 22 | #include <new> |
| 23 | |
| 24 | #include "ref_count.h" |
| 25 | #include "trace.h" |
| 26 | #include "thread_wrapper.h" |
| 27 | #include "critical_section_wrapper.h" |
| 28 | #include "video_capture_linux.h" |
| 29 | |
| 30 | namespace webrtc |
| 31 | { |
| 32 | namespace videocapturemodule |
| 33 | { |
| 34 | VideoCaptureModule* VideoCaptureImpl::Create(const WebRtc_Word32 id, |
| 35 | const char* deviceUniqueId) |
| 36 | { |
| 37 | RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation = |
| 38 | new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id); |
| 39 | |
| 40 | if (!implementation || implementation->Init(deviceUniqueId) != 0) |
| 41 | { |
| 42 | delete implementation; |
| 43 | implementation = NULL; |
| 44 | } |
| 45 | |
| 46 | return implementation; |
| 47 | } |
| 48 | |
// Constructor: initializes all members to "not yet opened / not capturing"
// sentinel values. Device selection happens later in Init(), and the device
// itself is only opened in StartCapture().
VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const WebRtc_Word32 id)
    : VideoCaptureImpl(id),
      _captureThread(NULL),
      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _deviceId(-1),                 // index n of /dev/video<n>; set by Init()
      _deviceFd(-1),                 // -1 means device not open
      _buffersAllocatedByDevice(-1), // buffer count granted by VIDIOC_REQBUFS
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(kVideoI420),
      _pool(NULL)                    // mmap'ed buffer pool; owned when non-NULL
{
}
| 64 | |
| 65 | WebRtc_Word32 VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) |
| 66 | { |
| 67 | int len = strlen((const char*) deviceUniqueIdUTF8); |
| 68 | _deviceUniqueId = new (std::nothrow) char[len + 1]; |
| 69 | if (_deviceUniqueId) |
| 70 | { |
| 71 | memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); |
| 72 | } |
| 73 | |
| 74 | int fd; |
| 75 | char device[32]; |
| 76 | bool found = false; |
| 77 | |
| 78 | /* detect /dev/video [0-63] entries */ |
| 79 | int n; |
| 80 | for (n = 0; n < 64; n++) |
| 81 | { |
| 82 | sprintf(device, "/dev/video%d", n); |
| 83 | if ((fd = open(device, O_RDONLY)) != -1) |
| 84 | { |
| 85 | // query device capabilities |
| 86 | struct v4l2_capability cap; |
| 87 | if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) |
| 88 | { |
| 89 | if (cap.bus_info[0] != 0) |
| 90 | { |
| 91 | if (strncmp((const char*) cap.bus_info, |
| 92 | (const char*) deviceUniqueIdUTF8, |
| 93 | strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id |
| 94 | { |
| 95 | close(fd); |
| 96 | found = true; |
| 97 | break; // fd matches with device unique id supplied |
| 98 | } |
| 99 | } |
| 100 | } |
| 101 | close(fd); // close since this is not the matching device |
| 102 | } |
| 103 | } |
| 104 | if (!found) |
| 105 | { |
| 106 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found"); |
| 107 | return -1; |
| 108 | } |
| 109 | _deviceId = n; //store the device id |
| 110 | return 0; |
| 111 | } |
| 112 | |
// Destructor: stops any running capture (which joins the capture thread and
// releases the buffers), then frees the critical section and the device fd.
VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
{
    StopCapture();
    if (_captureCritSect)
    {
        delete _captureCritSect;
    }
    // StopCapture() only closes the fd when capture was actually started;
    // a StartCapture() that failed mid-way can leave the device open, so
    // close it here as well.
    if (_deviceFd != -1)
        close(_deviceFd);
}
| 123 | |
| 124 | WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture( |
| 125 | const VideoCaptureCapability& capability) |
| 126 | { |
| 127 | if (_captureStarted) |
| 128 | { |
| 129 | if (capability.width == _currentWidth && |
| 130 | capability.height == _currentHeight && |
| 131 | _captureVideoType == capability.rawType) |
| 132 | { |
| 133 | return 0; |
| 134 | } |
| 135 | else |
| 136 | { |
| 137 | StopCapture(); |
| 138 | } |
| 139 | } |
| 140 | |
| 141 | CriticalSectionScoped cs(_captureCritSect); |
| 142 | //first open /dev/video device |
| 143 | char device[20]; |
| 144 | sprintf(device, "/dev/video%d", (int) _deviceId); |
| 145 | |
| 146 | if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) |
| 147 | { |
| 148 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 149 | "error in opening %s errono = %d", device, errno); |
| 150 | return -1; |
| 151 | } |
| 152 | |
| 153 | // Supported video formats in preferred order. |
| 154 | // If the requested resolution is larger than VGA, we prefer MJPEG. Go for |
| 155 | // I420 otherwise. |
| 156 | const int nFormats = 3; |
| 157 | unsigned int fmts[nFormats]; |
| 158 | if (capability.width > 640 || capability.height > 480) { |
| 159 | fmts[0] = V4L2_PIX_FMT_MJPEG; |
| 160 | fmts[1] = V4L2_PIX_FMT_YUV420; |
| 161 | fmts[2] = V4L2_PIX_FMT_YUYV; |
| 162 | } else { |
| 163 | fmts[0] = V4L2_PIX_FMT_YUV420; |
| 164 | fmts[1] = V4L2_PIX_FMT_YUYV; |
| 165 | fmts[2] = V4L2_PIX_FMT_MJPEG; |
| 166 | } |
| 167 | |
| 168 | struct v4l2_format video_fmt; |
| 169 | memset(&video_fmt, 0, sizeof(struct v4l2_format)); |
| 170 | video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 171 | video_fmt.fmt.pix.sizeimage = 0; |
| 172 | video_fmt.fmt.pix.width = capability.width; |
| 173 | video_fmt.fmt.pix.height = capability.height; |
| 174 | |
| 175 | bool formatMatch = false; |
| 176 | for (int i = 0; i < nFormats; i++) |
| 177 | { |
| 178 | video_fmt.fmt.pix.pixelformat = fmts[i]; |
| 179 | if (ioctl(_deviceFd, VIDIOC_TRY_FMT, &video_fmt) < 0) |
| 180 | { |
| 181 | continue; |
| 182 | } |
| 183 | else |
| 184 | { |
| 185 | formatMatch = true; |
| 186 | break; |
| 187 | } |
| 188 | } |
| 189 | if (!formatMatch) |
| 190 | { |
| 191 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 192 | "no supporting video formats found"); |
| 193 | return -1; |
| 194 | } |
| 195 | |
| 196 | if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) |
| 197 | _captureVideoType = kVideoYUY2; |
| 198 | else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) |
| 199 | _captureVideoType = kVideoI420; |
| 200 | else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) |
| 201 | _captureVideoType = kVideoMJPEG; |
| 202 | |
| 203 | //set format and frame size now |
| 204 | if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) |
| 205 | { |
| 206 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 207 | "error in VIDIOC_S_FMT, errno = %d", errno); |
| 208 | return -1; |
| 209 | } |
| 210 | |
| 211 | // initialize current width and height |
| 212 | _currentWidth = video_fmt.fmt.pix.width; |
| 213 | _currentHeight = video_fmt.fmt.pix.height; |
| 214 | _captureDelay = 120; |
| 215 | |
| 216 | // Trying to set frame rate, before check driver capability. |
| 217 | bool driver_framerate_support = true; |
| 218 | struct v4l2_streamparm streamparms; |
| 219 | memset(&streamparms, 0, sizeof(streamparms)); |
| 220 | streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 221 | if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { |
| 222 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 223 | "error in VIDIOC_G_PARM errno = %d", errno); |
| 224 | driver_framerate_support = false; |
| 225 | // continue |
| 226 | } else { |
| 227 | // check the capability flag is set to V4L2_CAP_TIMEPERFRAME. |
| 228 | if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) { |
| 229 | // driver supports the feature. Set required framerate. |
| 230 | memset(&streamparms, 0, sizeof(streamparms)); |
| 231 | streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 232 | streamparms.parm.capture.timeperframe.numerator = 1; |
| 233 | streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; |
| 234 | if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { |
| 235 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 236 | "Failed to set the framerate. errno=%d", errno); |
| 237 | driver_framerate_support = false; |
| 238 | } else { |
| 239 | _currentFrameRate = capability.maxFPS; |
| 240 | } |
| 241 | } |
| 242 | } |
| 243 | // If driver doesn't support framerate control, need to hardcode. |
| 244 | // Hardcoding the value based on the frame size. |
| 245 | if (!driver_framerate_support) { |
| 246 | if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) { |
| 247 | _currentFrameRate = 15; |
| 248 | } else { |
| 249 | _currentFrameRate = 30; |
| 250 | } |
| 251 | } |
| 252 | |
| 253 | if (!AllocateVideoBuffers()) |
| 254 | { |
| 255 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 256 | "failed to allocate video capture buffers"); |
| 257 | return -1; |
| 258 | } |
| 259 | |
| 260 | //start capture thread; |
| 261 | if (!_captureThread) |
| 262 | { |
| 263 | _captureThread = ThreadWrapper::CreateThread( |
| 264 | VideoCaptureModuleV4L2::CaptureThread, this, kHighPriority); |
| 265 | unsigned int id; |
| 266 | _captureThread->Start(id); |
| 267 | } |
| 268 | |
| 269 | // Needed to start UVC camera - from the uvcview application |
| 270 | enum v4l2_buf_type type; |
| 271 | type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 272 | if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) |
| 273 | { |
| 274 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 275 | "Failed to turn on stream"); |
| 276 | return -1; |
| 277 | } |
| 278 | |
| 279 | _captureStarted = true; |
| 280 | return 0; |
| 281 | } |
| 282 | |
// Stops capture: joins the capture thread first (deliberately outside the
// critical section, because CaptureProcess() holds it while waiting for
// frames), then tears down the buffers and closes the device. Always
// returns 0; safe to call when capture is not running.
WebRtc_Word32 VideoCaptureModuleV4L2::StopCapture()
{
    if (_captureThread) {
        // Make sure the capture thread stops using the critsect.
        _captureThread->SetNotAlive();
        if (_captureThread->Stop()) {
            delete _captureThread;
            _captureThread = NULL;
        } else
        {
            // Couldn't stop the thread, leak instead of crash.
            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                         "%s: could not stop capture thread", __FUNCTION__);
            assert(!"could not stop capture thread");
        }
    }

    CriticalSectionScoped cs(_captureCritSect);
    if (_captureStarted)
    {
        _captureStarted = false;
        _captureThread = NULL;

        // Unmaps the driver buffers (and issues VIDIOC_STREAMOFF), then
        // releases the device file descriptor.
        DeAllocateVideoBuffers();
        close(_deviceFd);
        _deviceFd = -1;
    }

    return 0;
}
| 313 | |
| 314 | //critical section protected by the caller |
| 315 | |
| 316 | bool VideoCaptureModuleV4L2::AllocateVideoBuffers() |
| 317 | { |
| 318 | struct v4l2_requestbuffers rbuffer; |
| 319 | memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); |
| 320 | |
| 321 | rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 322 | rbuffer.memory = V4L2_MEMORY_MMAP; |
| 323 | rbuffer.count = kNoOfV4L2Bufffers; |
| 324 | |
| 325 | if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) |
| 326 | { |
| 327 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 328 | "Could not get buffers from device. errno = %d", errno); |
| 329 | return false; |
| 330 | } |
| 331 | |
| 332 | if (rbuffer.count > kNoOfV4L2Bufffers) |
| 333 | rbuffer.count = kNoOfV4L2Bufffers; |
| 334 | |
| 335 | _buffersAllocatedByDevice = rbuffer.count; |
| 336 | |
| 337 | //Map the buffers |
| 338 | _pool = new Buffer[rbuffer.count]; |
| 339 | |
| 340 | for (unsigned int i = 0; i < rbuffer.count; i++) |
| 341 | { |
| 342 | struct v4l2_buffer buffer; |
| 343 | memset(&buffer, 0, sizeof(v4l2_buffer)); |
| 344 | buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 345 | buffer.memory = V4L2_MEMORY_MMAP; |
| 346 | buffer.index = i; |
| 347 | |
| 348 | if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) |
| 349 | { |
| 350 | return false; |
| 351 | } |
| 352 | |
| 353 | _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, |
| 354 | _deviceFd, buffer.m.offset); |
| 355 | |
| 356 | if (MAP_FAILED == _pool[i].start) |
| 357 | { |
| 358 | for (unsigned int j = 0; j < i; j++) |
| 359 | munmap(_pool[j].start, _pool[j].length); |
| 360 | return false; |
| 361 | } |
| 362 | |
| 363 | _pool[i].length = buffer.length; |
| 364 | |
| 365 | if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) |
| 366 | { |
| 367 | return false; |
| 368 | } |
| 369 | } |
| 370 | return true; |
| 371 | } |
| 372 | |
| 373 | bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() |
| 374 | { |
| 375 | // unmap buffers |
| 376 | for (int i = 0; i < _buffersAllocatedByDevice; i++) |
| 377 | munmap(_pool[i].start, _pool[i].length); |
| 378 | |
| 379 | delete[] _pool; |
| 380 | |
| 381 | // turn off stream |
| 382 | enum v4l2_buf_type type; |
| 383 | type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 384 | if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) |
| 385 | { |
| 386 | WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, |
| 387 | "VIDIOC_STREAMOFF error. errno: %d", errno); |
| 388 | } |
| 389 | |
| 390 | return true; |
| 391 | } |
| 392 | |
| 393 | bool VideoCaptureModuleV4L2::CaptureStarted() |
| 394 | { |
| 395 | return _captureStarted; |
| 396 | } |
| 397 | |
| 398 | bool VideoCaptureModuleV4L2::CaptureThread(void* obj) |
| 399 | { |
| 400 | return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess(); |
| 401 | } |
// One iteration of the capture thread loop: waits (up to 1 s) for a frame on
// the device fd, dequeues it, hands it to IncomingFrame() and re-queues the
// buffer. Returns false only on an unrecoverable select() error, which makes
// the ThreadWrapper loop stop; every other outcome returns true to continue.
bool VideoCaptureModuleV4L2::CaptureProcess()
{
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;

    // NOTE(review): the critical section is held across the blocking
    // select() below, so StopCapture() can wait up to a second for it —
    // looks intentional (protects _deviceFd/_pool) but worth confirming.
    _captureCritSect->Enter();

    FD_ZERO(&rSet);
    FD_SET(_deviceFd, &rSet);
    timeout.tv_sec = 1;
    timeout.tv_usec = 0;

    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
    if (retVal < 0 && errno != EINTR) // continue if interrupted
    {
        // select failed
        _captureCritSect->Leave();
        return false;
    }
    else if (retVal == 0)
    {
        // select timed out
        _captureCritSect->Leave();
        return true;
    }
    else if (!FD_ISSET(_deviceFd, &rSet))
    {
        // not event on camera handle
        _captureCritSect->Leave();
        return true;
    }

    if (_captureStarted)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // dequeue a buffer - repeat until dequeued properly!
        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
        {
            if (errno != EINTR)
            {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                             "could not sync on a buffer on device %s", strerror(errno));
                _captureCritSect->Leave();
                return true;
            }
        }
        // Describe the frame for the platform-independent base class.
        VideoCaptureCapability frameInfo;
        frameInfo.width = _currentWidth;
        frameInfo.height = _currentHeight;
        frameInfo.rawType = _captureVideoType;

        // convert to to I420 if needed
        IncomingFrame((unsigned char*) _pool[buf.index].start,
                      buf.bytesused, frameInfo);
        // enqueue the buffer again so the driver can refill it
        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
        {
            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                         "Failed to enqueue capture buffer");
        }
    }
    _captureCritSect->Leave();
    // Yield the CPU briefly so other threads can acquire the critsect.
    usleep(0);
    return true;
}
| 471 | |
| 472 | WebRtc_Word32 VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) |
| 473 | { |
| 474 | settings.width = _currentWidth; |
| 475 | settings.height = _currentHeight; |
| 476 | settings.maxFPS = _currentFrameRate; |
| 477 | settings.rawType=_captureVideoType; |
| 478 | |
| 479 | return 0; |
| 480 | } |
| 481 | } // namespace videocapturemodule |
| 482 | } // namespace webrtc |