Example 1
bool AVTDShowGraph::buildCaptureGraph(IBaseFilter * pSrcFilter, int theIndex) {
    HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
        IID_ICaptureGraphBuilder2, (void **) &m_pCaptureGraphBuilder2);
    if (FAILED(hr)) {
        checkForDShowError(hr, "CoCreateInstance(m_pCaptureGraphBuilder2) failed", PLUS_FILE_LINE);
        return false;
    }
    // Attach the filter graph to the capture graph.
    hr = m_pCaptureGraphBuilder2->SetFiltergraph(m_pGraphBuilder);
    if (FAILED(hr)) {
        checkForDShowError(hr, "SetFiltergraph(m_pGraphBuilder) failed", PLUS_FILE_LINE);
        return false;
    }

    if (pSrcFilter == 0) {
        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        hr = findCaptureDevice(&pSrcFilter, theIndex);
        if (FAILED(hr)) {
            checkForDShowError(hr, "findCaptureDevice failed", PLUS_FILE_LINE);
            return false;
        }

    }
    m_pSrcFilter = pSrcFilter;
    
    setCameraParams(_myAVTCameraIndex);

    // Add Capture filter to our graph.
    hr = m_pGraphBuilder->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr)) {
        checkForDShowError(hr, "AddFilter(\"Video Capture\") failed", PLUS_FILE_LINE);
        return false;
    }

    setAnalogVideoFormat();

    addExtraFilters();

    hr = selectVideoFormat();

    if (FAILED(hr)) {
        // Log the failure but continue; the graph may still render with the device's default format.
        checkForDShowError(hr, "selectVideoFormat() failed", PLUS_FILE_LINE);
        //return false;  // unable to build graph
    }


    // Connect the extra filters into the graph
    IPin * pOut = get_pin(m_pSrcFilter, PINDIR_OUTPUT);
    IPin * pIn  = get_pin(m_pGrabFilter, PINDIR_INPUT);
    hr = m_pGraphBuilder->Connect(pOut, pIn);
    SafeRelease(pOut);
    SafeRelease(pIn);
    if (FAILED(hr)) {
        checkForDShowError(hr, "m_pGraphBuilder->Connect() failed", PLUS_FILE_LINE);
        return false;
    }

    pOut = get_pin(m_pGrabFilter, PINDIR_OUTPUT);
    hr = m_pGraphBuilder->Render(pOut);
    SafeRelease(pOut);
    if (FAILED(hr)) {
        // A render failure is logged but not treated as fatal; the graph built so far is kept.
        checkForDShowError(hr, "m_pGraphBuilder->Render() failed", PLUS_FILE_LINE);
    }
    return true;
}
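
The excerpt relies on a findCaptureDevice() helper that is not shown. As a rough, hedged sketch of what such a helper usually looks like with the system device enumerator mentioned in the comment above (the signature, member names, and error codes are assumptions for illustration, not the project's actual code):

HRESULT AVTDShowGraph::findCaptureDevice(IBaseFilter ** ppSrcFilter, int theIndex) {
    if (ppSrcFilter == NULL) {
        return E_POINTER;
    }
    *ppSrcFilter = NULL;

    // Create the system device enumerator.
    ICreateDevEnum * pDevEnum = NULL;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
        IID_ICreateDevEnum, (void **) &pDevEnum);
    if (FAILED(hr)) {
        return hr;
    }

    // Enumerate all video input devices (USB cameras, capture cards, ...).
    IEnumMoniker * pClassEnum = NULL;
    hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pClassEnum, 0);
    pDevEnum->Release();
    if (hr != S_OK || pClassEnum == NULL) {
        return E_FAIL;   // no video capture device installed
    }

    // Walk the monikers until the requested index, then bind it to a filter.
    IMoniker * pMoniker = NULL;
    int currentIndex = 0;
    while (pClassEnum->Next(1, &pMoniker, NULL) == S_OK) {
        if (currentIndex == theIndex) {
            hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void **) ppSrcFilter);
            pMoniker->Release();
            pClassEnum->Release();
            return hr;
        }
        pMoniker->Release();
        ++currentIndex;
    }
    pClassEnum->Release();
    return E_FAIL;   // index out of range
}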
Example 2
bool NaoCamera::initializeCamera(int & cameraFd,
                                 char const * cameraDevice,
                                 Buffer ** buffers,
                                 int & numBuffers,
                                 int & currentBuffer,
                                 int inputId,
                                 bool isBottomCamera) {
//  struct v4l2_capability cap;
  struct v4l2_format format;
  struct v4l2_streamparm streamparm;
  struct v4l2_requestbuffers reqbuf;
  struct v4l2_buffer buffer;
//  char formatName[5];
  int i;

  errno = 0;

  std::cout << "= Opening video device ..................";
  cameraFd = open(cameraDevice, O_RDWR);
  if (cameraFd < 0) {   // open() returns -1 on failure, not 0
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to open video device.");
    return true;
  }
  std::cout << ".. DONE =" << std::endl;

//  std::cout << "= Setting initial camera params .........";
//  if (setInitialCameraParams(cameraFd, isBottomCamera)) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Failed to set initial camera params.");
//    return true;
//  }
//  std::cout << ".. DONE =" << std::endl;
//
//  std::cout << "= Closing and re-opening video device ...";
//  close(cameraFd);
//  cameraFd = open(cameraDevice, O_RDWR);
//  if (!cameraFd) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Failed to re-open video device.");
//    return true;
//  }
//  std::cout << ".. DONE =" << std::endl;

//  std::cout << "= Setting input .........................";
//  int numInput = inputId;
//  if (ioctl(cameraFd, VIDIOC_S_INPUT, &numInput) < 0) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Failed to set input.");
//    return true;
//  }
//  std::cout << ".. DONE =" << std::endl;
//
//  std::cout << "= Querying capabilities .................";
//  if (ioctl(cameraFd, VIDIOC_QUERYCAP, &cap) < 0) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Failed to query capabilities.");
//    return true;
//  }
//  if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Video device cannot capture.");
//    return true;
//  }
//  std::cout << ".. DONE =" << std::endl;

  std::cout << "= Setting resolution ....................";
  memset(&format, 0, sizeof(struct v4l2_format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(cameraFd, VIDIOC_G_FMT, &format) < 0)
  {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to get format (1).");
    return true;
  }
  format.fmt.pix.width = IMAGE_WIDTH;
  format.fmt.pix.height = IMAGE_HEIGHT;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
  format.fmt.pix.field = V4L2_FIELD_NONE;
//  format.fmt.pix.bytesperline = format.fmt.pix.width * 2;
  if (ioctl(cameraFd, VIDIOC_S_FMT, &format) < 0)
  {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to set format.");
    return true;
  }
  if (format.fmt.pix.sizeimage != (unsigned int)IMAGE_SIZE) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Image size does not match.");
    return true;
  }
//  // Make sure the image format was set correctly
//  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
//  if (ioctl(cameraFd, VIDIOC_G_FMT, &format) < 0)
//  {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Failed to get format (2).");
//    return true;
//  }
//  int width = format.fmt.pix.width;
//  int height = format.fmt.pix.height;
//  int *formatNamePtr = (int *)formatName;
//  *formatNamePtr = format.fmt.pix.pixelformat;
//  formatName[4] = 0;
//  if ((width != IMAGE_WIDTH) || (height != IMAGE_HEIGHT) || (strcmp(formatName, "YUYV") != 0)) {
//    std::cout << " FAILED =" << std::endl;
//    LOG_ERROR("Error: camera acquiring image of size %dx%d in format %s.\n", width, height, formatName);
//  }
  std::cout << ".. DONE =" << std::endl;

  std::cout << "= Setting frame rate ....................";
  memset(&streamparm, 0, sizeof(struct v4l2_streamparm));
  streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(cameraFd, VIDIOC_G_PARM, &streamparm) != 0) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to get stream parameters.");
    return true;
  }

  streamparm.parm.capture.timeperframe.numerator = 1;
  streamparm.parm.capture.timeperframe.denominator = FRAMES_PER_SECOND;
//  streamparm.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
  if (ioctl(cameraFd, VIDIOC_S_PARM, &streamparm) != 0) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to set frame rate.");
    return true;
  }
  std::cout << ".. DONE =" << std::endl;

  std::cout << "= Setting up buffers ....................";
  memset(&reqbuf, 0, sizeof (reqbuf));
  reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbuf.memory = V4L2_MEMORY_MMAP;
  reqbuf.count = NUM_FRAME_BUFFERS;

  if (ioctl(cameraFd, VIDIOC_REQBUFS, &reqbuf) < 0) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to set buffer request mode.");
    return true;
  }
  numBuffers = reqbuf.count;

  *buffers = (Buffer*)calloc(numBuffers, sizeof(Buffer));
  if (*buffers == NULL) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to allocate memory for buffers.");
    return true;
  }

  for (i = 0; i < numBuffers; i++) {
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = reqbuf.type;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = i;
    if (ioctl(cameraFd, VIDIOC_QUERYBUF, &buffer) < 0) {
      std::cout << " FAILED =" << std::endl;
      LOG_ERROR("Error in VIDIOC_QUERYBUF.");
      return true;
    }
    (*buffers)[i].length = buffer.length;               /* remember for munmap() */
    (*buffers)[i].start  = mmap(NULL, buffer.length,
                             PROT_READ | PROT_WRITE, /* recommended */
                             MAP_SHARED,             /* recommended */
                             cameraFd, buffer.m.offset);
    if (MAP_FAILED == (*buffers)[i].start) {
      std::cout << " FAILED =" << std::endl;
      LOG_ERROR("Error in mmap for buffer %d.\n", i);
      /* Unmap and free the buffers mapped so far before bailing out. */
      for (int j = 0; j < i; j++) {
        munmap((*buffers)[j].start, (*buffers)[j].length);
      }
      free(*buffers);
      *buffers = NULL;
      return true;
    }
  }

  currentBuffer = 0;
  // Queue every buffer so the driver has somewhere to put frames once streaming starts.
  for (i = 0; i < numBuffers; i++) {
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = i;
    if (ioctl(cameraFd, VIDIOC_QBUF, &buffer) < 0) {
      std::cout << " FAILED =" << std::endl;
      LOG_ERROR("Error in VIDIOC_QBUF for buffer %d.\n", i);
      return true;
    }
  }

  std::cout << ".. DONE =" << std::endl;

//  queryCameraParams(cameraFd);

  std::cout << "= Setting camera params .................";
  if (setCameraParams(cameraFd, isBottomCamera)) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to set camera params.");
    return true;
  }
  std::cout << ".. DONE =" << std::endl;

  std::cout << "= Start streaming .......................";
  i = V4L2_BUF_TYPE_VIDEO_CAPTURE;   // VIDIOC_STREAMON takes a pointer to the buffer type
  if (ioctl(cameraFd, VIDIOC_STREAMON, &i) < 0) {
    std::cout << " FAILED =" << std::endl;
    LOG_ERROR("Failed to begin streaming.");
    return true;
  }
  std::cout << ".. DONE =" << std::endl;

  return false;   // false = success; every failure path above returns true
}
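
The mmap'ed buffers prepared above are consumed by a frame-grab routine that is not part of this excerpt. As a hedged sketch of the usual V4L2 dequeue/re-queue cycle (the function name and signature are assumptions for illustration; the project's actual capture code may differ):

bool NaoCamera::captureFrame(int cameraFd, Buffer * buffers, int & currentBuffer) {
  struct v4l2_buffer buffer;
  memset(&buffer, 0, sizeof(buffer));
  buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buffer.memory = V4L2_MEMORY_MMAP;

  // Block until the driver has filled a buffer, then take it out of the queue.
  if (ioctl(cameraFd, VIDIOC_DQBUF, &buffer) < 0) {
    LOG_ERROR("Error in VIDIOC_DQBUF.");
    return false;
  }

  // buffers[buffer.index].start now holds one complete YUYV frame of
  // IMAGE_SIZE bytes; process or copy it before handing the buffer back.
  currentBuffer = buffer.index;

  // Re-queue the buffer so the driver can fill it again.
  if (ioctl(cameraFd, VIDIOC_QBUF, &buffer) < 0) {
    LOG_ERROR("Error in VIDIOC_QBUF.");
    return false;
  }
  return true;   // note: unlike initializeCamera(), this sketch returns true on success
}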