Example #1
bool UndistortDriver::Capture( pb::CameraMsg& vImages )
{
  m_InMsg.Clear();
  const bool success = m_Input->Capture( m_InMsg );

  // Sanity check.
  if (static_cast<size_t>(m_InMsg.image_size()) > m_CamModel.size()) {
    fprintf(stderr, "HAL: Error! Expecting %d images but captured %d\n",
             static_cast<int>(m_CamModel.size()), m_InMsg.image_size());
    return false;
  }

  if(success) {
    for (int ii = 0; ii < m_InMsg.image_size(); ++ii) {
      pb::Image inimg = pb::Image(m_InMsg.image(ii));
      pb::ImageMsg* pimg = vImages.add_image();
      pimg->set_width(inimg.Width());
      pimg->set_height(inimg.Height());
      pimg->set_type( (pb::Type)inimg.Type());
      pimg->set_format( (pb::Format)inimg.Format());
      pimg->mutable_data()->resize(inimg.Width()*inimg.Height());

      pb::Image img = pb::Image(*pimg);
      calibu::Rectify(
            m_vLuts[ii], inimg.data(),
            reinterpret_cast<unsigned char*>(&pimg->mutable_data()->front()),
            img.Width(), img.Height());
    }
  }

  return success;
}
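
Most of the work in this example is delegated to calibu::Rectify, which presumably remaps every pixel of the output image through a precomputed per-camera lookup table (m_vLuts[ii]). As a rough illustration of what such a LUT-based remap does for a single-channel 8-bit image, a minimal sketch might look like the following; LutEntry and RectifyWithLut are hypothetical names, and calibu's actual LookupTable layout and interpolation may differ.

#include <cstdint>
#include <vector>

// Hypothetical lookup-table entry: for each output (rectified) pixel, the
// floating-point source coordinates to sample in the distorted input image.
struct LutEntry {
    float x;
    float y;
};

// Remap a single-channel 8-bit image through a per-pixel lookup table using
// bilinear interpolation. Illustrative only; not calibu's implementation.
void RectifyWithLut(const std::vector<LutEntry>& lut,
                    const uint8_t* in, uint8_t* out,
                    int width, int height)
{
    for (int v = 0; v < height; ++v) {
        for (int u = 0; u < width; ++u) {
            const LutEntry& e = lut[v * width + u];
            const int x0 = static_cast<int>(e.x);
            const int y0 = static_cast<int>(e.y);
            if (x0 < 0 || y0 < 0 || x0 + 1 >= width || y0 + 1 >= height) {
                out[v * width + u] = 0;  // sample falls outside the source image
                continue;
            }
            const float fx = e.x - x0;
            const float fy = e.y - y0;
            const float p00 = in[y0 * width + x0];
            const float p10 = in[y0 * width + x0 + 1];
            const float p01 = in[(y0 + 1) * width + x0];
            const float p11 = in[(y0 + 1) * width + x0 + 1];
            const float top = p00 + fx * (p10 - p00);
            const float bot = p01 + fx * (p11 - p01);
            out[v * width + u] = static_cast<uint8_t>(top + fy * (bot - top));
        }
    }
}

This also matches the buffer allocation above: resizing the output to Width()*Height() bytes suggests the driver assumes one 8-bit channel per image.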
Example #2
///////////////////////////////////////////////////////////////////////////////
// Consumer
bool ToyotaReaderDriver::Capture( pb::CameraMsg& vImages ) {

    if( m_qImageBuffer.size() == 0 && !m_bLoop) {
        return false;
    }

    boost::mutex::scoped_lock lock(m_Mutex);

    // Wait until the buffer has data to read
    while( m_qImageBuffer.size() == 0 ) {
        m_cBufferEmpty.wait(lock);
    }

    //***************************************************
    // consume from buffer
    //***************************************************

    // now fetch the next set of images from buffer
    vImages = m_qImageBuffer.front();

    // remove image from queue
    m_qImageBuffer.pop();

    //***************************************************
    // send notification that the buffer has space
    m_cBufferFull.notify_one();

    // sanity check
    // TODO: fix _Read so that if timestamps differ, then "align" reads
    const unsigned int nNumImgs = vImages.image_size();

    double dTime = vImages.image(0).timestamp();
    for( unsigned int ii = 1; ii < nNumImgs; ++ii ) {
        if( dTime != vImages.image(ii).timestamp() ) {
            std::cerr << "error: Timestamps do not match!!!" << std::endl;
            return false;
        }
    }


    return true;
}
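
Capture() here is the consumer half of a bounded-buffer pattern: it blocks on m_cBufferEmpty until the driver's read thread has queued a frame, then notifies m_cBufferFull so the producer can keep filling the queue. The producer half is not part of this excerpt; the sketch below shows both sides of the same pattern using standard-library primitives instead of boost. The class, the member names, and the CameraFrame type are illustrative, not the driver's actual API.

#include <condition_variable>
#include <cstddef>
#include <mutex>
#include <queue>

struct CameraFrame { /* image payload */ };

class FrameBuffer {
public:
    explicit FrameBuffer(size_t capacity) : capacity_(capacity) {}

    // Producer side: what a read thread would do.
    void Push(const CameraFrame& frame) {
        std::unique_lock<std::mutex> lock(mutex_);
        // Wait until the buffer has space to write.
        not_full_.wait(lock, [this] { return queue_.size() < capacity_; });
        queue_.push(frame);
        // Wake a consumer blocked waiting for data.
        not_empty_.notify_one();
    }

    // Consumer side: the equivalent of Capture() above.
    CameraFrame Pop() {
        std::unique_lock<std::mutex> lock(mutex_);
        // Wait until the buffer has data to read.
        not_empty_.wait(lock, [this] { return !queue_.empty(); });
        CameraFrame frame = queue_.front();
        queue_.pop();
        // Send notification that the buffer has space.
        not_full_.notify_one();
        return frame;
    }

private:
    std::mutex mutex_;
    std::condition_variable not_full_;
    std::condition_variable not_empty_;
    std::queue<CameraFrame> queue_;
    size_t capacity_;
};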
Example #3
bool SplitDriver::Capture( pb::CameraMsg& vImages )
{
    m_InMsg.Clear();
    if( m_Input->Capture( m_InMsg ) == false ) {
        return false;
    }

    if( m_InMsg.image_size() != 1 ) {
        std::cerr << "error: Split is expecting 1 image but instead got " << m_InMsg.image_size() << "." << std::endl;
        return false;
    }

    const pb::ImageMsg& InImg = m_InMsg.image(0);

    for( unsigned int ii = 0; ii < m_vROIs.size(); ++ii ) {

        pb::ImageMsg* pImg = vImages.add_image();

        pImg->set_format( InImg.format() );
        pImg->set_type( InImg.type() );
        pImg->set_timestamp( InImg.timestamp() );

        const unsigned int nChannels = InImg.format() == pb::PB_LUMINANCE ? 1 : 3;
        unsigned int nDepth = 1;
        if( InImg.type() == pb::PB_SHORT || InImg.type() == pb::PB_UNSIGNED_SHORT ) {
            nDepth = 2;
        } else if( InImg.type() == pb::PB_INT || InImg.type() == pb::PB_UNSIGNED_INT ) {
            nDepth = 4;
        } else if( InImg.type() == pb::PB_FLOAT ) {
            nDepth = 4;
        } else if( InImg.type() == pb::PB_DOUBLE ) {
            nDepth = 8;
        }

        const unsigned int nBytesPerPixel = nChannels * nDepth;

        const ImageRoi& ROI = m_vROIs[ii];
        const unsigned int nBytesPerRow = ROI.w * nBytesPerPixel;

        pImg->set_width( ROI.w );
        pImg->set_height( ROI.h );
        pImg->mutable_data()->resize( ROI.h * nBytesPerRow );

        unsigned char* pS = (unsigned char*)&InImg.data().front();
        unsigned char* pD = (unsigned char*)&pImg->mutable_data()->front();
        for( unsigned int jj = 0; jj < ROI.h; ++jj ) {
            // Offsets are in bytes: scale the source row and the x offset by the
            // pixel size, and advance the destination by one full output row.
            memcpy( pD, pS + (InImg.width()*(jj+ROI.y) + ROI.x)*nBytesPerPixel, nBytesPerRow );
            pD = pD + nBytesPerRow;
        }
    }

    return true;
}
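
The copy at the end of the loop has to be counted in bytes rather than pixels: the source row stride and the ROI's x offset are both scaled by nBytesPerPixel, and the destination pointer advances by one full output row (nBytesPerRow) per iteration. A generic version of that row-by-row ROI copy, with the strides spelled out, might look like the sketch below; CopyRoi is an illustrative helper, not part of HAL.

#include <cstddef>
#include <cstring>

// Copy a rectangular region of interest out of a tightly packed source image.
// Sizes are in pixels; bytes_per_pixel covers channels times byte depth.
void CopyRoi(const unsigned char* src, size_t src_width_px,
             size_t roi_x, size_t roi_y, size_t roi_w, size_t roi_h,
             size_t bytes_per_pixel, unsigned char* dst)
{
    const size_t src_stride = src_width_px * bytes_per_pixel;  // bytes per source row
    const size_t dst_stride = roi_w * bytes_per_pixel;         // bytes per output row
    const unsigned char* row = src + roi_y * src_stride + roi_x * bytes_per_pixel;
    for (size_t r = 0; r < roi_h; ++r) {
        std::memcpy(dst, row, dst_stride);
        row += src_stride;  // next source row
        dst += dst_stride;  // next output row
    }
}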
Example #4
File: RealSense.cpp  Project: areslp/HAL
bool RealSenseDriver::Capture( pb::CameraMsg& vImages )
{
    /* Use a two-vector, where the first image is RGB and the second is gray16 for depth or gray8 for IR. */
    vImages.Clear();

    uvc_frame_t* frameRGB = NULL;
    uvc_frame_t* frameDepth = NULL;
    uvc_frame_t *rgb; //to hold the YUV->RGB conversion
    uvc_error_t err;
    pb::ImageMsg* pimg;
    uint64_t scrDepth, scrRGB;

    scrDepth = 0;
    scrRGB = 0;
    
    /* Try to sync up the streams
       Based on the SCR value that is shared between the two cameras, it looks like 
       RGB leads (comes before) IR by about 21475451171 units (roughly) between frames
    
       Strategy: Make sure the difference between SCR values for each frame is less than
       frameSyncLimit units.

       Also, depth takes some time to actually start publishing - keep reading RGB until we get
       a good pair

    */

    //Two functions: advanceDepth, advanceRGB
    //Pick up an RGB, wait for a depth, see if diff > synclimit
    //yes? Repeat with new RGB
    //no? Stash both into protobuf and continue

    
    /*Pick up the depth image */
    int gotPair = 0;
    int needRGB = 1;
    int needDepth = 1;
    int advRGB = 0;
    
    int64_t frameSync;
    
    while (!gotPair) {
        while (needRGB) {
            err = getFrame(streamh_rgb, &frameRGB);

            if ((err == UVC_SUCCESS) && (frameRGB != 0)) {
                // Calc the sync
                memcpy(&scrRGB, &frameRGB->capture_time, sizeof(scrRGB));
                needRGB = 0;
            }
            usleep(1);
        }

        // Pick up a Depth
        while (needDepth) {
            err = getFrame(streamh_d, &frameDepth);
            if ((err == UVC_SUCCESS) && (frameDepth != 0)) {
                // Calc the sync
                memcpy(&scrDepth, &frameDepth->capture_time, sizeof(scrDepth));
                needDepth = 0;
            }
            usleep(1);
        }

        frameSync = diffSCR(scrDepth, scrRGB);

        if (!useSync) {
            // Don't care about the sync, just return the images
            gotPair = 1;
            break;
        }

        std::cout << "Sync?: R:" << scrRGB << " D: " << scrDepth << " diffSCR: " << frameSync;

        if (frameSync > frameSyncLimitPos) {
            // Depth is ahead of RGB, advance the RGB stream
            advRGB++;
            if (advRGB > 3) {
                // Restart, the RGB is out of whack
                needDepth = 1;
                std::cout << " Restarting acq sequence" << std::endl;
            } else {
                std::cout << " Advancing RGB stream" << std::endl;
            }
            needRGB = 1;
            continue;
        } else if (frameSync < frameSyncLimitNeg) {
            // Depth after RGB by too much, start over
            needRGB = 1;
            needDepth = 1;
            std::cout << " Bad sync, starting over" << std::endl;
            advRGB = 0;
            continue;
        } else {
            // Within limits, exit the loop
            std::cout << " Good sync" << std::endl;
            gotPair = 1;
        }
    }

    if (frameRGB) {
        pimg = vImages.add_image();
        pimg->set_type((pb::Type)pbtype_rgb);
        pimg->set_format((pb::Format)pbformat_rgb);
        pimg->set_width(frameRGB->width);
        pimg->set_height(frameRGB->height);
        // Convert the YUYV to RGB
        rgb = uvc_allocate_frame(frameRGB->width * frameRGB->height * 3);
        uvc_any2rgb(frameRGB, rgb);
        pimg->set_data(rgb->data, width_ * height_ * 3); // RGB uint8_t

        uvc_free_frame(rgb);
    }

    if (frameDepth) {
        pimg = vImages.add_image();
        pimg->set_type((pb::Type)pbtype_d);
        pimg->set_format((pb::Format)pbformat_d);
        pimg->set_width(frameDepth->width);
        pimg->set_height(frameDepth->height);

        if (useIR) {
            pimg->set_data(frameDepth->data, width_ * height_);     // gray uint8_t
        } else {
            pimg->set_data(frameDepth->data, width_ * height_ * 2); // gray uint16_t
        }
    }

    
    return true;
}
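
The pairing logic above depends on diffSCR(), which is not shown in this excerpt. Conceptually it only needs to return a signed difference between the two stream clock references so that a positive value means the depth frame is ahead of the RGB frame, which is what the frameSyncLimitPos/frameSyncLimitNeg comparisons assume. A minimal sketch, assuming the SCR values are comparable monotonically increasing counters (the real diffSCR may handle wraparound or unit conversion differently):

#include <cstdint>

// Signed difference between two stream clock references (SCRs).
// Positive result: the depth frame's clock is ahead of the RGB frame's clock.
// Illustrative only; the driver's actual diffSCR() may differ.
int64_t DiffScrSketch(uint64_t scr_depth, uint64_t scr_rgb)
{
    return static_cast<int64_t>(scr_depth) - static_cast<int64_t>(scr_rgb);
}

// The capture loop then classifies each candidate pair:
//   diff > frameSyncLimitPos  -> depth leads RGB too much, read another RGB frame
//   diff < frameSyncLimitNeg  -> depth trails RGB too much, restart both streams
//   otherwise                 -> close enough, keep the pair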