示例#1
0
 virtual bool threadInit() override
 {
     // One-time setup before the writer thread starts: open the output port
     // and prepare a blank 100x100 frame to publish.
     p.open("/readWriteTest_writer");
     image.resize(100, 100);
     image.zero();    // start from an all-black image
     return true;     // always succeeds; returning true lets the thread run
 }
示例#2
0
void QGLVideo::paintImage(yarp::sig::ImageOf<yarp::sig::PixelRgb> &img){
	// Scale the incoming frame into _yrpImgCache (aspect-ratio preserving,
	// letterboxed to the widget size) and trigger a GL repaint.
	// _pixData is left pointing at the cache's raw pixel buffer.

	int wWidth = this->width();
	int wHeight = this->height();
	
	int width = img.width();
	int height = img.height();

	// Nothing to draw for an empty frame.
	if(width == 0 || height == 0){
		return;
	}

	// Rebuild the cache only when NEITHER cache dimension matches the window.
	// After an aspect-fit resize one cache dimension always equals the
	// window's, so a still-matching dimension means the cache fits.
	// NOTE(review): if the window is resized only along the matching axis
	// this test will not fire — confirm that is the intended behaviour.
	if (!(wWidth == _yrpImgCache.width() || wHeight == _yrpImgCache.height())) { 
		double ratioWindow = (double)wWidth/(double)wHeight;
		double ratioImage = (double)width/(double)height;
		_yrpImgCache.setQuantum(1);              // tightly packed rows, no padding
		_yrpImgCache.setTopIsLowIndex(false);	
		if(ratioWindow > ratioImage){
			// window wider than image: fit to window height
			_yrpImgCache.resize((int)(((double)width)*((double)wHeight/(double)height)), wHeight); 
		}
		else{
			// window taller than image: fit to window width
			_yrpImgCache.resize(wWidth, (int)(((double)height)*((double)wWidth/(double)width)));
		}		
		_yrpImgCache.zero();
	}

    _yrpImgCache.copy(img, _yrpImgCache.width(), _yrpImgCache.height()); // scale input image (if required)

	_pixData = (unsigned char*)_yrpImgCache.getRawImage();

	this->updateGL();
}
示例#3
0
bool USBCameraDriverRgb::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image)
{
    // Copy the latest RGB frame from the device into the caller's image,
    // resizing the destination first if its dimensions are stale.
    const bool sizeMatches = (image.width() == _width) && (image.height() == _height);
    if (!sizeMatches) {
        image.resize(_width, _height);
    }
    deviceRgb->getRgbBuffer(image.getRawImage());
    return true;
}
示例#4
0
bool MapGrid2D::setOccupancyGrid(yarp::sig::ImageOf<yarp::sig::PixelMono>& image)
{
    // Replace the stored occupancy layer; the input must already match the
    // map dimensions (use setSize() to change them).
    const bool sameSize = (image.width() == m_width) && (image.height() == m_height);
    if (!sameSize)
    {
        yError() << "The size of given occupancy grid does not correspond to the current map. Use method setSize() first.";
        return false;
    }
    m_map_occupancy = image;
    return true;
}
示例#5
0
文件: main.cpp 项目: robotology/yarp
void copyImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& src,
               Magick::Image& dest) {
    // Copy a YARP RGB image into a Magick++ image.
    int h = src.height();
    int w = src.width();
    dest.size(Magick::Geometry(w,h));
    dest.depth(8);   // 8 bits per channel, matching PixelRgb
    for (int i=0; i<h; i++) {
        // must transfer row by row, since YARP may use padding in representation
        // Select the destination row in the pixel cache; the returned
        // PixelPacket* is not needed since readPixels imports into the
        // selected region (the old code stored it in an unused local).
        dest.setPixels(0,i,w,1);
        dest.readPixels(Magick::RGBQuantum,(unsigned char *)(&src.pixel(0,i)));
    }
    dest.syncPixels();   // flush the pixel cache back into the image
}
示例#6
0
bool MapGrid2D::getMapImage(yarp::sig::ImageOf<PixelRgb>& image) const
{
    // Render the map's flag layer into an RGB image, one pixel per cell.
    image.setQuantum(1);              // tightly packed rows
    image.resize(m_width, m_height);
    image.zero();
    for (size_t row = 0; row < m_height; row++)
    {
        for (size_t col = 0; col < m_width; col++)
        {
            image.safePixel(col, row) = CellDataToPixel(m_map_flags.safePixel(col, row));
        }
    }
    return true;
}
示例#7
0
文件: main.cpp 项目: robotology/yarp
void copyImage(Magick::Image& src,
               yarp::sig::ImageOf<yarp::sig::PixelRgb>& dest) {
    // Copy a Magick++ image into a YARP RGB image.
    Magick::Geometry g = src.size();
    int h = g.height();
    int w = g.width();
    src.depth(8);        // 8 bits per channel, matching PixelRgb
    dest.resize(w,h);
    for (int i=0; i<h; i++) {
        // must transfer row by row, since YARP may use padding in representation
        // Load the source row into the pixel cache; the returned PixelPacket*
        // is not needed since writePixels exports the selected region
        // (the old code stored it in an unused local).
        src.getPixels(0,i,w,1);
        src.writePixels(Magick::RGBQuantum,(unsigned char *)(&dest.pixel(0,i)));
    }
    src.syncPixels();
}
示例#8
0
void TrackerPool::display(yarp::sig::ImageOf<yarp::sig::PixelRgb> &img){
  // Draw every active tracker, then overlay recent collision markers and
  // retire any marker that has been displayed for 100 frames.
  for(size_t t = 0; t < trackers_.size(); t++){
    if(trackers_[t].is_active()){
      //yarp::sig::PixelRgb color = trackers_[ii].is_active()?yarp::sig::PixelRgb(0, 0, 255):yarp::sig::PixelRgb(0, 255, 0);
      yarp::sig::PixelRgb color = yarp::sig::PixelRgb(0, 0, 255);
      trackers_[t].display(img, color);
    }
  }

  std::vector<int> expired;
  for(size_t c = 0; c < collisions_disp_.size(); c++){
    // Paint a small patch around the collision point (note: offsets cover
    // only {-1, 0}, i.e. a 2x2 patch, within the 128x128 sensor frame).
    for(int dx = -1; dx < 1; dx++){
      for(int dy = -1; dy < 1; dy++){
        const int x = collisions_disp_[c].x + dx;
        const int y = collisions_disp_[c].y + dy;
        const bool inside = (x >= 0) && (x < 128) && (y >= 0) && (y < 128);
        if(inside){
	  img.pixel(y, x) = yarp::sig::PixelRgb(20, 0, 255);
        }
      }
    }
    if(++collisions_disp_[c].count_disp == 100){
      expired.push_back(static_cast<int>(c));
    }
  }

  // Erase from the back so the earlier indices stay valid.
  while(!expired.empty()){
    collisions_disp_.erase(collisions_disp_.begin() + expired.back());
    expired.pop_back();
  }
}
示例#9
0
bool MapGrid2D::setMapImage(yarp::sig::ImageOf<PixelRgb>& image)
{
    // Import an RGB image as the map's flag layer. The image must already
    // match the map dimensions (set them with setSize() first).
    if (image.width() != (int)(m_width) ||
        image.height() != (int)(m_height))
    {
        // fixed typo in the user-facing message: "iamge" -> "image"
        yError() << "The size of given image does not correspond to the current map. Use method setSize() first.";
        return false;
    }
    for (size_t y = 0; y < m_height; y++)
    {
        for (size_t x = 0; x < m_width; x++)
        {
            // Translate each pixel colour into the corresponding cell flag.
            m_map_flags.safePixel(x, y) = PixelToCellData(image.safePixel(x, y));
        }
    }
    return true;
}
示例#10
0
bool VfwGrabber::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image) {
    // Grab one frame through the Video-for-Windows helper and deep-copy it
    // into 'image'. Also caches the last-seen frame size in _width/_height.
    icvGrabFrameCAM_VFW(&HELPER(system_resource));
    Image *img = icvRetrieveFrameCAM_VFW(&HELPER(system_resource));
    //printf("image size %d %d\n", img->width(), img->height());
    image.copy(*img);        // copy out of the driver-owned buffer
    _width = img->width();
    _height = img->height();
    return img->width()>0;   // a zero-width frame signals a failed grab
}
示例#11
0
bool realsense2Driver::getImage(yarp::sig::ImageOf<yarp::sig::PixelMono>& image)
{
    // Fetch one infrared stereo pair and pack it side by side
    // (left | right) into a single double-width mono image.
    if (!m_stereoMode)
    {
        yError()<<"realsense2Driver: infrared stereo stream not enabled";
        return false;
    }

    // NOTE(review): resize happens before the mutex is taken — confirm
    // width()/height() are safe to call unlocked.
    image.resize(width(), height());
    std::lock_guard<std::mutex> guard(m_mutex);
    rs2::frameset data = m_pipeline.wait_for_frames();   // blocks for next frameset

    rs2::video_frame frm1 = data.get_infrared_frame(1);  // left sensor
    rs2::video_frame frm2 = data.get_infrared_frame(2);  // right sensor

    int pixCode = pixFormatToCode(frm1.get_profile().format());

    if (pixCode != VOCAB_PIXEL_MONO && pixCode != VOCAB_PIXEL_MONO16)
    {
        yError() << "realsense2Driver: expecting Pixel Format MONO or MONO16";
        return false;
    }

    // Each output row holds one left row followed by one right row, so a
    // single source row occupies half of an output row.
    size_t singleImage_rowSizeByte   =  image.getRowSize()/2;
    unsigned char * pixelLeft     =  (unsigned char*) (frm1.get_data());
    unsigned char * pixelRight    =  (unsigned char*) (frm2.get_data());
    unsigned char * pixelOutLeft  =  image.getRawImage();
    unsigned char * pixelOutRight =  image.getRawImage() + singleImage_rowSizeByte;

    for (size_t h=0; h< image.height(); h++)
    {
        // Copy one left row into the first half of the output row and one
        // right row into the second half.
        memcpy(pixelOutLeft, pixelLeft, singleImage_rowSizeByte);
        memcpy(pixelOutRight, pixelRight, singleImage_rowSizeByte);
        pixelOutLeft  += 2*singleImage_rowSizeByte;   // advance one full output row
        pixelOutRight += 2*singleImage_rowSizeByte;
        pixelLeft     += singleImage_rowSizeByte;     // advance one source row
        pixelRight    += singleImage_rowSizeByte;
    }
    return true;

}
示例#12
0
文件: YarpCam.cpp 项目: cbm/NaoYARP
bool YarpCam::getImage ( yarp::sig::ImageOf< yarp::sig::PixelRgb >& image ) {

    // Wrap the camera's current frame buffer in 'image' without copying.
    // Locals are now initialized: the old code passed an indeterminate
    // 'buffer'/'size' to GetImage, which is UB if GetImage reads them or
    // fails before writing them.
    char* buffer = nullptr;
    unsigned size = 0;

    if ( !_camera->GetImage ( &buffer, size ) )
        return false;

    // NOTE(review): setExternal does not copy — 'buffer' must remain valid
    // for as long as the caller uses 'image'; confirm the camera guarantees
    // this.
    image.setExternal(buffer,width(),height());

    return true;

}
bool yarp::dev::GazeboYarpMultiCameraDriver::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& _image)
{
    // Compose the frames of all cameras into a single image, stacked
    // vertically or horizontally, optionally drawing a moving "time box"
    // indicator in a corner of each camera's slot.
    for (unsigned int i = 0; i < m_camera_count; ++i) {
        m_dataMutex[i]->wait();   // lock every per-camera buffer while copying
    }

    if (m_vertical) {
        _image.resize(m_max_width, m_max_height * m_camera_count);
    } else {
        _image.resize(m_max_width * m_camera_count, m_max_height);
    }

    unsigned char *pBuffer = _image.getRawImage();
    memset(pBuffer, 0, _image.getRawImageSize());   // black background for undersized cameras

    for (unsigned int i = 0; i < m_camera_count; ++i) {

        int rowsize = 3 * m_width[i];        // bytes per source row (RGB)
        int rowoffset = i * 3 * m_max_width; // horizontal byte offset of camera i's slot
        if (m_vertical) {
            if (m_max_width == m_width[i] && m_max_height == m_height[i]) {
                // Fast path: the camera fills its slot exactly.
                memcpy(pBuffer + (3 * m_max_width * m_max_height * i), m_imageBuffer[i], m_bufferSize[i]);
            } else {
                for (int r = 0; r < m_height[i]; r++) {
                    memcpy(pBuffer + (3 * m_max_width) * (m_max_height * i + r), m_imageBuffer[i] + rowsize * r, rowsize);
                }
            }
        } else {
            for (int r = 0; r < m_height[i]; r++) {
                memcpy(pBuffer + (3 * m_max_width * m_camera_count * r) + rowoffset, m_imageBuffer[i] + rowsize * r, rowsize);
            }
        }
    }

    if (m_display_time_box) {
        // was: m_counter = (++m_counter % 10); — modifies m_counter twice in
        // one expression (undefined behavior before C++17). This form is
        // equivalent and well defined in every standard revision.
        m_counter = (m_counter + 1) % 10;
        for (int c = m_counter*30; c < 30 + m_counter*30; c++) {
            for (int r=0; r<30; r++) {
                for (unsigned int i = 0; i < m_camera_count; ++i) {
                    unsigned char *pixel;
                    if (m_vertical) {
                        pixel = _image.getPixelAddress(m_width[i]-c-1, m_max_height * i + m_height[i]-r-1);
                    } else {
                        pixel = _image.getPixelAddress(m_max_width * i + m_width[i]-c-1, m_height[i]-r-1);
                    }
                    // Alternate the box colour between red and green.
                    pixel[0] = (m_counter % 2 == 0) ? 255 : 0;
                    pixel[1] = (m_counter % 2 == 0) ? 0 : 255;
                    pixel[2] = 0;
                }
            }
        }
    }

    for (unsigned int i = 0; i < m_camera_count; ++i) {
        m_dataMutex[i]->post();   // release every per-camera buffer
    }

    return true;
}
示例#14
0
bool BayerCarrier::debayerHalf(yarp::sig::ImageOf<PixelMono>& src,
                               yarp::sig::ImageOf<PixelRgb>& dest) {
    // Half-resolution debayer: each 2x2 Bayer cell of 'src' yields one RGB
    // pixel of 'dest'. Prefers dc1394's downsample method when usable.
    // dc1394 doesn't seem safe for arbitrary data widths
    if (src.width()%8==0) {
        dc1394video_frame_t dc_src;
        dc1394video_frame_t dc_dest;
        setDcImage(src,&dc_src,dcformat);
        setDcImage(dest,&dc_dest,dcformat);
        dc1394_debayer_frames(&dc_src,&dc_dest,DC1394_BAYER_METHOD_DOWNSAMPLE);
        return true;
    }

    if (bayer_method_set && !warned) {
        fprintf(stderr, "Not using dc1394 debayer methods (image width not a multiple of 8)\n");
        warned = true;
    }

    // a safer implementation that doesn't use dc1394
    int w = src.width();
    int h = src.height();
    int wo = dest.width();
    int ho = dest.height();
    // goff/roff give the parity of the green/red sites within a 2x2 cell;
    // the *x values place R and B on their column parity within the cell.
    int goff1 = 1-goff;
    int roffx = roff?goff:goff1;
    int boff = 1-roff;
    int boffx = boff?goff:goff1;
    for (int yo=0; yo<ho; yo++) {
        for (int xo=0; xo<wo; xo++) {
            PixelRgb& po = dest.pixel(xo,yo);
            int x = xo*2;
            int y = yo*2;
            // Cells touching the border are blacked out rather than sampled
            // out of bounds.
            if (x+1>=w-1 || y+1>=h-1) {
                po = PixelRgb(0,0,0);
                continue;
            }
            po.r = src.pixel(x+roffx,y+roff);
            po.b = src.pixel(x+boffx,y+boff);
            // Green appears twice per cell; average the two samples.
            po.g = (PixelMono)(0.5*(src.pixel(x+goff,y)+src.pixel(x+goff1,y+1)));
        }
    }
    return true;
}
示例#15
0
// Inactive trackers are displayed in blue, active trackers in red
void BlobTracker::display(yarp::sig::ImageOf<yarp::sig::PixelRgb> &img, yarp::sig::PixelRgb color){
    // Draw this blob's ellipse (half-axes scaled by k_) onto 'img' in place.
    // NOTE(review): the 'color' parameter is never used — the ellipse is
    // always drawn with the hard-coded Scalar(255,0,0). Confirm whether the
    // caller-supplied colour (and which channel order) should be honoured.
    
    Mat orig = (IplImage *)img.getIplImage();   // wraps the YARP buffer; drawing writes into img
    
    double a, b, alpha;
    this->get_ellipse_parameters(a, b, alpha);
    
    // Scale the half-axes by the display factor k_.
    a*=k_;
    b*=k_;
    
    alpha = alpha * 180 / M_PI; //convert to degrees for openCV ellipse function
    
    //open CV for drawing ellipse
    
    // Note the swapped centre coordinates (cen_y_, cen_x_).
    ellipse(orig, Point(cen_y_,cen_x_), Size(a,b), alpha, 0, 360, Scalar(255,0,0));

    //putText(orig, "act", Point(cen_y_,cen_x_), 1, 1, Scalar(255,0,0));
}
示例#16
0
void InputCallback::onRead(yarp::sig::ImageOf<yarp::sig::PixelRgba> &img)
#endif
{
    // Convert the received RGBA frame into a QVideoFrame and forward it to
    // the GUI through sigHandler. (The matching #if with an alternate
    // signature precedes this chunk.)

    uchar *tmpBuf;
    QSize s = (QSize(img.width(),img.height()));
#if QT_VERSION >= 0x050302
    int imgSize = img.getRawImageSize();   // includes any row padding
#else
    int imgSize = s.width() * s.height() * img.getPixelSize();
#endif

    // Allocate a QVideoFrame
    QVideoFrame frame(imgSize,
              s,
#if QT_VERSION >= 0x050302
              img.getRowSize(),
#else
              s.width() * img.getPixelSize(),
#endif
              QVideoFrame::Format_RGB32);

    // Maps the buffer
    frame.map(QAbstractVideoBuffer::WriteOnly);
    // Takes the ownership of the buffer in write only mode
    tmpBuf = frame.bits();
    unsigned char *rawImg = img.getRawImage();
    //int j = 0;
    // Inverts the planes because Qt Wants an image in RGB format instead of BGR
   /* for(int i=0; i<imgSize; i++){
        tmpBuf[j+2] = rawImg[i];
        i++;
        tmpBuf[j+1] = rawImg[i];
        i++;
        tmpBuf[j] = rawImg[i];
        tmpBuf[j+3] = 0;
        j+=4;
    }*/

#if QT_VERSION >= 0x050302
    // Buffer strides match: copy the whole frame at once.
    memcpy(tmpBuf,rawImg,imgSize);
#else
    // Older Qt: copy row by row, dropping any YARP row padding.
    for(int x =0; x < s.height(); x++) {
        memcpy(tmpBuf + x * (img.width() * img.getPixelSize()),
               rawImg + x * (img.getRowSize()),
               img.width() * img.getPixelSize());
    }
#endif

    //unmap the buffer
    frame.unmap();
    if(sigHandler){
        sigHandler->sendVideoFrame(frame);
    }

}
示例#17
0
void TestFrameGrabber::createTestImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>&
                                       image) {
    // Generate one synthetic test frame according to 'mode' (bouncing ball,
    // colour grid, sweeping line, random noise, or none) and refresh the
    // timestamp used by IPreciselyTimed.
    // to test IPreciselyTimed, make timestamps be mysteriously NNN.NNN42
    double t = Time::now();
    // NOTE(review): '(int)t' here looks like it was meant to be
    // '(int)(t*1000)' — confirm the intended rounding before changing it.
    t -= ((t*1000)-(int)t)/1000;
    t+= 0.00042;
    stamp.update(t);
    if (background.width()>0) {
        image.copy(background);
    } else {
        // was: zero() before resize() — zeroing first left the freshly
        // resized buffer uninitialized. Resize, then clear.
        image.resize(w,h);
        image.zero();
    }
    switch (mode) {
    case VOCAB_BALL:
        {
            // Green ball with a cyan core that performs a biased random walk
            // pulled back toward the image centre every 5th frame.
            addCircle(image,PixelRgb(0,255,0),bx,by,15);
            addCircle(image,PixelRgb(0,255,255),bx,by,8);
            if (ct%5!=0) {
                rnd *= 65537;
                rnd += 17;
                bx += (rnd%5)-2;
                rnd *= 65537;
                rnd += 17;
                by += (rnd%5)-2;
            } else {
                int dx = w/2 - bx;
                int dy = h/2 - by;
                if (dx>0) { bx++; }
                if (dx<0) { bx--; }
                if (dy>0) { by++; }
                if (dy<0) { by--; }
            }
        }
        break;
    case VOCAB_GRID:
        {
            // Red/green gradient across x/y, with a blue scanline at row ct.
            int ww = image.width();
            int hh = image.height();
            if (ww>1&&hh>1) {
                for (int x=0; x<ww; x++) {
                    for (int y=0; y<hh; y++) {
                        double xx = ((double)x)/(ww-1);
                        double yy = ((double)y)/(hh-1);
                        int r = int(0.5+255*xx);
                        int g = int(0.5+255*yy);
                        bool act = (y==ct);
                        image.pixel(x,y) = PixelRgb(r,g,act*255);
                    }
                }
            }
        }
        break;
    case VOCAB_LINE:
    default:
        {
            // Single red horizontal line sweeping down one row per frame.
            for (int i=0; i<image.width(); i++) {
                image.pixel(i,ct).r = 255;
            }
        }
        break;
    case VOCAB_RAND:
        {
            // from Alessandro Scalzo

            // Random-walk noise: each channel drifts by -1/0/+1 per pixel,
            // persisting across frames via the static accumulators.
            static unsigned char r=128,g=128,b=128;

            int ww = image.width();
            int hh = image.height();

            if (ww>1&&hh>1) {
                for (int x=0; x<ww; x++) {
                    for (int y=0; y<hh; y++) {
                        r += Random::uniform(-1,1);
                        g += Random::uniform(-1,1);
                        b += Random::uniform(-1,1);
                        image.pixel(x,y) = PixelRgb(r,g,b);
                    }
                }
            }
        }
        break;
    case VOCAB_NONE:
        break;
    }
    // Advance the scanline and clamp the ball position into the image.
    ct++;
    if (ct>=image.height()) {
        ct = 0;
    }
    if (by>=image.height()) {
        by = image.height()-1;
    }
    if (bx>=image.width()) {
        bx = image.width()-1;
    }
    if (bx<0) bx = 0;
    if (by<0) by = 0;
}
示例#18
0
bool BayerCarrier::debayerFull(yarp::sig::ImageOf<PixelMono>& src,
                               yarp::sig::ImageOf<PixelRgb>& dest) {
    // Full-resolution debayer: every source pixel becomes one RGB pixel,
    // with the two missing channels interpolated from the nearest
    // neighbours of the matching colour.
    // dc1394 doesn't seem safe for arbitrary data widths
    if (src.width()%8==0) {
        dc1394video_frame_t dc_src;
        dc1394video_frame_t dc_dest;
        setDcImage(src,&dc_src,dcformat);
        setDcImage(dest,&dc_dest,dcformat);
        dc1394_debayer_frames(&dc_src,&dc_dest,
                              (dc1394bayer_method_t)bayer_method);
        return true;
    }

    if (bayer_method_set && !warned) {
        fprintf(stderr, "Not using dc1394 debayer methods (image width not a multiple of 8)\n");
        warned = true;
    }
    int w = dest.width();
    int h = dest.height();
    // goff is the parity of green sites; roff/boff pick which row parity
    // holds red vs blue, and roffx/boffx their column parity on those rows.
    int goff1 = 1-goff;
    int roffx = roff?goff:goff1;
    int boff = 1-roff;
    int boffx = boff?goff:goff1;
    for (int y=0; y<h; y++) {
        for (int x=0; x<w; x++) {
            PixelRgb& po = dest.pixel(x,y);

            // G: native at green sites, else average of the 4-neighbours
            // ('ct' counts the neighbours present, guarding the borders).
            if ((x+y)%2==goff) {
                po.g = src.pixel(x,y);
            } else {
                float g = 0;
                int ct = 0;
                if (x>0) { g += src.pixel(x-1,y); ct++; }
                if (x<w-1) { g += src.pixel(x+1,y); ct++; }
                if (y>0) { g += src.pixel(x,y-1); ct++; }
                if (y<h-1) { g += src.pixel(x,y+1); ct++; }
                if (ct>0) g /= ct;
                po.g = (int)g;
            }

            // B: native at blue sites; on a blue row average horizontally,
            // on a blue column average vertically, else average diagonals.
            if (y%2==boff && x%2==boffx) {
                po.b = src.pixel(x,y);
            } else if (y%2==boff) {
                float b = 0;
                int ct = 0;
                if (x>0) { b += src.pixel(x-1,y); ct++; }
                if (x<w-1) { b += src.pixel(x+1,y); ct++; }
                if (ct>0) b /= ct;
                po.b = (int)b;
            } else if (x%2==boffx) {
                float b = 0;
                int ct = 0;
                if (y>0) { b += src.pixel(x,y-1); ct++; }
                if (y<h-1) { b += src.pixel(x,y+1); ct++; }
                if (ct>0) b /= ct;
                po.b = (int)b;
            } else {
                float b = 0;
                int ct = 0;
                if (x>0&&y>0) { b += src.pixel(x-1,y-1); ct++; }
                if (x>0&&y<h-1) { b += src.pixel(x-1,y+1); ct++; }
                if (x<w-1&&y>0) { b += src.pixel(x+1,y-1); ct++; }
                if (x<w-1&&y<h-1) { b += src.pixel(x+1,y+1); ct++; }
                if (ct>0) b /= ct;
                po.b = (int)b;
            }

            // R: same scheme as B, with the red row/column parities.
            if (y%2==roff && x%2==roffx) {
                po.r = src.pixel(x,y);
            } else if (y%2==roff) {
                float r = 0;
                int ct = 0;
                if (x>0) { r += src.pixel(x-1,y); ct++; }
                if (x<w-1) { r += src.pixel(x+1,y); ct++; }
                if (ct>0) r /= ct;
                po.r = (int)r;
            } else if (x%2==roffx) {
                float r = 0;
                int ct = 0;
                if (y>0) { r += src.pixel(x,y-1); ct++; }
                if (y<h-1) { r += src.pixel(x,y+1); ct++; }
                if (ct>0) r /= ct;
                po.r = (int)r;
            } else {
                float r = 0;
                int ct = 0;
                if (x>0&&y>0) { r += src.pixel(x-1,y-1); ct++; }
                if (x>0&&y<h-1) { r += src.pixel(x-1,y+1); ct++; }
                if (x<w-1&&y>0) { r += src.pixel(x+1,y-1); ct++; }
                if (x<w-1&&y<h-1) { r += src.pixel(x+1,y+1); ct++; }
                if (ct>0) r /= ct;
                po.r = (int)r;
            }
        }
    }
    return true;
}
示例#19
0
bool FfmpegGrabber::getAudioVisual(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image,
                                   yarp::sig::Sound& sound) {
    // Demux packets until a synchronized image/sound pair is available,
    // pacing delivery against stream timestamps when rate control is on.
    // On EOF, seeks back to the start once (only if shouldLoop) and retries.

    FfmpegHelper& helper = HELPER(system_resource);
    DecoderState& videoDecoder = helper.videoDecoder;
    DecoderState& audioDecoder = helper.audioDecoder;

    bool tryAgain = false;
    bool triedAgain = false;

    do {

        bool gotAudio = false;
        bool gotVideo = false;
        if (startTime<0.5) {
            startTime = Time::now();   // first call: anchor the pacing clock
        }
        double time_target = 0;
        while(av_read_frame(pFormatCtx, &packet)>=0) {
            // Is this a packet from the video stream?
            DBG printf("frame ");
            bool done = false;
            if (packet.stream_index==videoDecoder.getIndex()) {
                DBG printf("video ");
                done = videoDecoder.getVideo(packet);
                image.resize(1,1);   // placeholder; real frame fetched below
                if (done) {
                    //printf("got a video frame\n");
                    gotVideo = true;
                }
            // NOTE(review): the '} if (' below was likely meant to be
            // '} else if ('; as written, video packets also reach the
            // trailing else and print "other" (debug output only).
            } if (packet.stream_index==audioDecoder.getIndex()) {
                DBG printf("audio ");
                done = audioDecoder.getAudio(packet,sound);
                if (done) {
                    //printf("got an audio frame\n");
                    gotAudio = true;
                }
            } else {
                DBG printf("other ");
            }
            // Convert the packet's pts into seconds via the stream time base.
            AVRational& time_base = pFormatCtx->streams[packet.stream_index]->time_base;
            double rbase = av_q2d(time_base);

            DBG printf(" time=%g ", packet.pts*rbase);
            time_target = packet.pts*rbase;

            av_free_packet(&packet);
            DBG printf(" %d\n", done);
            // Deliver once we have whatever the sync mode requires from each
            // enabled stream (imageSync waits for a fresh video frame).
            if (((imageSync?gotVideo:videoDecoder.haveFrame())||!_hasVideo)&&
                ((imageSync?1:gotAudio)||!_hasAudio)) {
                if (_hasVideo) {
                    videoDecoder.getVideo(image);   // copy decoded frame out
                } else {
                    image.resize(0,0);
                }
                if (needRateControl) {
                    // Sleep until the packet's timestamp, scaled by 'pace'.
                    double now = (Time::now()-startTime)*pace;
                    double delay = time_target-now;
                    if (delay>0) {
                        DBG printf("DELAY %g ", delay);
                        Time::delay(delay);
                    } else {
                        DBG printf("NODELAY %g ", delay);
                    }
                }
                DBG printf("IMAGE size %dx%d  ", image.width(), image.height());
                DBG printf("SOUND size %d\n", sound.getSamples());
                if (!_hasAudio) {
                    sound.resize(0,0);
                }
                return true;
            }
        }

        // Reached EOF without producing a pair; retry at most once.
        tryAgain = !triedAgain;

        if (tryAgain) {
            if (!shouldLoop) {
                return false;
            }
            av_seek_frame(pFormatCtx,-1,0,AVSEEK_FLAG_BACKWARD);
            startTime = Time::now();   // restart the pacing clock after looping
        }
    } while (tryAgain);

    return false;
}