bool USBCameraDriverRgb::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image)
{
    // Resize the destination only when it does not already match the device resolution.
    if ((image.width() != _width) || (image.height() != _height)) {
        image.resize(_width, _height);
    }
    deviceRgb->getRgbBuffer(image.getRawImage());
    return true;
}
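// Usage sketch (not part of the driver sources): getImage() above implements
// yarp::dev::IFrameGrabberImage, so a client normally reaches it through a
// PolyDriver.  The device name "usbCamera" and the lack of further options are
// assumptions made for illustration; check the device documentation for the
// real configuration parameters.
#include <yarp/os/Property.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/dev/FrameGrabberInterfaces.h>
#include <yarp/sig/Image.h>

void grabOneFrame()
{
    yarp::os::Property cfg;
    cfg.put("device", "usbCamera");   // hypothetical configuration
    yarp::dev::PolyDriver driver(cfg);

    yarp::dev::IFrameGrabberImage* grabber = nullptr;
    if (driver.isValid() && driver.view(grabber)) {
        yarp::sig::ImageOf<yarp::sig::PixelRgb> frame;
        grabber->getImage(frame);     // dispatches to USBCameraDriverRgb::getImage
    }
}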
void QGLVideo::paintImage(yarp::sig::ImageOf<yarp::sig::PixelRgb> &img)
{
    int wWidth  = this->width();
    int wHeight = this->height();
    int width   = img.width();
    int height  = img.height();

    if (width == 0 || height == 0) {
        return;
    }

    // Recompute the cached image size only if neither cache dimension already
    // matches the window: after an aspect-preserving fit one dimension always
    // equals the window's, so a mismatch on both means the window was resized.
    if (!(wWidth == _yrpImgCache.width() || wHeight == _yrpImgCache.height())) {
        double ratioWindow = (double)wWidth / (double)wHeight;
        double ratioImage  = (double)width / (double)height;

        _yrpImgCache.setQuantum(1);
        _yrpImgCache.setTopIsLowIndex(false);

        if (ratioWindow > ratioImage) {
            // need to stretch height
            _yrpImgCache.resize((int)(((double)width) * ((double)wHeight / (double)height)), wHeight);
        } else {
            // need to stretch width
            _yrpImgCache.resize(wWidth, (int)(((double)height) * ((double)wWidth / (double)width)));
        }
        _yrpImgCache.zero();
    }

    // scale input image (if required)
    _yrpImgCache.copy(img, _yrpImgCache.width(), _yrpImgCache.height());
    _pixData = (unsigned char*)_yrpImgCache.getRawImage();
    this->updateGL();
}
void InputCallback::onRead(yarp::sig::ImageOf<yarp::sig::PixelRgba> &img)
{
    uchar *tmpBuf;
    QSize s = QSize(img.width(), img.height());

#if QT_VERSION >= 0x050302
    int imgSize = img.getRawImageSize();
#else
    int imgSize = s.width() * s.height() * img.getPixelSize();
#endif

    // Allocate a QVideoFrame
    QVideoFrame frame(imgSize,
                      s,
#if QT_VERSION >= 0x050302
                      img.getRowSize(),
#else
                      s.width() * img.getPixelSize(),
#endif
                      QVideoFrame::Format_RGB32);

    // Map the buffer, taking ownership of it in write-only mode
    frame.map(QAbstractVideoBuffer::WriteOnly);
    tmpBuf = frame.bits();
    unsigned char *rawImg = img.getRawImage();

    //int j = 0;
    // Inverts the planes because Qt wants an image in RGB format instead of BGR
    /*for (int i = 0; i < imgSize; i++) {
        tmpBuf[j+2] = rawImg[i];
        i++;
        tmpBuf[j+1] = rawImg[i];
        i++;
        tmpBuf[j] = rawImg[i];
        tmpBuf[j+3] = 0;
        j += 4;
    }*/

#if QT_VERSION >= 0x050302
    memcpy(tmpBuf, rawImg, imgSize);
#else
    // Copy row by row, since the YARP image rows may be padded
    for (int x = 0; x < s.height(); x++) {
        memcpy(tmpBuf + x * (img.width() * img.getPixelSize()),
               rawImg + x * (img.getRowSize()),
               img.width() * img.getPixelSize());
    }
#endif

    // Unmap the buffer
    frame.unmap();

    if (sigHandler) {
        sigHandler->sendVideoFrame(frame);
    }
}
bool BayerCarrier::debayerHalf(yarp::sig::ImageOf<PixelMono>& src,
                               yarp::sig::ImageOf<PixelRgb>& dest)
{
    // dc1394 doesn't seem safe for arbitrary data widths
    if (src.width() % 8 == 0) {
        dc1394video_frame_t dc_src;
        dc1394video_frame_t dc_dest;
        setDcImage(src, &dc_src, dcformat);
        setDcImage(dest, &dc_dest, dcformat);
        dc1394_debayer_frames(&dc_src, &dc_dest, DC1394_BAYER_METHOD_DOWNSAMPLE);
        return true;
    }

    if (bayer_method_set && !warned) {
        fprintf(stderr, "Not using dc1394 debayer methods (image width not a multiple of 8)\n");
        warned = true;
    }

    // a safer implementation that doesn't use dc1394
    int w = src.width();
    int h = src.height();
    int wo = dest.width();
    int ho = dest.height();
    // Offsets locating the red, green and blue samples inside each 2x2 Bayer cell.
    int goff1 = 1 - goff;
    int roffx = roff ? goff : goff1;
    int boff = 1 - roff;
    int boffx = boff ? goff : goff1;
    for (int yo = 0; yo < ho; yo++) {
        for (int xo = 0; xo < wo; xo++) {
            PixelRgb& po = dest.pixel(xo, yo);
            // Each half-resolution output pixel is built from one 2x2 cell of the source.
            int x = xo * 2;
            int y = yo * 2;
            if (x + 1 >= w - 1 || y + 1 >= h - 1) {
                po = PixelRgb(0, 0, 0);
                continue;
            }
            po.r = src.pixel(x + roffx, y + roff);
            po.b = src.pixel(x + boffx, y + boff);
            po.g = (PixelMono)(0.5 * (src.pixel(x + goff, y) + src.pixel(x + goff1, y + 1)));
        }
    }
    return true;
}
bool MapGrid2D::setOccupancyGrid(yarp::sig::ImageOf<yarp::sig::PixelMono>& image)
{
    if (image.width() != m_width || image.height() != m_height) {
        yError() << "The size of given occupancy grid does not correspond to the current map. Use method setSize() first.";
        return false;
    }
    m_map_occupancy = image;
    return true;
}
void copyImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& src, Magick::Image& dest)
{
    int h = src.height();
    int w = src.width();
    dest.size(Magick::Geometry(w, h));
    dest.depth(8);
    // must transfer row by row, since YARP may use padding in representation
    for (int i = 0; i < h; i++) {
        dest.setPixels(0, i, w, 1);  // select row i of the destination pixel cache
        dest.readPixels(Magick::RGBQuantum, (unsigned char *)(&src.pixel(0, i)));
    }
    dest.syncPixels();
}
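// Usage sketch (assumptions flagged): copyImage() above can be used to hand a
// YARP image to Magick++ for encoding.  Magick::InitializeMagick() is assumed
// to have been called at program start-up; the output filename is illustrative.
#include <Magick++.h>
#include <yarp/sig/Image.h>

void saveAsPng(yarp::sig::ImageOf<yarp::sig::PixelRgb>& yimg)
{
    Magick::Image mimg;
    copyImage(yimg, mimg);    // row-by-row transfer shown above
    mimg.magick("PNG");
    mimg.write("snapshot.png");
}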
bool MapGrid2D::setMapImage(yarp::sig::ImageOf<PixelRgb>& image)
{
    if (image.width() != (int)(m_width) || image.height() != (int)(m_height)) {
        yError() << "The size of given image does not correspond to the current map. Use method setSize() first.";
        return false;
    }
    for (size_t y = 0; y < m_height; y++) {
        for (size_t x = 0; x < m_width; x++) {
            m_map_flags.safePixel(x, y) = PixelToCellData(image.safePixel(x, y));
        }
    }
    return true;
}
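// Usage sketch (assumptions flagged): both setters above require the map to be
// sized beforehand (the error messages refer to a setSize() step, not shown
// here).  The header path and the 0 = free / 100 = occupied convention used
// below are assumptions made only to illustrate filling the mono occupancy image.
#include <yarp/dev/MapGrid2D.h>
#include <yarp/sig/Image.h>

bool fillOccupancy(MapGrid2D& map, size_t w, size_t h)
{
    yarp::sig::ImageOf<yarp::sig::PixelMono> occ;
    occ.resize(w, h);
    occ.zero();                         // every cell starts as free
    occ.safePixel(w / 2, h / 2) = 100;  // mark one cell as occupied (illustrative)
    return map.setOccupancyGrid(occ);   // fails unless w/h match the map size
}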
bool realsense2Driver::getImage(yarp::sig::ImageOf<yarp::sig::PixelMono>& image)
{
    if (!m_stereoMode) {
        yError() << "realsense2Driver: infrared stereo stream not enabled";
        return false;
    }

    image.resize(width(), height());
    std::lock_guard<std::mutex> guard(m_mutex);
    rs2::frameset data = m_pipeline.wait_for_frames();

    rs2::video_frame frm1 = data.get_infrared_frame(1);
    rs2::video_frame frm2 = data.get_infrared_frame(2);

    int pixCode = pixFormatToCode(frm1.get_profile().format());

    if (pixCode != VOCAB_PIXEL_MONO && pixCode != VOCAB_PIXEL_MONO16) {
        yError() << "realsense2Driver: expecting Pixel Format MONO or MONO16";
        return false;
    }

    // The two infrared frames are packed side by side: each output row holds one
    // row of the left frame followed by one row of the right frame.
    size_t singleImage_rowSizeByte = image.getRowSize() / 2;

    unsigned char * pixelLeft     = (unsigned char*) (frm1.get_data());
    unsigned char * pixelRight    = (unsigned char*) (frm2.get_data());
    unsigned char * pixelOutLeft  = image.getRawImage();
    unsigned char * pixelOutRight = image.getRawImage() + singleImage_rowSizeByte;

    for (size_t h = 0; h < image.height(); h++) {
        memcpy(pixelOutLeft, pixelLeft, singleImage_rowSizeByte);
        memcpy(pixelOutRight, pixelRight, singleImage_rowSizeByte);
        pixelOutLeft  += 2 * singleImage_rowSizeByte;
        pixelOutRight += 2 * singleImage_rowSizeByte;
        pixelLeft     += singleImage_rowSizeByte;
        pixelRight    += singleImage_rowSizeByte;
    }
    return true;
}
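// Usage sketch (not from the driver): getImage() above packs the left and right
// infrared frames side by side, one half-row each.  Assuming the MONO (8-bit)
// format, the halves can be separated again as follows; MONO16 would need two
// bytes per pixel instead.
#include <cstring>
#include <yarp/sig/Image.h>

void splitStereo(yarp::sig::ImageOf<yarp::sig::PixelMono>& stereo,
                 yarp::sig::ImageOf<yarp::sig::PixelMono>& left,
                 yarp::sig::ImageOf<yarp::sig::PixelMono>& right)
{
    const size_t w = stereo.width() / 2;
    const size_t h = stereo.height();
    left.resize(w, h);
    right.resize(w, h);

    const size_t half = stereo.getRowSize() / 2;  // same offset the driver uses
    const unsigned char* in = stereo.getRawImage();
    for (size_t y = 0; y < h; y++) {
        std::memcpy(left.getRawImage() + y * left.getRowSize(), in, w);
        std::memcpy(right.getRawImage() + y * right.getRowSize(), in + half, w);
        in += stereo.getRowSize();
    }
}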
bool BayerCarrier::debayerFull(yarp::sig::ImageOf<PixelMono>& src,
                               yarp::sig::ImageOf<PixelRgb>& dest)
{
    // dc1394 doesn't seem safe for arbitrary data widths
    if (src.width() % 8 == 0) {
        dc1394video_frame_t dc_src;
        dc1394video_frame_t dc_dest;
        setDcImage(src, &dc_src, dcformat);
        setDcImage(dest, &dc_dest, dcformat);
        dc1394_debayer_frames(&dc_src, &dc_dest,
                              (dc1394bayer_method_t)bayer_method);
        return true;
    }

    if (bayer_method_set && !warned) {
        fprintf(stderr, "Not using dc1394 debayer methods (image width not a multiple of 8)\n");
        warned = true;
    }

    int w = dest.width();
    int h = dest.height();
    int goff1 = 1 - goff;
    int roffx = roff ? goff : goff1;
    int boff = 1 - roff;
    int boffx = boff ? goff : goff1;
    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            PixelRgb& po = dest.pixel(x, y);

            // G
            if ((x + y) % 2 == goff) {
                po.g = src.pixel(x, y);
            } else {
                float g = 0;
                int ct = 0;
                if (x > 0)     { g += src.pixel(x - 1, y); ct++; }
                if (x < w - 1) { g += src.pixel(x + 1, y); ct++; }
                if (y > 0)     { g += src.pixel(x, y - 1); ct++; }
                if (y < h - 1) { g += src.pixel(x, y + 1); ct++; }
                if (ct > 0) g /= ct;
                po.g = (int)g;
            }

            // B
            if (y % 2 == boff && x % 2 == boffx) {
                po.b = src.pixel(x, y);
            } else if (y % 2 == boff) {
                float b = 0;
                int ct = 0;
                if (x > 0)     { b += src.pixel(x - 1, y); ct++; }
                if (x < w - 1) { b += src.pixel(x + 1, y); ct++; }
                if (ct > 0) b /= ct;
                po.b = (int)b;
            } else if (x % 2 == boffx) {
                float b = 0;
                int ct = 0;
                if (y > 0)     { b += src.pixel(x, y - 1); ct++; }
                if (y < h - 1) { b += src.pixel(x, y + 1); ct++; }
                if (ct > 0) b /= ct;
                po.b = (int)b;
            } else {
                float b = 0;
                int ct = 0;
                if (x > 0 && y > 0)         { b += src.pixel(x - 1, y - 1); ct++; }
                if (x > 0 && y < h - 1)     { b += src.pixel(x - 1, y + 1); ct++; }
                if (x < w - 1 && y > 0)     { b += src.pixel(x + 1, y - 1); ct++; }
                if (x < w - 1 && y < h - 1) { b += src.pixel(x + 1, y + 1); ct++; }
                if (ct > 0) b /= ct;
                po.b = (int)b;
            }

            // R
            if (y % 2 == roff && x % 2 == roffx) {
                po.r = src.pixel(x, y);
            } else if (y % 2 == roff) {
                float r = 0;
                int ct = 0;
                if (x > 0)     { r += src.pixel(x - 1, y); ct++; }
                if (x < w - 1) { r += src.pixel(x + 1, y); ct++; }
                if (ct > 0) r /= ct;
                po.r = (int)r;
            } else if (x % 2 == roffx) {
                float r = 0;
                int ct = 0;
                if (y > 0)     { r += src.pixel(x, y - 1); ct++; }
                if (y < h - 1) { r += src.pixel(x, y + 1); ct++; }
                if (ct > 0) r /= ct;
                po.r = (int)r;
            } else {
                float r = 0;
                int ct = 0;
                if (x > 0 && y > 0)         { r += src.pixel(x - 1, y - 1); ct++; }
                if (x > 0 && y < h - 1)     { r += src.pixel(x - 1, y + 1); ct++; }
                if (x < w - 1 && y > 0)     { r += src.pixel(x + 1, y - 1); ct++; }
                if (x < w - 1 && y < h - 1) { r += src.pixel(x + 1, y + 1); ct++; }
                if (ct > 0) r /= ct;
                po.r = (int)r;
            }
        }
    }
    return true;
}
bool FfmpegGrabber::getAudioVisual(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image,
                                   yarp::sig::Sound& sound)
{
    FfmpegHelper& helper = HELPER(system_resource);
    DecoderState& videoDecoder = helper.videoDecoder;
    DecoderState& audioDecoder = helper.audioDecoder;

    bool tryAgain = false;
    bool triedAgain = false;

    do {
        bool gotAudio = false;
        bool gotVideo = false;
        if (startTime < 0.5) {
            startTime = Time::now();
        }
        double time_target = 0;
        while (av_read_frame(pFormatCtx, &packet) >= 0) {
            // Is this a packet from the video stream?
            DBG printf("frame ");
            bool done = false;
            if (packet.stream_index == videoDecoder.getIndex()) {
                DBG printf("video ");
                done = videoDecoder.getVideo(packet);
                image.resize(1, 1);
                if (done) {
                    //printf("got a video frame\n");
                    gotVideo = true;
                }
            }
            if (packet.stream_index == audioDecoder.getIndex()) {
                DBG printf("audio ");
                done = audioDecoder.getAudio(packet, sound);
                if (done) {
                    //printf("got an audio frame\n");
                    gotAudio = true;
                }
            } else {
                DBG printf("other ");
            }
            AVRational& time_base = pFormatCtx->streams[packet.stream_index]->time_base;
            double rbase = av_q2d(time_base);
            DBG printf(" time=%g ", packet.pts * rbase);
            time_target = packet.pts * rbase;
            av_free_packet(&packet);
            DBG printf(" %d\n", done);

            if (((imageSync ? gotVideo : videoDecoder.haveFrame()) || !_hasVideo) &&
                ((imageSync ? 1 : gotAudio) || !_hasAudio)) {
                if (_hasVideo) {
                    videoDecoder.getVideo(image);
                } else {
                    image.resize(0, 0);
                }
                if (needRateControl) {
                    double now = (Time::now() - startTime) * pace;
                    double delay = time_target - now;
                    if (delay > 0) {
                        DBG printf("DELAY %g ", delay);
                        Time::delay(delay);
                    } else {
                        DBG printf("NODELAY %g ", delay);
                    }
                }
                DBG printf("IMAGE size %dx%d ", image.width(), image.height());
                DBG printf("SOUND size %d\n", sound.getSamples());
                if (!_hasAudio) {
                    sound.resize(0, 0);
                }
                return true;
            }
        }

        // Reached the end of the stream without returning a frame: rewind and
        // retry when looping is enabled.
        tryAgain = !triedAgain;
        if (tryAgain) {
            if (!shouldLoop) {
                return false;
            }
            av_seek_frame(pFormatCtx, -1, 0, AVSEEK_FLAG_BACKWARD);
            startTime = Time::now();
        }
    } while (tryAgain);

    return false;
}
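// Usage sketch (assumptions flagged): getAudioVisual() above implements
// yarp::dev::IAudioVisualGrabber.  The device name "ffmpeg_grabber" and the
// "source" option are assumed here for illustration; consult the device
// documentation for the actual parameters.
#include <yarp/os/Property.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/dev/AudioVisualInterfaces.h>
#include <yarp/sig/Image.h>
#include <yarp/sig/Sound.h>

void playFile(const char* path)
{
    yarp::os::Property cfg;
    cfg.put("device", "ffmpeg_grabber");  // hypothetical configuration
    cfg.put("source", path);
    yarp::dev::PolyDriver driver(cfg);

    yarp::dev::IAudioVisualGrabber* av = nullptr;
    if (driver.isValid() && driver.view(av)) {
        yarp::sig::ImageOf<yarp::sig::PixelRgb> frame;
        yarp::sig::Sound chunk;
        while (av->getAudioVisual(frame, chunk)) {
            // frames and audio chunks arrive paced by the grabber's rate control
        }
    }
}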
void TestFrameGrabber::createTestImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image)
{
    // to test IPreciselyTimed, make timestamps be mysteriously NNN.NNN42
    double t = Time::now();
    t -= ((t * 1000) - (int)t) / 1000;
    t += 0.00042;
    stamp.update(t);

    if (background.width() > 0) {
        image.copy(background);
    } else {
        // size the image before clearing it, so the whole buffer is initialized
        image.resize(w, h);
        image.zero();
    }

    switch (mode) {
    case VOCAB_BALL:
        {
            addCircle(image, PixelRgb(0, 255, 0), bx, by, 15);
            addCircle(image, PixelRgb(0, 255, 255), bx, by, 8);
            if (ct % 5 != 0) {
                // random walk
                rnd *= 65537;
                rnd += 17;
                bx += (rnd % 5) - 2;
                rnd *= 65537;
                rnd += 17;
                by += (rnd % 5) - 2;
            } else {
                // drift back towards the image center
                int dx = w / 2 - bx;
                int dy = h / 2 - by;
                if (dx > 0) { bx++; }
                if (dx < 0) { bx--; }
                if (dy > 0) { by++; }
                if (dy < 0) { by--; }
            }
        }
        break;
    case VOCAB_GRID:
        {
            int ww = image.width();
            int hh = image.height();
            if (ww > 1 && hh > 1) {
                for (int x = 0; x < ww; x++) {
                    for (int y = 0; y < hh; y++) {
                        double xx = ((double)x) / (ww - 1);
                        double yy = ((double)y) / (hh - 1);
                        int r = int(0.5 + 255 * xx);
                        int g = int(0.5 + 255 * yy);
                        bool act = (y == ct);
                        image.pixel(x, y) = PixelRgb(r, g, act * 255);
                    }
                }
            }
        }
        break;
    case VOCAB_LINE:
    default:
        {
            for (int i = 0; i < image.width(); i++) {
                image.pixel(i, ct).r = 255;
            }
        }
        break;
    case VOCAB_RAND:
        {
            // from Alessandro Scalzo
            static unsigned char r = 128, g = 128, b = 128;
            int ww = image.width();
            int hh = image.height();
            if (ww > 1 && hh > 1) {
                for (int x = 0; x < ww; x++) {
                    for (int y = 0; y < hh; y++) {
                        //r += (rand() % 3) - 1;
                        //g += (rand() % 3) - 1;
                        //b += (rand() % 3) - 1;
                        r += Random::uniform(-1, 1);
                        g += Random::uniform(-1, 1);
                        b += Random::uniform(-1, 1);
                        image.pixel(x, y) = PixelRgb(r, g, b);
                    }
                }
            }
        }
        break;
    case VOCAB_NONE:
        break;
    }

    ct++;
    if (ct >= image.height()) {
        ct = 0;
    }
    if (by >= image.height()) {
        by = image.height() - 1;
    }
    if (bx >= image.width()) {
        bx = image.width() - 1;
    }
    if (bx < 0) bx = 0;
    if (by < 0) by = 0;
}
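// Usage sketch (assumptions flagged): createTestImage() above backs YARP's
// "test_grabber" device, so the patterns can be pulled through the standard
// frame-grabber interface.  The option names ("mode", "width", "height") are
// assumed from the device's usual configuration; check its documentation.
#include <yarp/os/Property.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/dev/FrameGrabberInterfaces.h>
#include <yarp/sig/Image.h>

void grabTestPattern()
{
    yarp::os::Property cfg;
    cfg.put("device", "test_grabber");
    cfg.put("mode", "ball");          // or "line", "grid", "rand"
    cfg.put("width", 320);
    cfg.put("height", 240);
    yarp::dev::PolyDriver driver(cfg);

    yarp::dev::IFrameGrabberImage* grabber = nullptr;
    if (driver.isValid() && driver.view(grabber)) {
        yarp::sig::ImageOf<yarp::sig::PixelRgb> frame;
        grabber->getImage(frame);     // filled by createTestImage()
    }
}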