示例#1
0
/* Runs one capture benchmark: times `capture` over `frames` frames of
 * width x height and prints the achieved frame rate.
 * Returns 0 on success, or the logError() result on capture failure. */
static int benchmark (const char* name, OMXCAM_ERROR (*capture)(int, int, int), int frames, int width, int height){
  OMXCAM_ERROR error;
  long ms = now ();
  if ((error = capture (frames, width, height))) return logError (error);
  int diff = (int)(now () - ms);
  /* fps = frames / elapsed seconds; diff is in milliseconds */
  printf ("%s: %.2f fps (%d ms)\n", name, frames/(diff/1000.0), diff);
  return 0;
}

int main (){
  /* Benchmark parameters */
  int frames = 30;
  int width = 640;
  int height = 480;

  printf ("frames: %d\n", frames);
  printf ("width: %d\n", width);
  printf ("height: %d\n", height);

  /* Run both capture benchmarks; abort on the first failure. */
  int r;
  if ((r = benchmark ("rgb", rgb, frames, width, height))) return r;
  if ((r = benchmark ("yuv", yuv, frames, width, height))) return r;

  return 0;
}
// Logo-removal filter entry point: for each frame, copies the configured
// logo rectangle into a temporary picture and lets each plane's worker
// overwrite it according to the selected removal mode.
// NOTE(review): the visible chunk ends right after the outer if-block;
// the function's return statement and closing brace lie outside this view.
HRESULT TimgFilterLogoaway::process(TfilterQueue::iterator it,TffPict &pict,const TfilterSettingsVideo *cfg0)
{
    if (is(pict,cfg0)) {
        const TlogoawaySettings *cfg=(const TlogoawaySettings*)cfg0;
        init(pict,cfg->full,cfg->half);
        bool cspChanged;
        // Luma-only mode requests just the Y plane for writing; otherwise
        // all three YUV planes are fetched.
        if (cfg->lumaonly) {
            cspChanged=getCurNext(FF_CSPS_MASK_YUV_PLANAR,pict,cfg->full,COPYMODE_DEF,&plane[0].dst,NULL,NULL,NULL);
        } else {
            cspChanged=getCurNext(FF_CSPS_MASK_YUV_PLANAR,pict,cfg->full,COPYMODE_DEF,&plane[0].dst,&plane[1].dst,&plane[2].dst,NULL);
        }

        // Re-initialize the per-plane state whenever anything relevant
        // changed: colorspace, logo rectangle size, settings (luma-only,
        // blur, mode) or the parameter-bitmap filename (case-insensitive).
        if (cspChanged || logotemp.rectFull.dx!=(unsigned int)cfg->dx || logotemp.rectFull.dy!=(unsigned int)cfg->dy || !plane[0] || oldLumaOnly!=cfg->lumaonly || oldBlur!=cfg->blur || oldMode!=cfg->mode || stricmp(oldparambitmap,cfg->parambitmap)!=0) {
            oldLumaOnly=cfg->lumaonly;
            oldBlur=cfg->blur;
            oldMode=cfg->mode;
            ff_strncpy(oldparambitmap, cfg->parambitmap, countof(oldparambitmap));
            done();
            // Shape modes need an external parameter bitmap; it is only
            // kept if its dimensions match the configured logo rectangle.
            if (cfg->mode==TlogoawaySettings::MODE_SHAPEXY || cfg->mode==TlogoawaySettings::MODE_SHAPEUGLARM) {
                parambitmap=new TffPict(csp2,cfg->parambitmap,parambitmapbuf,deci);
                if (parambitmap->rectFull.dx!=(unsigned int)cfg->dx || parambitmap->rectFull.dy!=(unsigned int)cfg->dy) {
                    delete parambitmap;
                    parambitmap=NULL;
                }
            }
            // Per-plane geometry: width/height are scaled down by the
            // chroma subsampling shifts of the current colorspace.
            for (int i=0; i<(cfg->lumaonly?1:3); i++) {
                plane[i].shiftX=pict.cspInfo.shiftX[i];
                plane[i].shiftY=pict.cspInfo.shiftY[i];
                plane[i].w=cfg->dx>>plane[i].shiftX;
                plane[i].h=cfg->dy>>plane[i].shiftY;
                if (parambitmap) {
                    plane[i].parambitmapdata=parambitmap->data[i];
                    plane[i].parambitmapstride=parambitmap->stride[i];
                } else {
                    plane[i].parambitmapdata=NULL;
                    plane[i].parambitmapstride=0;
                }
                plane[i].init(cfg);
            }
        }

        // Copy the logo rectangle of the current picture into logotemp so
        // the workers can read the original pixels while writing dst.
        Trect tempR(cfg->x,cfg->y,cfg->dx,cfg->dy);
        logotemp.copyFrom(pict,logotempbuf,&tempR);

        // Per-plane solid fill color (chroma planes biased by +128).
        // NOTE(review): plane 1 gets yuv.V and plane 2 gets yuv.U — looks
        // swapped relative to the usual U,V plane order; confirm against
        // TffPict's plane layout before "fixing".
        YUVcolor yuv(cfg->solidcolor);
        for (int i=0; i<(cfg->lumaonly?1:3); i++) {
            plane[i].stride2=stride2[i];
            plane[i].logotempdata=logotemp.data[i];
            plane[i].logotempstride=logotemp.stride[i];
            plane[i].solidcolor=(i==0?yuv.Y:(i==1?yuv.V+128:yuv.U+128));
            plane[i].process(cfg);
        }
    }
示例#3
0
// Computes the mean per-plane entropy (Y, U, V) of a YUV sequence.
// Usage: YuvEntropy filename
// Per-frame entropies are written to the file "output"; the per-plane
// means are printed to stdout.  Returns 0 on success, 1 on error.
int main(int argc, char** argv){
	uint frames = 0;
	float t_Y, entropy_Y = 0.0f;
	float t_U, entropy_U = 0.0f;
	float t_V, entropy_V = 0.0f;

	if( argc < 2 ) {
		fprintf( stderr, "Usage: YuvEntropy filename\n" );
		return 1;
	}

	// Open the per-frame log only after the argument check so a usage
	// error neither leaks nor truncates the output file; check the result.
	FILE* fp = fopen("output", "wb");
	if( fp == NULL ) {
		fprintf( stderr, "Could not open output file\n" );
		return 1;
	}

	YUV yuv(argv[argc - 1]);

	while (yuv.readFrame()!=-1){
		// Per-frame entropy of each plane (0=Y, 1=U, 2=V).
		t_Y = frameEntropy(yuv, 0);
		t_U = frameEntropy(yuv, 1);
		t_V = frameEntropy(yuv, 2);

		// BUGFIX: was "t_Y, t_V, t_V" — the U column logged the V value.
		fprintf(fp,"%f %f %f\n", t_Y, t_U, t_V);

		entropy_Y += t_Y;
		entropy_U += t_U;
		entropy_V += t_V;
		frames++;
	}

	fclose(fp);

	// Guard the mean computation against an empty input sequence.
	if (frames == 0) {
		fprintf(stderr, "No frames read\n");
		return 1;
	}
	printf("Y: %f, U:%f, V:%f\n", entropy_Y/(float)frames, entropy_U/(float)frames, entropy_V/(float)frames);

	return 0;
}
示例#4
0
void GLLUTWidget::drawSlice(Slice * s, LUT3D * lut, int idx, bool draw_bg, bool draw_selection, bool draw_sampler) {
  // Repaints the background and/or selection surfaces of one LUT slice
  // (the Y-plane at index idx) and marks the touched surfaces dirty.
  // Only YUV-colorspace LUTs are supported.
  if (lut->getColorSpace()!=CSPACE_YUV) {
    printf("currently only YUV colorspace-luts are supported\n");
    return;
  }
  const unsigned char cols=lut->getSizeY();
  const unsigned char rows=lut->getSizeZ();
  // In bitwise channel mode with "other channels" hidden, display only
  // the bit belonging to the currently selected channel.
  const bool single_channel=(_mode==LUTChannelMode_Bitwise) && (actionViewToggleOtherChannels->isChecked()==false);
  for (unsigned char u=0;u<cols;u++) {
    for (unsigned char v=0;v<rows;v++) {
      // Background: the LUT cell's YUV coordinate rendered as a
      // half-transparent RGBA pixel.
      if (draw_bg) {
        s->bg->surface.setPixel(u,v,Conversions::rgb2rgba(Conversions::yuv2rgb(yuv(lut->lut2normX(idx),lut->lut2normY(u),lut->lut2normZ(v))),128));
      }
      // Selection overlay: the (possibly channel-masked) LUT entry.
      if (draw_selection) {
        if (single_channel) {
          s->selection->surface.setPixel(u,v,maskToRGBA(lut,(0x01 << state.channel) & lut->get_preshrunk(idx,u,v)));
        } else {
          s->selection->surface.setPixel(u,v,maskToRGBA(lut,lut->get_preshrunk(idx,u,v)));
        }
      }
    }
  }
  if (draw_bg) s->bg_update_pending=true;
  if (draw_selection) s->selection_update_pending=true;
  if (draw_sampler) s->sampler_update_pending=true;
}
// Capture loop (runs on its own thread): multiplexes the depth and color
// V4L2 devices with select(), dequeues filled buffers, converts/publishes
// them as ROS messages and re-queues the buffers with the driver.
// Exits when m_shouldExit is set or on an unrecoverable select()/ioctl error.
void XtionGrabber::read_thread()
{
	fd_set fds;

	while(!m_shouldExit)
	{
		// Wait until either device has a filled buffer ready.
		FD_ZERO(&fds);
		FD_SET(m_depth_fd, &fds);
		FD_SET(m_color_fd, &fds);

		int ret = select(std::max(m_depth_fd, m_color_fd)+1, &fds, 0, 0, 0);

		if(ret < 0)
		{
			perror("Could not select()");
			return;
		}

		if(FD_ISSET(m_color_fd, &fds))
		{
			// Dequeue the next filled color buffer (USERPTR streaming I/O).
			v4l2_buffer buf;
			memset(&buf, 0, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_USERPTR;

			if(ioctl(m_color_fd, VIDIOC_DQBUF, &buf) != 0)
			{
				// NOTE(review): EAGAIN restarts the whole loop, skipping the
				// depth fd even if it was also ready this round — confirm
				// this is intended (depth will be picked up next iteration).
				if(errno == EAGAIN)
					continue;
				perror("Could not dequeue buffer");
				return;
			}

			ColorBuffer* buffer = &m_color_buffers[buf.index];

			// Allocate an output image from the pool: 4 bytes per pixel.
			sensor_msgs::ImagePtr img = m_color_pool->create();
			img->width = m_colorWidth;
			img->height = m_colorHeight;
			img->step = img->width * 4;
			img->data.resize(img->step * img->height);
			img->header.stamp = timeFromTimeval(buf.timestamp);
			img->header.frame_id = "/camera_optical";

			img->encoding = sensor_msgs::image_encodings::BGRA8;

#if HAVE_LIBYUV
			// NOTE(review): libyuv's "ARGB" is B,G,R,A byte order on
			// little-endian machines, which matches the BGRA8 encoding set
			// above — presumably correct, but verify on the target platform.
			libyuv::ConvertToARGB(
				buffer->data.data(), buffer->data.size(),
				img->data.data(),
				m_colorWidth*4, 0, 0, m_colorWidth, m_colorHeight, m_colorWidth, m_colorHeight,
				libyuv::kRotate0, libyuv::FOURCC_UYVY
			);
#else
			// Scalar fallback: each 4-byte UYVY macro-pixel (U,Y1,V,Y2)
			// covers two horizontal pixels sharing one U/V pair.
			uint32_t* dptr = (uint32_t*)img->data.data();

			for(size_t y = 0; y < img->height; ++y)
			{
					for(size_t x = 0; x < img->width-1; x += 2)
					{
							unsigned char* base = &buffer->data[y*m_colorWidth*2+x*2];
							float y1 = base[1];
							float u  = base[0];
							float y2 = base[3];
							float v  = base[2];

							// yuv() presumably packs one converted pixel into a
							// 32-bit value — conversion helper defined elsewhere.
							uint32_t rgb1 = yuv(y1, u, v);
							uint32_t rgb2 = yuv(y2, u, v);

							dptr[y*img->width + x + 0] = rgb1;
							dptr[y*img->width + x + 1] = rgb2;
					}
			}
#endif

			// Remember the frame for the synchronized point-cloud step below.
			m_lastColorImage = img;
			m_lastColorSeq = buf.sequence;

			m_color_info.header.stamp = img->header.stamp;
			m_pub_color.publish(img, boost::make_shared<sensor_msgs::CameraInfo>(m_color_info));

			// Give the buffer back to the driver for refilling.
			if(ioctl(m_color_fd, VIDIOC_QBUF, &buffer->buf) != 0)
			{
				perror("Could not queue buffer");
				return;
			}
		}

		if(FD_ISSET(m_depth_fd, &fds))
		{
			// Dequeue the next filled depth buffer (USERPTR streaming I/O).
			v4l2_buffer buf;
			memset(&buf, 0, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_USERPTR;

			if(ioctl(m_depth_fd, VIDIOC_DQBUF, &buf) != 0)
			{
				if(errno == EAGAIN)
					continue;
				perror("Could not dequeue buffer");
				return;
			}

			DepthBuffer* buffer = &m_depth_buffers[buf.index];

			buffer->image->header.stamp = timeFromTimeval(buf.timestamp);

			// The depth image is published zero-copy: keep a reference,
			// then swap in a freshly allocated image before re-queuing.
			m_lastDepthImage = buffer->image;
			m_lastDepthSeq = buf.sequence;

			m_depth_info.header.stamp = buffer->image->header.stamp;
			m_pub_depth.publish(buffer->image, boost::make_shared<sensor_msgs::CameraInfo>(m_depth_info));

			buffer->image.reset();

			buffer->image = createDepthImage();
			buffer->buf.m.userptr = (long unsigned int)buffer->image->data.data();

			if(ioctl(m_depth_fd, VIDIOC_QBUF, &buffer->buf) != 0)
			{
				perror("Could not queue buffer");
				return;
			}
		}

		// Only build point clouds from a color/depth pair belonging to the
		// same hardware sequence number, and only if anyone is listening.
		if(m_lastColorSeq == m_lastDepthSeq)
		{
			if(m_pub_cloud.getNumSubscribers() != 0)
				publishPointCloud(m_lastDepthImage, &m_cloudGenerator, &m_pub_cloud);

			if(m_pub_filledCloud.getNumSubscribers() != 0)
			{
				// Fill depth holes using the color image before projecting.
				sensor_msgs::ImagePtr filledDepth = m_depthFiller.fillDepth(
					m_lastDepthImage, m_lastColorImage
				);
				publishPointCloud(filledDepth, &m_filledCloudGenerator, &m_pub_filledCloud);
			}
		}
	}

	fprintf(stderr, "read thread exit now\n");
}