Example #1
    void onRead(T &obj) override
    {
        // only record every dwnsample-th message (downsampling)
        if (++cnt == dwnsample)
        {
            if (firstIncomingData)
            {
                yInfo() << "Incoming data detected";
                firstIncomingData = false;
            }

            DumpItem item;
            Stamp info;

            // retrieve the envelope (sequence number + timestamp) attached by the sender
            BufferedPort<T>::getEnvelope(info);
            item.seqNumber = info.getCount();

            // store the sender's transmission time if requested, or if a valid envelope is available
            if (txTime || (info.isValid() && !rxTime))
                item.timeStamp.setTxStamp(info.getTime());

            // otherwise (or additionally) record the local reception time
            if (rxTime || !info.isValid())
                item.timeStamp.setRxStamp(Time::now());

            item.obj = factory(obj);

            buf.lock();
            buf.push_back(item);
            buf.unlock();

            cnt = 0;
        }
    }
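The callback above only sees a valid Stamp if the sender attached one before writing. A minimal sketch of the writer side, assuming a hypothetical port name "/writer" and a Bottle payload chosen purely for illustration:

#include <yarp/os/all.h>
using namespace yarp::os;

int main()
{
    Network yarp;
    BufferedPort<Bottle> port;
    port.open("/writer");          // hypothetical port name

    Stamp stamp;                   // sequence counter + timestamp
    while (true)
    {
        Bottle &b = port.prepare();
        b.clear();
        b.addString("data");

        stamp.update();            // increment the count, set time to Time::now()
        port.setEnvelope(stamp);   // attach the envelope before writing
        port.write();

        Time::delay(0.1);
    }
    return 0;
}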
Example #2
int main(int argc, char *argv[]) {

	// Declare the YARP network
	Network yarp;
	if (!yarp.checkNetwork())
	{
		printf("YARP server not available!\n");
		return -1;
	}

	// Create a receiving YARP port
	BufferedPort<Sound> bufferPort;
	bufferPort.open("/receiver");
	if (!Network::exists("/receiver")) {
		printf("receiver not exists \n");
		return -1;
	}

	// Connect the receiving audio port to the sender on the pc104
	Network::connect("/sender", "/receiver");
	if (!Network::isConnected("/sender", "/receiver")) {
		printf("no connection \n");
		return -1;
	}

	double empty[samplingBuffer * 4] = {0}; // 4 doubles per sample: left channel, right channel, sample index, timestamp

	//Setup for memory mapping
	FILE *fid;
	fid = fopen("/tmp/AudioMemMap.tmp", "w");
	fwrite(empty, sizeof(double), samplingBuffer * 4, fid); 
	fclose(fid);
	int mappedFileID;
	mappedFileID = open("/tmp/AudioMemMap.tmp", O_RDWR);
	double *mappedAudioData;
	mappedAudioData = (double *)mmap(0, memoryMapSize_bytes, PROT_WRITE, MAP_SHARED , mappedFileID, 0);
	

	// Main loop that writes the sound into the memory-mapped region declared above
	Sound* s;
	Stamp ts;
	while (true) {
		// Blocking read; returns nullptr if the port is interrupted or closed
		s = bufferPort.read(true);
		if (s == nullptr)
			break;
		bufferPort.getEnvelope(ts);
		printf("count:%d time:%f \n", ts.getCount(), ts.getTime());
		int e0 = ts.getCount();
		double e1 = ts.getTime();
		int row = 0;
		for (int col = 0 ; col < samplingBuffer; col++) {
			NetInt16 temp_c = (NetInt16) s->get(col, 0);
			NetInt16 temp_d = (NetInt16) s->get(col, 1);
			mappedAudioData[row]        	= (double) 	temp_c / normDivid ;
			mappedAudioData[row + 1] 		= (double) 	temp_d / normDivid;
			mappedAudioData[row + 2] 		= (double) 	(e0 * samplingBuffer) + col;
			mappedAudioData[row + 3]		= (double) 	(e1 + col * samplePeriod);
			row += 4;

		}
	}
	return 0;
}
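The mapping above is created without error checks and never released. A minimal sketch of the open/mmap/munmap lifecycle for the same file path; the mapSizeBytes constant is an assumption standing in for memoryMapSize_bytes:

#include <cstddef>
#include <cstdio>
#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>

int main()
{
    const size_t mapSizeBytes = 4096;                 // assumption: size of the shared buffer in bytes
    int fd = open("/tmp/AudioMemMap.tmp", O_RDWR);
    if (fd < 0) { perror("open"); return -1; }

    void *mem = mmap(nullptr, mapSizeBytes, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
    if (mem == MAP_FAILED) { perror("mmap"); close(fd); return -1; }

    double *audio = static_cast<double *>(mem);
    audio[0] = 0.0;                                   // write through the mapping

    munmap(mem, mapSizeBytes);                        // release the mapping
    close(fd);
    return 0;
}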
Example #3
bool GBSegmModule::updateModule()
{
    ImageOf<PixelRgb> *yrpImgIn;
    static int cycles = 0;

    yrpImgIn = _imgPort.read();
    if (yrpImgIn == NULL)   // this is the case if module is requested to quit while waiting for image
        return true;

    bool use_private_stamp;
    Stamp s;
    if(!_imgPort.getEnvelope(s))
    {
        cout << "No stamp found in input image. Will use private stamp" << endl;
        use_private_stamp = true;
    }
    else
    {
        cout << "Received image #" << s.getCount() << " generated at time " << s.getTime() << endl;
        use_private_stamp = false;
    }

    if(cycles == 0)
        _timestart = yarp::os::Time::now();
    cycles++;

    //IplImage *iplimg = (IplImage*)yrpImgIn->getIplImage();
 cout << "converting image of size " << yrpImgIn->width() << yrpImgIn->height() <<" to size" << input->width() << input->height() << endl;
    YarpImageToRGBImage(input, yrpImgIn);
    cout << "converted" << endl;
    segMutex.wait();
    if(seg)
        delete seg;
    seg=segment_image(input, sigma, k, min_size, &num_components);
    segMutex.post();
    
    cout << "processed" << endl;
    //prepare timestamps
    if(use_private_stamp)
    {
        _stamp.update();
        _viewPort.setEnvelope(_stamp);
    }
    else
    {
        _viewPort.setEnvelope(s);
    }



    ImageOf<PixelRgb> &yrpImgView = _viewPort.prepare();
    //Rescale image if required

    yrpImgView.resize(seg->width(), seg->height());
    RGBImageToYarpImage(seg, &yrpImgView);
    _viewPort.write();


    //report the frame rate
    if(cycles % 100 == 0)
    {
        double cps = ((double)cycles)/(yarp::os::Time::now() - _timestart);
        printf("fps: %02.2f\n", cps);
    }
    return true;
}
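The core pattern in this module is: read an image, reuse the sender's envelope if present, otherwise fall back to a locally updated Stamp, and attach the chosen stamp to the output before writing. A minimal self-contained sketch of that pass-through pattern, with hypothetical port names "/passthrough/in" and "/passthrough/out":

#include <yarp/os/all.h>
#include <yarp/sig/Image.h>
using namespace yarp::os;
using namespace yarp::sig;

int main()
{
    Network yarp;
    BufferedPort<ImageOf<PixelRgb>> in, out;
    in.open("/passthrough/in");      // hypothetical port names
    out.open("/passthrough/out");

    Stamp privateStamp;
    while (true)
    {
        ImageOf<PixelRgb> *img = in.read();
        if (img == nullptr)
            break;                   // port closed while waiting

        Stamp s;
        if (!in.getEnvelope(s))      // no stamp from the sender:
        {
            privateStamp.update();   // fall back to a locally generated one
            s = privateStamp;
        }

        out.prepare() = *img;        // copy the image into the output buffer
        out.setEnvelope(s);          // forward (or substitute) the timestamp
        out.write();
    }
    return 0;
}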
Example #4
bool EdisonSegmModule::updateModule()
{
    ImageOf<PixelRgb> *yrpImgIn;
    static int cycles = 0;

    yrpImgIn = _imgPort.read();
    if (yrpImgIn == NULL)   // this is the case if module is requested to quit while waiting for image
        return true;

    bool use_private_stamp;
    Stamp s;
    if(!_imgPort.getEnvelope(s))
    {
        cout << "No stamp found in input image. Will use private stamp" << endl;
        use_private_stamp = true;
    }
    else
    {
        cout << "Received image #" << s.getCount() << " generated at time " << s.getTime() << endl;
        use_private_stamp = false;
    }

    if(cycles == 0)
        _timestart = yarp::os::Time::now();
    cycles++;

    IplImage *iplimg = (IplImage*)yrpImgIn->getIplImage();

    //computing the ROI to crop the image
	/*struct _IplROI roi;
	roi.coi = 0; // all channels are selected
	roi.height = height_;
	roi.width = width_;
	roi.xOffset = ( orig_width_ - width_ ) / 2;
	roi.yOffset = ( orig_height_ - height_ ) / 2;*/
	
	//copying roi data to buffer
	/*iplimg->roi = &roi;
	cvCopy( iplimg, inputImage.getIplImage());*/

    //Rescale image if required
    if( (width_ != orig_width_) || (height_ != orig_height_ ) )
            cvResize(iplimg, inputImage.getIplImage(), CV_INTER_NN);
    else
            cvCopy( iplimg, inputImage.getIplImage());

    double edgetime = yarp::os::Time::now();
    //compute gradient and confidence maps
    BgEdgeDetect edgeDetector(gradWindRad);
    BgImage bgImage;
    bgImage.SetImage(inputImage_, width_, height_, true);
    edgeDetector.ComputeEdgeInfo(&bgImage, confMap_, gradMap_);
    //compute the weight map
    for(int i = 0; i < width_*height_; i++) {
      if(gradMap_[i] > 0.02) {
		weightMap_[i] = mixture*gradMap_[i] + (1 - mixture)*confMap_[i];
      } else {
		weightMap_[i] = 0;
      }
    }
	///////////////////////////// This block can be parallelized
	cout << "Edge computation Time (ms): " << (yarp::os::Time::now() - edgetime)*1000.0 << endl;

	msImageProcessor iProc;
	if( dim_ == 3 )
		iProc.DefineImage(inputImage_, COLOR, height_, width_);
	else
	{	
		cvCvtColor(inputImage.getIplImage(), inputHsv.getIplImage(), CV_RGB2HSV);
		cvSplit(inputHsv.getIplImage(), inputHue.getIplImage(), 0, 0, 0);
		iProc.DefineImage(inputHue_, GRAYSCALE, height_, width_);
	}
	if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}
	iProc.SetWeightMap(weightMap_, threshold);
	if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}


	double filtertime = yarp::os::Time::now();
	iProc.Filter(sigmaS, sigmaR, speedup);
	if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}
	cout << "Mean Shift Filter Computation Time (ms): " << (yarp::os::Time::now() - filtertime)*1000.0 << endl;


    //obtain the filtered image
    iProc.GetResults(filtImage_);
	if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}
    
    //fuse regions
	double fusetime = yarp::os::Time::now();
    iProc.FuseRegions(sigmaR, minRegion);
    if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}
	cout << "Region Fusion Computation Time (ms): " << (yarp::os::Time::now() - fusetime)*1000.0 << endl;

    //obtain the segmented image
    iProc.GetResults(segmImage_);
    if(iProc.ErrorStatus) {
		cout << "MeanShift Error" << endl;
		return false;
	}

	//define the boundaries - do not need this
	/*
	RegionList *regionList        = iProc.GetBoundaries();
	int        *regionIndeces     = regionList->GetRegionIndeces(0);
	int        numRegions         = regionList->GetNumRegions();
	numBoundaries_ = 0;
	for(int i = 0; i < numRegions; i++) {
		numBoundaries_ += regionList->GetRegionCount(i);
	}
	if(boundaries_) delete [] boundaries_;
	boundaries_ = new int [numBoundaries_];
	for(int i = 0; i < numBoundaries_; i++) {
		boundaries_[i] = regionIndeces[i];
	}*/
		
	int regionCount; // how many regions have been found
	int *labels = NULL; //pointer for the labels (should this be released in the end?) 
	float *modes; //pointer for the Luv values (should this be released in the end?) 
	int *modePointCounts; //the area of each region (should this be released in the end?) 

	regionCount = iProc.GetRegions(&labels, &modes, &modePointCounts);
	int *labelp = (int*)labelImage.getRawImage();
	for(int i = 0; i < width_*height_; i++)
		labelp[i] = labels[i];
	
	IplImage *labelint = (IplImage*)labelImage.getIplImage();
	IplImage *labelchar = (IplImage*)labelView.getIplImage();

	cvConvert(labelint, labelchar);

	//prepare timestamps
	if(use_private_stamp)
	{
		_stamp.update();
		_labelPort.setEnvelope(_stamp);
		_labelViewPort.setEnvelope(_stamp);
		_viewPort.setEnvelope(_stamp);
		_filtPort.setEnvelope(_stamp);
		_rawPort.setEnvelope(_stamp);
	}
	else
	{
		_labelPort.setEnvelope(s);
		_labelViewPort.setEnvelope(s);
		_viewPort.setEnvelope(s);
		_filtPort.setEnvelope(s);
		_rawPort.setEnvelope(s);
	}

	ImageOf<PixelInt> &yrpImgLabel = _labelPort.prepare();
	//Rescale image if required
	if( (width_ != orig_width_) || (height_ != orig_height_ ) )
	{
		yrpImgLabel.resize(orig_width_, orig_height_);
		cvResize(labelImage.getIplImage(), yrpImgLabel.getIplImage(), CV_INTER_NN);
	}
	else
		yrpImgLabel = labelImage;
	_labelPort.write();

	ImageOf<PixelMono> &yrpImgDebug = _labelViewPort.prepare();
	//Rescale image if required
	if( (width_ != orig_width_) || (height_ != orig_height_ ) )
	{
		yrpImgDebug.resize(orig_width_, orig_height_);
		cvResize(labelView.getIplImage(), yrpImgDebug.getIplImage(), CV_INTER_NN);
	}
	else
		yrpImgDebug = labelView;
	_labelViewPort.write();


	ImageOf<PixelRgb> &yrpFiltOut = _filtPort.prepare();
	//Rescale image if required
	if( (width_ != orig_width_) || (height_ != orig_height_ ) )
	{
		yrpFiltOut.resize(orig_width_, orig_height_);
		cvResize(filtImage.getIplImage(), yrpFiltOut.getIplImage(), CV_INTER_NN);
	}
	else
		yrpFiltOut = filtImage;
	_filtPort.write();

	ImageOf<PixelRgb> &yrpImgView = _viewPort.prepare();
	//Rescale image if required
	if( (width_ != orig_width_) || (height_ != orig_height_ ) )
	{
		yrpImgView.resize(orig_width_, orig_height_);
		cvResize(segmImage.getIplImage(), yrpImgView.getIplImage(), CV_INTER_NN);
	}
	else
		yrpImgView = segmImage;
	_viewPort.write();

	ImageOf<PixelRgb> &yrpImgOut = _rawPort.prepare();
	yrpImgOut = *yrpImgIn;
	_rawPort.write();


	//report the frame rate
	if(cycles % 100 == 0)
	{
		double cps = ((double)cycles)/(yarp::os::Time::now() - _timestart);
		printf("fps: %02.2f\n", cps);
	}
    return true;
}
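This module attaches the same envelope to five output ports, one setEnvelope call per port. A small sketch of a helper that does this in one call; it is an assumption that the build uses C++17 (for the fold expression), and the port names in the demo main are illustrative:

#include <yarp/os/all.h>
using namespace yarp::os;

// Hypothetical helper (assumes C++17): attach one stamp to several output
// ports so every result derived from the same input frame carries an
// identical count/time pair, as the module above does port by port.
template <typename... Ports>
void stampAll(Stamp &s, Ports &...ports)
{
    (ports.setEnvelope(s), ...);   // fold expression: setEnvelope on each port
}

int main()
{
    Network yarp;
    BufferedPort<Bottle> labelOut, viewOut;
    labelOut.open("/demo/label");  // hypothetical port names
    viewOut.open("/demo/view");

    Stamp s;
    s.update();                    // increment count, set time to Time::now()
    stampAll(s, labelOut, viewOut);

    labelOut.prepare().addString("label");
    viewOut.prepare().addString("view");
    labelOut.write();
    viewOut.write();
    return 0;
}

In the module above, the equivalent call would be stampAll(s, _labelPort, _labelViewPort, _viewPort, _filtPort, _rawPort), keeping the per-frame stamps on all outputs in sync by construction.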