Example #1
int main(){
	Communicator* comm = new Communicator(512, "192.168.2.1", 9000, "*", 9001);

	//initialize camera
    VideoCapture cap(0);
    if(!cap.isOpened()){
        cout << "No camera found." << endl;
        return -1;
    }

    cap.set(CV_CAP_PROP_FRAME_WIDTH,320);
    cap.set(CV_CAP_PROP_FRAME_HEIGHT,240);
    cap.set(CV_CAP_PROP_FPS, 30);

    //initialize frame
    Mat frame; Mat grayFrame;
    cap >> frame;
    
    //tell the PC the frame dimensions (height/rows first, then width/cols)
    char frameHeightBuf[4];
    char frameWidthBuf[4];
    sprintf(frameHeightBuf, "%d", frame.rows);
    sprintf(frameWidthBuf, "%d", frame.cols);
    comm->send(frameHeightBuf, 4, 0);
    comm->send(frameWidthBuf, 4, 0);
    int frameSize = frame.rows*frame.cols;

    //buffer for an encoded frame (unused in this variant, which sends raw frames)
    vector<uchar> enc;

    while(1){
    	// capture a frame and convert it to grayscale
        cap >> frame;
    	cvtColor(frame, grayFrame, CV_BGR2GRAY);

        // send the raw grayscale frame
    	comm->send(grayFrame.data, frameSize, 0);
    }
}
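
The Communicator class used throughout these examples is not part of the listings. A minimal sketch of a ZeroMQ-based wrapper that is consistent with how it is called is given below: the first constructor argument (512) is assumed here to be a queue high-water mark, followed by the remote address/port for outgoing data and a local bind address/port for incoming data, plus blocking send/recv. This is an assumption for illustration, not the project's actual implementation; the PUSH/PULL socket pair in particular is a guess.

// Hypothetical Communicator sketch (ZeroMQ C API); the real class may differ.
#include <zmq.h>
#include <cstdio>

class Communicator {
public:
    Communicator(int hwm, const char* remoteIp, int remotePort,
                 const char* bindIp, int bindPort) {
        ctx = zmq_ctx_new();
        char ep[64];

        // outgoing socket: connect to the peer that consumes our data
        out = zmq_socket(ctx, ZMQ_PUSH);
        zmq_setsockopt(out, ZMQ_SNDHWM, &hwm, sizeof(hwm));
        snprintf(ep, sizeof(ep), "tcp://%s:%d", remoteIp, remotePort);
        zmq_connect(out, ep);

        // incoming socket: bind locally and wait for the peer to connect
        in = zmq_socket(ctx, ZMQ_PULL);
        zmq_setsockopt(in, ZMQ_RCVHWM, &hwm, sizeof(hwm));
        snprintf(ep, sizeof(ep), "tcp://%s:%d", bindIp, bindPort);
        zmq_bind(in, ep);
    }

    ~Communicator() {
        zmq_close(out);
        zmq_close(in);
        zmq_ctx_destroy(ctx);
    }

    // blocking unless flags contains ZMQ_NOBLOCK/ZMQ_DONTWAIT
    int send(const void* data, size_t len, int flags) {
        return zmq_send(out, data, len, flags);
    }

    int recv(void* data, size_t len, int flags) {
        return zmq_recv(in, data, len, flags);
    }

private:
    void* ctx;
    void* out;
    void* in;
};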
Example #2
int main(int argc, char *argv[]){
	Communicator* comm = new Communicator(512, "10.42.0.1", 8000, "*", 8002);

    thread t(recvOdometry, comm);

	//initialize laser
    urg_t urg;
    long int *scan;
    long max_distance = 5000;
    long min_distance = 150;
    long time_stamp;
    int i;
    int n;

    //check if laser is available
    if (open_urg_sensor(&urg, argc, argv) < 0) {
        return -1;
    }

    //allocate necessary memory for scanning
    int numberOfPoints = 685;
    int sizeOfScan = sizeof(int);
    int bytesPerScan = numberOfPoints*sizeOfScan;
    scan = (long int*)malloc(urg_max_data_size(&urg) * sizeof(scan[0]));
    if (!scan) {
        perror("urg_max_index()");
        return -1;
    }

    //allocate the int buffer that is actually sent (odometry + scan values)
    int* intScan = (int*) malloc(bytesPerScan);


    //give information to PC about bytes per scan
    char buf[8];    //large enough for the digits plus the terminating '\0'
    sprintf(buf, "%d", bytesPerScan);
    comm->send(buf, 4, 0);

    cout << numberOfPoints << endl;
    cout << sizeOfScan << endl;

    while(1){
    	//get new scan
        urg_start_measurement(&urg, URG_DISTANCE, 1, 0);
        n = urg_get_distance(&urg, scan, &time_stamp);
        if (n < 0) {
            printf("urg_get_distance: %s\n", urg_error(&urg));
            urg_close(&urg);
            return -1;
        }

        //the first four values are odometry:
        //put the latest odometry data at the start of intScan
        mtx.lock();
        intScan[0] = dx;
        intScan[1] = dy;
        intScan[2] = tyaw;
        intScan[3] = tt;
        tt = 0;
        tyaw = 0;
        mtx.unlock();

        //convert the long readings to int --> less data has to be sent
        for(int i=4; i<numberOfPoints; i++){
            intScan[i] = (int)scan[i];
        }

        //send new scan
        comm->send(intScan, bytesPerScan, 0);
    }

    t.join();

    free(scan);
    free(intScan);
    urg_close(&urg);
}
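
Example #2 relies on shared globals (mtx, dx, dy, tyaw, tt) and two helpers that are not shown: recvOdometry and open_urg_sensor. The sketches below are assumptions inferred from how they are used. recvOdometry is assumed to receive one packet of four ints per message and to accumulate yaw and time under the mutex (the packet layout is a guess); open_urg_sensor is modeled on the helper shipped with the URG library samples, with the device path and baud rate as placeholder values.

// Hypothetical helpers for Example #2; packet layout, device and baud rate are assumptions.
#include <mutex>
#include <cstdio>
#include "urg_sensor.h"
#include "urg_utils.h"
using namespace std;

mutex mtx;
int dx = 0, dy = 0;      // latest displacement estimate received from the PC
int tyaw = 0, tt = 0;    // yaw and time accumulated since the last scan was sent

void recvOdometry(Communicator* comm) {
    int packet[4];
    while (true) {
        // blocking receive of one odometry packet: (dx, dy, dyaw, dt)
        if (comm->recv(packet, sizeof(packet), 0) < 0)
            continue;

        mtx.lock();
        dx = packet[0];
        dy = packet[1];
        tyaw += packet[2];   // accumulated until main() sends and resets them
        tt  += packet[3];
        mtx.unlock();
    }
}

int open_urg_sensor(urg_t* urg, int argc, char* argv[]) {
    // open the Hokuyo over serial; the device can be overridden on the command line
    const char* device = (argc > 1) ? argv[1] : "/dev/ttyACM0";
    long baudrate = 115200;

    if (urg_open(urg, URG_SERIAL, device, baudrate) < 0) {
        printf("urg_open: %s\n", urg_error(urg));
        return -1;
    }
    return 0;
}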
Example #3
int main(){
	thread t(waitForKeyPress);

	list<CvPoint> opticalFlowVectors;

	//set up communicator	
	Communicator* comm = new Communicator(512, "192.168.2.3", 9002, "*", 9000);

	//receive the size of one image (height/rows first, then width/cols)
	char frameHeightBuf[4];
	char frameWidthBuf[4];
	comm->recv(frameHeightBuf, 4, 0);
	comm->recv(frameWidthBuf, 4, 0);
	//extract data
	int frameHeight = atoi(frameHeightBuf);
	int frameWidth = atoi(frameWidthBuf);
	int frameSize = frameWidth*frameHeight;

	cout << frameSize << endl;

	//declare frames (the Mat constructor takes rows, then cols)
	Mat frame1(frameHeight,frameWidth,CV_8UC1);
	Mat frame2(frameHeight,frameWidth,CV_8UC1);

	//receive the first frame
	comm->recv(frame1.data, frameSize, 0);

	//build pyramid for frame 1
	buildOpticalFlowPyramid(frame1, pyramid1, cvSize(pyrWindowSize,pyrWindowSize), 3);


	//start optical flow algorithm
	cout << "Started optical flow algorithm." << endl;
	high_resolution_clock::time_point t1 = high_resolution_clock::now();
	int iter = 0;
    mtx.lock();
    while(loop)
    {
    	mtx.unlock();
    	
    	//recv frame 2
		comm->recv(frame2.data, frameSize, 0);

		FeatureDetector* detector = new FastFeatureDetector(FASTThreshold,true);
		detector->detect(frame1, KeyPointVector);
		delete detector;

		if(KeyPointVector.size() > 30)
			FASTThreshold++;
		else 
			FASTThreshold--;

		//build pyramid for frame 2
		buildOpticalFlowPyramid(frame2, pyramid2, cvSize(pyrWindowSize,pyrWindowSize), 3);
		KeyPoint::convert(KeyPointVector, pointVector1);

		//run Lucas Kanade optical flow if features have been found
		if(KeyPointVector.size() > 0)
		{
			calcOpticalFlowPyrLK(pyramid1, pyramid2, pointVector1,
			 pointVector2, featuresFound, featuresError,
			 cvSize(pyrWindowSize,pyrWindowSize), 0,
			 cvTermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 10, 0.2),
			 0,0.0001);
		}


		Mat frame3;
		cvtColor(frame2, frame3, CV_GRAY2RGB);

		for(int i=0; i < pointVector1.size(); i++){
			if(featuresFound[i]==0 || featuresError[i]>50)
			{
				//printf("Error is: %f\n",featuresError[i]);
				
			} else {
				CvPoint p0 = cvPoint(
						cvRound(pointVector1[i].x),
						cvRound(pointVector1[i].y));

				CvPoint p1 = cvPoint(
						cvRound(pointVector2[i].x),
						cvRound(pointVector2[i].y));

				line(frame3, p0, p1, CV_RGB(255,0,0), 1, 8, 0);
			}
		}

		ostringstream fileName2;
		fileName2 << "flightphoto/flow" << iter <<".jpg";
		imwrite(fileName2.str(), frame3);

		//frame 2 and its pyramid become the reference for the next iteration
		frame1 = frame2.clone();
		pyramid1.swap(pyramid2);

		//find the average displacement
		displacement = trajectoryCalc(pointVector1, pointVector2, featuresFound,
		 featuresError, KeyPointVector.size());
		//Compensate angle: must be done on RPI

		char xBuf[16]; char yBuf[16];
		int xBufLen = sprintf(xBuf, "%d", displacement.x);
		int yBufLen = sprintf(yBuf, "%d", displacement.y);
		comm->send(xBuf,xBufLen,ZMQ_NOBLOCK);
		comm->send(yBuf,yBufLen,ZMQ_NOBLOCK);

		opticalFlowVectors.push_back(displacement);
		mtx.lock();
		iter ++;
	}
	mtx.unlock();
	t.join();

	high_resolution_clock::time_point t2 = high_resolution_clock::now();
	duration<double> time_span = duration_cast<duration<double>>(t2 - t1);
	double fps = ((double)opticalFlowVectors.size())/time_span.count();

	ofstream myFile;
	myFile.open ("opticalFlow.txt");	

	myFile << "FPS: \t" << fps << endl;

	for (list<CvPoint>::iterator it=opticalFlowVectors.begin(); it!=opticalFlowVectors.end(); ++it){
		  myFile << "x:\t" << it->x << "\ty:\t" << it->y << endl;
	}

  	myFile.close();		

}
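
Examples #3 and #4 refer to global state and two helpers that are not shown in the listings. The declarations and sketches below are assumptions inferred from how they are used: waitForKeyPress clears the loop flag under the mutex once the user presses Enter, and trajectoryCalc averages the displacement of the features that were tracked reliably (the error threshold of 50 mirrors the one used when the flow vectors are drawn). The initial values of pyrWindowSize and FASTThreshold are placeholders.

// Hypothetical shared state and helpers for Examples #3 and #4.
#include <opencv2/opencv.hpp>
#include <mutex>
#include <iostream>
using namespace std;
using namespace cv;

mutex mtx;
bool loop = true;                       // cleared by waitForKeyPress
vector<Mat> pyramid1, pyramid2;
vector<KeyPoint> KeyPointVector;
vector<Point2f> pointVector1, pointVector2;
vector<uchar> featuresFound;
vector<float> featuresError;
int pyrWindowSize = 21;                 // assumed LK window size
int FASTThreshold = 20;                 // assumed initial FAST threshold
CvPoint displacement;

// block until Enter is pressed, then tell the main loop to stop
void waitForKeyPress() {
    cin.get();
    mtx.lock();
    loop = false;
    mtx.unlock();
}

// average displacement over the reliably tracked features; (0,0) if none
CvPoint trajectoryCalc(const vector<Point2f>& p1, const vector<Point2f>& p2,
                       const vector<uchar>& found, const vector<float>& err,
                       size_t n) {
    float sumX = 0.0f, sumY = 0.0f;
    int used = 0;
    for (size_t i = 0; i < n && i < p1.size() && i < p2.size(); i++) {
        if (!found[i] || err[i] > 50)   // skip lost or unreliable features
            continue;
        sumX += p2[i].x - p1[i].x;
        sumY += p2[i].y - p1[i].y;
        used++;
    }
    if (used == 0)
        return cvPoint(0, 0);
    return cvPoint(cvRound(sumX / used), cvRound(sumY / used));
}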
Example #4
int main(){
	thread t(waitForKeyPress);

	list<CvPoint> opticalFlowVectors;

	//set up communicator	
	Communicator* comm = new Communicator(512, "192.168.2.3", 9002, "*", 9000);

	//receive the size of one image (height/rows first, then width/cols)
	char frameHeightBuf[4];
	char frameWidthBuf[4];
	comm->recv(frameHeightBuf, 4, 0);
	comm->recv(frameWidthBuf, 4, 0);
	//extract data
	int frameHeight = atoi(frameHeightBuf);
	int frameWidth = atoi(frameWidthBuf);
	int frameSize = frameWidth*frameHeight;

	//declare frames (the Mat constructor takes rows, then cols)
	Mat frame1(frameHeight,frameWidth,CV_8UC1);
	Mat frame2(frameHeight,frameWidth,CV_8UC1);

	//receive the first frame (sent compressed)
	//recv the size of the encoded frame
	char encSizeBuf[6];
	comm->recv(encSizeBuf, 6, 0);
	int encSize = atoi(encSizeBuf);

	vector<uchar> enc = vector<uchar>(encSize);

	//recv the encoded frame
	comm->recv(&enc[0], encSize, 0);

	//decompress the frame (QuickLZ)
	qlz_state_decompress *state_decompress = (qlz_state_decompress *)malloc(sizeof(qlz_state_decompress));
	qlz_decompress((const char*) &enc[0], (char*) frame1.data, state_decompress);


	//build pyramid for frame 1
	buildOpticalFlowPyramid(frame1, pyramid1, cvSize(pyrWindowSize,pyrWindowSize), 3);


	//start optical flow algorithm
	cout << "Started optical flow algorithm." << endl;
	high_resolution_clock::time_point t1 = high_resolution_clock::now();
    mtx.lock();
    while(loop)
    {
    	mtx.unlock();

    	//recv frame 2		
		//recv the size of the encoded frame
		comm->recv(encSizeBuf, 6, 0);
		encSize = atoi(encSizeBuf);

		enc = vector<uchar>(encSize);

		//recv the encoded frame
		comm->recv(&enc[0], encSize, 0);

		//decompress the received data into frame 2
		qlz_decompress((const char*) &enc[0], (char*) frame2.data, state_decompress);
		

		FeatureDetector* detector = new FastFeatureDetector(FASTThreshold,true);
		detector->detect(frame1, KeyPointVector);
		delete detector;

		if(KeyPointVector.size() > 30)
			FASTThreshold++;
		else 
			FASTThreshold--;

		//build pyramid for frame 2
		buildOpticalFlowPyramid(frame2, pyramid2, cvSize(pyrWindowSize,pyrWindowSize), 3);
		KeyPoint::convert(KeyPointVector, pointVector1);

		//run Lucas Kanade optical flow if features have been found
		if(KeyPointVector.size() > 0)
		{
			calcOpticalFlowPyrLK(pyramid1, pyramid2, pointVector1,
			 pointVector2, featuresFound, featuresError,
			 cvSize(pyrWindowSize,pyrWindowSize), 0,
			 cvTermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 10, 0.2),
			 0,0.0001);
		}

		//frame 2 and its pyramid become the reference for the next iteration
		frame1 = frame2.clone();
		pyramid1.swap(pyramid2);

		//find the average displacement
		displacement = trajectoryCalc(pointVector1, pointVector2, featuresFound,
		 featuresError, KeyPointVector.size());
		//Compensate angle: must be done on RPI

		char xBuf[16]; char yBuf[16];
		int xBufLen = sprintf(xBuf, "%d", displacement.x);
		int yBufLen = sprintf(yBuf, "%d", displacement.y);
		comm->send(xBuf,xBufLen,ZMQ_NOBLOCK);
		comm->send(yBuf,yBufLen,ZMQ_NOBLOCK);

		opticalFlowVectors.push_back(displacement);
		mtx.lock();
	}
	mtx.unlock();
	t.join();

	high_resolution_clock::time_point t2 = high_resolution_clock::now();
	duration<double> time_span = duration_cast<duration<double>>(t2 - t1);
	double fps = ((double)opticalFlowVectors.size())/time_span.count();

	ofstream myFile;
	myFile.open ("opticalFlow.txt");	

	myFile << "FPS: \t" << fps << endl;

	for (list<CvPoint>::iterator it=opticalFlowVectors.begin(); it!=opticalFlowVectors.end(); ++it){
		  myFile << "x:\t" << it->x << "\ty:\t" << it->y << endl;
	}

  	myFile.close();		

  	free(state_decompress);
}
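
Example #4 expects every frame to arrive QuickLZ-compressed, preceded by a 6-byte string holding the compressed size, while the capture loop shown in Example #1 sends raw frames. The matching compressing sender is therefore not part of these listings; the sketch below shows what it might look like. The function name sendCompressedFrames is hypothetical, and the 400-byte margin follows QuickLZ's documented worst-case expansion.

// Hypothetical compressing sender loop that matches what Example #4 reads.
#include <opencv2/opencv.hpp>
#include <cstdio>
#include <cstdlib>
#include <vector>
#include "quicklz.h"
using namespace std;
using namespace cv;

void sendCompressedFrames(Communicator* comm, VideoCapture& cap, int frameSize) {
    Mat frame, grayFrame;
    qlz_state_compress* state_compress =
        (qlz_state_compress*) calloc(1, sizeof(qlz_state_compress));
    // QuickLZ may expand incompressible input by up to 400 bytes
    vector<char> enc(frameSize + 400);

    while (1) {
        cap >> frame;
        cvtColor(frame, grayFrame, CV_BGR2GRAY);

        // compress the raw grayscale pixels
        size_t encSize = qlz_compress(grayFrame.data, &enc[0],
                                      frameSize, state_compress);

        // send the compressed size as a 6-byte decimal string, then the data,
        // in the order Example #4 receives them
        char encSizeBuf[6];
        snprintf(encSizeBuf, sizeof(encSizeBuf), "%d", (int)encSize);
        comm->send(encSizeBuf, 6, 0);
        comm->send(&enc[0], encSize, 0);
    }

    free(state_compress);
}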