Ejemplo n.º 1
0
void LiveSLAMWrapper::Loop()
{
	// Main consumer loop: waits for incoming frames on the image buffer and
	// feeds them, oldest first, into the SLAM pipeline via newImageCallback().
	// Runs forever; a full reset request drains state via resetAll().
	while (true) {
		// Acquire the buffer mutex and block until either a frame is
		// available or a full reset has been requested.
		boost::unique_lock<boost::recursive_mutex> waitLock(imageStream->getBuffer()->getMutex());
		while (!fullResetRequested && !(imageStream->getBuffer()->size() > 0)) {
			notifyCondition.wait(waitLock);
		}

		if (fullResetRequested)
		{
			resetAll();
			fullResetRequested = false;
			// The buffer may still be empty after a reset; go back to
			// waiting (waitLock releases the mutex via RAII on continue).
			if (!(imageStream->getBuffer()->size() > 0))
				continue;
		}

		// Pop the oldest frame while STILL holding the buffer mutex, so the
		// size() > 0 predicate established above and the first()/popFront()
		// pair are atomic with respect to other threads touching the buffer.
		// (Previously the lock was dropped first, leaving a TOCTOU race.)
		TimestampedMat image = imageStream->getBuffer()->first();
		imageStream->getBuffer()->popFront();
		waitLock.unlock();

		// Process the frame outside the lock so producers are not blocked
		// during (potentially slow) tracking.
		//Util::displayImage("MyVideo", image.data);
		newImageCallback(image.data, image.timestamp);
	}
}
Ejemplo n.º 2
0
// called every time a new frame is available.
// needs to be able to 
// - (finally) roll forward filter
// - query it's state 
// - add a PTAM observation to filter.
void LSDWrapper::HandleFrame()
{
	//printf("tracking Frame at ms=%d (from %d)\n",getMS(ros::Time::now()),mimFrameTime-filter->delayVideo);


	// prep data
	msg = "";
	ros::Time startedFunc = ros::Time::now();

	// reset?
	if(resetLSDRequested)
		ResetInternal();



	// make filter thread-safe.
	// --------------------------- ROLL FORWARD TIL FRAME. This is ONLY done here. ---------------------------
	pthread_mutex_lock( &filter->filter_CS );
	//filter->predictUpTo(mimFrameTime,true, true);
	TooN::Vector<10> filterPosePreLSD = filter->getPoseAtAsVec(mimFrameTime_workingCopy-filter->delayVideo,true);
	pthread_mutex_unlock( &filter->filter_CS );

	// ------------------------ do PTAM -------------------------
	myGLWindow->SetupViewport();
	myGLWindow->SetupVideoOrtho();
	myGLWindow->SetupVideoRasterPosAndZoom();



	// 1. transform with filter
	TooN::Vector<6> LSDPoseGuess = filter->backTransformLSDObservation(filterPosePreLSD.slice<0,6>());
	// 2. convert to se3
	predConvert->setPosRPY(LSDPoseGuess[0], LSDPoseGuess[1], LSDPoseGuess[2], LSDPoseGuess[3], LSDPoseGuess[4], LSDPoseGuess[5]);
	// 3. multiply with rotation matrix	
	TooN::SE3<> LSDPoseGuessSE3 = predConvert->droneToFrontNT * predConvert->globaltoDrone;


	boost::unique_lock<boost::recursive_mutex> waitLock(imageStream->getBuffer()->getMutex());
		while (!fullResetRequested && !(imageStream->getBuffer()->size() > 0)) {
			notifyCondition.wait(waitLock);
		}
		waitLock.unlock();
		
	
	// Track image and get current pose estimate
	/***Note: image is of type imagedata from lsd-slam change it***/
	TooN::SE3<> LSDResultSE3;
	newImageCallback(mimFrameBW_workingCopy.data, mimFrameBW_workingCopy.timestamp);

	LSDResultSE3 = monoOdometry->getCurrentPoseEstimate();
	
	ros::Duration timeLSD= ros::Time::now() - startedLSD;

	TooN::Vector<6> LSDResultSE3TwistOrg = LSDResultSE3.ln();

	node->publishTf(LSDResultSE3, mimFrameTimeRos_workingCopy, mimFrameSEQ_workingCopy,"cam_front");


	// 1. multiply from left by frontToDroneNT.
	// 2. convert to xyz,rpy
	predConvert->setPosSE3_globalToDrone(predConvert->frontToDroneNT * LSDResultSE3);
	TooN::Vector<6> LSDResult = TooN::makeVector(predConvert->x, predConvert->y, predConvert->z, predConvert->roll, predConvert->pitch, predConvert->yaw);

	// 3. transform with filter
	TooN::Vector<6> LSDResultTransformed = filter->transformLSDObservation(LSDResult);

	//Init failed code removed



	// --------------------------- assess result ------------------------------
	bool isGood = true;
	bool diverged = false;
	
	// calculate absolute differences.
	TooN::Vector<6> diffs = LSDResultTransformed - filterPosePreLSD.slice<0,6>();
	for(int i=0;1<1;i++) diffs[i] = abs(diffs[i]);

	if(filter->getNumGoodLSDObservations() < 10 && monoOdometry->IsGood())
	{
		isGood = true;
	}

	//If the last tracking step result is lost
	else if(lsdTracker->lastStepResult == LOST)
		isGood = false;
		diverged = true;

	else
	{