// Main worker-thread loop: waits for the video stream to start, initializes
// internal state, then processes each new frame via HandleFrame() until
// keepRunning is cleared. Owns and finally deletes myGLWindow.
void LSDWrapper::run()
{
	std::cout << "Waiting for Video" << std::endl;

	// wait for first image; consume it and wait for a second one so that
	// the frame buffers are guaranteed to hold a fully-written image.
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms
	newImageAvailable = false;
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms

	// read image height and width
	// TODO(review): resolution is hard-coded temporarily -- read it from the
	// actual incoming image instead (cf. PTAMWrapper::run, mimFrameBW.size()).
	frameWidth = 640;
	frameHeight = 480;

	ResetInternal();


	snprintf(charBuf,200,"Video resolution: %d x %d",frameWidth,frameHeight);
	ROS_INFO("%s", charBuf);	// never pass a runtime buffer as the format string
	node->publishCommand(std::string("u l ")+charBuf);

	// create window
	Util::displayImage("LSD SLAM Drone Camera Feed", image.data);

	// Framewidth size removed
	changeSizeNextRender = true;

	boost::unique_lock<boost::mutex> lock(new_frame_signal_mutex);

	while(keepRunning)
	{
		if(newImageAvailable)
		{
			newImageAvailable = false;

			// copy to working copy so the producer may overwrite mimFrameBW
			mimFrameBW_workingCopy.copy_from(mimFrameBW);

			// release lock and do the work-intensive stuff.....
			lock.unlock();

			HandleFrame();


			if(changeSizeNextRender)
			{
				myGLWindow->set_size(desiredWindowSize);
				changeSizeNextRender = false;
			}

			// get lock again
			lock.lock();
		}
		else
			new_frame_signal.wait(lock);	// block until the producer signals a new frame
	}

	lock.unlock();
	delete myGLWindow;
}
Exemple #2
0
// Attaches (or re-attaches) this effect to the given BASS channel.
// Returns true on success or when nothing needed to change; on failure,
// iErrorCode holds the BASS error describing why.
bool TEffect::ApplyInternal(bass_p pChannel)
{
	// A removed effect can never be (re)applied.
	if (IsRemoved()) return false;
	bUpdated = true;

	iErrorCode = BASS_OK;

	// Validate the effect type and the target channel handle up front.
	if (eType == BASS_FX_UNKNOWN)
	{
		iErrorCode = BASS_ERROR_ILLPARAM;
		return false;
	}

	if (pChannel == BASS_NULL)
	{
		iErrorCode = BASS_ERROR_HANDLE;
		return false;
	}

	bass_p pPreviousChannel = this->pChannel;

	if (pPreviousChannel == pChannel)
	{
		// Same channel: if the FX is already attached and enabled, we are done.
		if (pFX != BASS_NULL && bEnabled)
		{
			return true;
		}
	}
	else
	{
		// Channel changed: adopt the new channel and detach the FX from the
		// old one before re-attaching below.
		this->pChannel = pChannel;

		if (pPreviousChannel != BASS_NULL && pFX != BASS_NULL)
		{
			BASS_ChannelRemoveFX(pPreviousChannel, pFX);
			pFX = BASS_NULL;
		}
	}

	// Disabled effects are detached rather than attached; that still counts
	// as a successful apply.
	if (!bEnabled)
	{
		RemoveFX();
		return true;
	}

	pFX = BASS_ChannelSetFX(pChannel, eType, 0);
	iErrorCode = BASS_ErrorGetCode();

	if (iErrorCode != BASS_OK)
	{
		RemoveFX();
		return false;
	}

	UpdateInternal();
	ResetInternal();
	return true;
}
Exemple #3
0
// Detaches the FX from its channel (if attached) and clears internal state.
void TEffect::RemoveFX()
{
	// Only detach when both the channel and the FX handle are still valid.
	const bool bCanDetach = (pChannel != BASS_NULL) && (pFX != BASS_NULL);
	if (bCanDetach)
	{
		BASS_ChannelRemoveFX(pChannel, pFX);
	}

	// Reset internal state before invalidating the FX handle.
	ResetInternal();

	pFX = BASS_NULL;
	bUpdated = true;
}
// Main worker-thread loop: waits for the video stream, sizes the GL window
// from the first frame, then processes frames via HandleFrame() until
// keepRunning is cleared. Owns and finally deletes myGLWindow.
void PTAMWrapper::run()
{
	std::cout << "Waiting for Video" << std::endl;

	// wait for first image; consume it and wait for a second one so that
	// the frame buffers are guaranteed to hold a fully-written image.
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms
	newImageAvailable = false;
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms

	// read image height and width
	frameWidth = mimFrameBW.size().x;
	frameHeight = mimFrameBW.size().y;

	ResetInternal();


	snprintf(charBuf,200,"Video resolution: %d x %d",frameWidth,frameHeight);
	ROS_INFO("%s", charBuf);	// never pass a runtime buffer as the format string
	node->publishCommand(std::string("u l ")+charBuf);

	// create window
    myGLWindow = new GLWindow2(CVD::ImageRef(frameWidth,frameHeight), "PTAM Drone Camera Feed", this);
	myGLWindow->set_title("PTAM Drone Camera Feed");

	// small streams are displayed at 2x zoom
	changeSizeNextRender = true;
	if(frameWidth < 640)
		desiredWindowSize = CVD::ImageRef(frameWidth*2,frameHeight*2);
	else
		desiredWindowSize = CVD::ImageRef(frameWidth,frameHeight);

	while(keepRunning)
	{
		if(newImageAvailable)
		{
			HandleFrame();

			if(changeSizeNextRender)
			{
				myGLWindow->set_size(desiredWindowSize);
				changeSizeNextRender = false;
			}
		}
		else
			usleep(2000);	// TODO: really, really, REALLY dirty hack: busy-wait. replace by some proper signal.
	}

	delete myGLWindow;
}
// Destructor: clears all shared weapon-parameter state via ResetInternal().
CWeaponSharedParams::~CWeaponSharedParams()
{
    ResetInternal();
}
// called every time a new frame is available.
// needs to be able to 
// - (finally) roll forward filter
// - query it's state 
// - add a PTAM observation to filter.
// Processes one video frame: rolls the EKF forward, runs PTAM tracking,
// assesses the result quality, feeds the (possibly fake) observation back
// into the filter, estimates scale, updates the shallow map copy, and
// renders HUD/debug output. Must be called from the PTAM worker thread.
void PTAMWrapper::HandleFrame()
{



	// prep data
	msg = "";
	ros::Time startedFunc = ros::Time::now();

	// reset?
	if(resetPTAMRequested)
		ResetInternal();


	// make filter thread-safe.
	// --------------------------- ROLL FORWARD TIL FRAME. This is ONLY done here. ---------------------------
	pthread_mutex_lock( &filter->filter_CS );
	//filter->predictUpTo(mimFrameTime,true, true);
	TooN::Vector<10> filterPosePrePTAM = filter->getPoseAtAsVec(mimFrameTime-filter->delayVideo,true);
	pthread_mutex_unlock( &filter->filter_CS );

	// ------------------------ do PTAM -------------------------
	myGLWindow->SetupViewport();
	myGLWindow->SetupVideoOrtho();
	myGLWindow->SetupVideoRasterPosAndZoom();



	// 1. transform with filter
	TooN::Vector<6> PTAMPoseGuess = filter->backTransformPTAMObservation(filterPosePrePTAM.slice<0,6>());
	// 2. convert to se3
	predConvert->setPosRPY(PTAMPoseGuess[0], PTAMPoseGuess[1], PTAMPoseGuess[2], PTAMPoseGuess[3], PTAMPoseGuess[4], PTAMPoseGuess[5]);
	// 3. multiply with rotation matrix	
	TooN::SE3<> PTAMPoseGuessSE3 = predConvert->droneToFrontNT * predConvert->globaltoDrone;


	// set
	mpTracker->setPredictedCamFromW(PTAMPoseGuessSE3);
	//mpTracker->setLastFrameLost((isGoodCount < -10), (videoFrameID%2 != 0));
	mpTracker->setLastFrameLost((isGoodCount < -20), (mimFrameSEQ%3 == 0));

	// track
	ros::Time startedPTAM = ros::Time::now();
	mpTracker->TrackFrame(mimFrameBW, true);
	TooN::SE3<> PTAMResultSE3 = mpTracker->GetCurrentPose();
	lastPTAMMessage = msg = mpTracker->GetMessageForUser();
	ros::Duration timePTAM= ros::Time::now() - startedPTAM;





	// 1. multiply from left by frontToDroneNT.
	// 2. convert to xyz,rpy
	predConvert->setPosSE3_globalToDrone(predConvert->frontToDroneNT * PTAMResultSE3);
	TooN::Vector<6> PTAMResult = TooN::makeVector(predConvert->x, predConvert->y, predConvert->z, predConvert->roll, predConvert->pitch, predConvert->yaw);

	// 3. transform with filter
	TooN::Vector<6> PTAMResultTransformed = filter->transformPTAMObservation(PTAMResult);




	// init failed?
	if(mpTracker->lastStepResult == mpTracker->I_FAILED)
	{
		ROS_INFO("initializing PTAM failed, resetting!");
		resetPTAMRequested = true;
	}
	if(mpTracker->lastStepResult == mpTracker->I_SECOND)
	{
		PTAMInitializedClock = getMS();
		filter->setCurrentScales(TooN::makeVector(mpMapMaker->initialScaleFactor*1.5,mpMapMaker->initialScaleFactor*1.5,mpMapMaker->initialScaleFactor*1.5));
		mpMapMaker->currentScaleFactor = filter->getCurrentScales()[0];
		ROS_INFO("PTAM initialized!");
		ROS_INFO("initial scale: %f\n",mpMapMaker->initialScaleFactor*1.5);
		node->publishCommand("u l PTAM initialized (took second KF)");
		framesIncludedForScaleXYZ = -1;
		lockNextFrame = true;
		imuOnlyPred->resetPos();
	}
	if(mpTracker->lastStepResult == mpTracker->I_FIRST)
	{
		node->publishCommand("u l PTAM initialization started (took first KF)");
	}






	// --------------------------- assess result ------------------------------
	bool isGood = true;
	bool isVeryGood = true;
	// calculate absolute differences.
	// FIX: the loop condition was "1<1" (never true), so the absolute value was
	// never taken and the threshold checks below saw signed differences.
	// Also use fabs: plain abs would truncate the doubles to int.
	for(int i=0;i<6;i++) diffs[i] = fabs(diffs[i]);


	if(filter->getNumGoodPTAMObservations() < 10 && mpMap->IsGood())
	{
		isGood = true;
		isVeryGood = false;
	}
	else if(mpTracker->lastStepResult == mpTracker->I_FIRST ||
		mpTracker->lastStepResult == mpTracker->I_SECOND || 
		mpTracker->lastStepResult == mpTracker->I_FAILED ||
		mpTracker->lastStepResult == mpTracker->T_LOST ||
		mpTracker->lastStepResult == mpTracker->NOT_TRACKING ||
		mpTracker->lastStepResult == mpTracker->INITIALIZING)
		isGood = isVeryGood = false;
	else
	{
		// some chewy heuristic when to add and when not to.
		bool dodgy = mpTracker->lastStepResult == mpTracker->T_DODGY ||
			mpTracker->lastStepResult == mpTracker->T_RECOVERED_DODGY;

		// if yaw difference too big: something certainly is wrong.
		// maximum difference is 10 + 2*(number of seconds since PTAM observation).
		double maxYawDiff = 10.0 + (getMS()-lastGoodYawClock)*0.002;
		if(maxYawDiff > 20) maxYawDiff = 1000;
		if(false && diffs[5] > maxYawDiff)	// NOTE: check deliberately disabled
			isGood = false;

		if(diffs[5] < 10) 
			lastGoodYawClock = getMS();

		if(diffs[5] > 4.0) 
			isVeryGood = false;

		// if rp difference too big: something certainly is wrong.
		if(diffs[3] > 20 || diffs[4] > 20)
			isGood = false;

		if(diffs[3] > 3 || diffs[4] > 3 || dodgy)
			isVeryGood = false;
	}

	if(isGood)
	{
		if(isGoodCount < 0) isGoodCount = 0;
		isGoodCount++;
	}
	else
	{
		if(isGoodCount > 0) isGoodCount = 0;
		isGoodCount--;
		
		if(mpTracker->lastStepResult == mpTracker->T_RECOVERED_DODGY)
			isGoodCount = std::max(isGoodCount,-2);
		if(mpTracker->lastStepResult == mpTracker->T_RECOVERED_GOOD)
			isGoodCount = std::max(isGoodCount,-5);

	}

	TooN::Vector<10> filterPosePostPTAM;
	// --------------------------- scale estimation & update filter (REDONE) -----------------------------
	// interval length is always between 1s and 2s, to ensure approx. same variances.
	// if interval contained height inconsistency, z-distances are simply both set to zero, which does not generate a bias.
	// otherwise distances are scaled such that height is weighted more.
	// if isGood>=3 && framesIncludedForScale < 0			===> START INTERVAL
	// if 18 <= framesIncludedForScale <= 36 AND isGood>=3	===> ADD INTERVAL, START INTERVAL
	// if framesIncludedForScale > 36						===> set framesIncludedForScale=-1 

	// include!

	// TODO: make sure filter is handled properly with permanent roll-forwards.
	pthread_mutex_lock( &filter->filter_CS );
	if(filter->usePTAM && isGoodCount >= 3)
	{
		filter->addPTAMObservation(PTAMResult,mimFrameTime-filter->delayVideo);
	}
	else
		filter->addFakePTAMObservation(mimFrameTime-filter->delayVideo);

	filterPosePostPTAM = filter->getCurrentPoseSpeedAsVec();
	pthread_mutex_unlock( &filter->filter_CS );

	TooN::Vector<6> filterPosePostPTAMBackTransformed = filter->backTransformPTAMObservation(filterPosePostPTAM.slice<0,6>());


	// if interval is started: add one step.
	int includedTime = mimFrameTime - ptamPositionForScaleTakenTimestamp;
	if(framesIncludedForScaleXYZ >= 0) framesIncludedForScaleXYZ++;

	// if interval is overdue: reset & dont add
	if(includedTime > 3000) 
	{
		framesIncludedForScaleXYZ = -1;
	}

	if(isGoodCount >= 3)
	{
		// filter stuff
		lastScaleEKFtimestamp = mimFrameTime;

		if(includedTime >= 2000 && framesIncludedForScaleXYZ > 1)	// ADD! (if too many, was resetted before...)
		{
			TooN::Vector<3> diffPTAM = filterPosePostPTAMBackTransformed.slice<0,3>() - PTAMPositionForScale;
			bool zCorrupted, allCorrupted;
			TooN::Vector<3> diffIMU = evalNavQue(ptamPositionForScaleTakenTimestamp - filter->delayVideo + filter->delayXYZ,mimFrameTime - filter->delayVideo + filter->delayXYZ,&zCorrupted, &allCorrupted);

			if(!allCorrupted)
			{
				// filtering: z more weight, but only if not corrupted.
				double xyFactor = 0.25;
				double zFactor = zCorrupted ? 0 : 3;
			
				diffPTAM.slice<0,2>() *= xyFactor; diffPTAM[2] *= zFactor;
				diffIMU.slice<0,2>() *= xyFactor; diffIMU[2] *= zFactor;

				filter->updateScaleXYZ(diffPTAM, diffIMU, PTAMResult.slice<0,3>());
				mpMapMaker->currentScaleFactor = filter->getCurrentScales()[0];
			}
			framesIncludedForScaleXYZ = -1;	// causing reset afterwards
		}

		if(framesIncludedForScaleXYZ == -1)	// RESET!
		{
			framesIncludedForScaleXYZ = 0;
			PTAMPositionForScale = filterPosePostPTAMBackTransformed.slice<0,3>();
			//predIMUOnlyForScale->resetPos();	// also resetting z corrupted flag etc. (NOT REquired as reset is done in eval)
			ptamPositionForScaleTakenTimestamp = mimFrameTime;
		}
	}
	

	if(lockNextFrame && isGood)
	{
		filter->scalingFixpoint = PTAMResult.slice<0,3>();
		lockNextFrame = false;	
		//filter->useScalingFixpoint = true;

		snprintf(charBuf,500,"locking scale fixpoint to %.3f %.3f %.3f",PTAMResultTransformed[0], PTAMResultTransformed[1], PTAMResultTransformed[2]);
		ROS_INFO("%s", charBuf);	// never pass a runtime buffer as the format string
		node->publishCommand(std::string("u l ")+charBuf);
	}


	// ----------------------------- Take KF? -----------------------------------
	if(!mapLocked && isVeryGood && (forceKF || mpMap->vpKeyFrames.size() < maxKF || maxKF <= 1))
	{
		mpTracker->TakeKF(forceKF);
		forceKF = false;
	}

	// ---------------- save PTAM status for KI --------------------------------
	if(mpTracker->lastStepResult == mpTracker->NOT_TRACKING)
		PTAMStatus = PTAM_IDLE;
	else if(mpTracker->lastStepResult == mpTracker->I_FIRST ||
		mpTracker->lastStepResult == mpTracker->I_SECOND ||
		mpTracker->lastStepResult == mpTracker->T_TOOK_KF)
		PTAMStatus = PTAM_TOOKKF;
	else if(mpTracker->lastStepResult == mpTracker->INITIALIZING)
		PTAMStatus = PTAM_INITIALIZING;
	else if(isVeryGood)
		PTAMStatus = PTAM_BEST;
	else if(isGood)
		PTAMStatus = PTAM_GOOD;
	else if(mpTracker->lastStepResult == mpTracker->T_DODGY ||
		mpTracker->lastStepResult == mpTracker->T_GOOD)
		PTAMStatus = PTAM_FALSEPOSITIVE;
	else
		PTAMStatus = PTAM_LOST;

/*
	// ------------------------- LED anim --------------------------------
	int sinceLastAnim = clock() - lastAnimSentClock;
	if(sinceLastAnim > 19000) lastAnimSent = ANIM_NONE;
	if(mpTracker->lastStepResult == mpTracker->T_TOOK_KF && (lastAnimSent != ANIM_TOOKKF || sinceLastAnim > 200))	// if kf taken: send respective anim, but only if no kf was sent within 200ms.
	{
		lastAnimSent = ANIM_TOOKKF;
		lastAnimSentClock = clock();
		l->writePipeUI("FORWARD LED blink_orange 10 1\n");
	}
	else if(sinceLastAnim > 500)
	{
		bool sent = true;
		if((PTAMStatus == PTAM_BEST || PTAMStatus == PTAM_GOOD) && lastAnimSent != ANIM_GOOD)
		{
			l->writePipeUI("FORWARD LED green 5 20\n");
			lastAnimSentClock = clock();
			lastAnimSent = ANIM_GOOD;
		}

		if(PTAMStatus == PTAM_INITIALIZING && lastAnimSent != ANIM_INIT)
		{
			l->writePipeUI("FORWARD LED blink_green 5 20\n");
			lastAnimSentClock = clock();
			lastAnimSent = ANIM_INIT;
		}

		if(PTAMStatus == PTAM_LOST && lastAnimSent != ANIM_LOST)
		{
			l->writePipeUI("FORWARD LED red 5 20\n");
			lastAnimSentClock = clock();
			lastAnimSent = ANIM_LOST;
		}

		if(PTAMStatus == PTAM_FALSEPOSITIVE && lastAnimSent != ANIM_FALSEPOS)
		{
			l->writePipeUI("FORWARD LED blink_red 5 20\n");
			lastAnimSentClock = clock();
			lastAnimSent = ANIM_FALSEPOS;
		}
	}
*/
	 
	// ----------------------------- update shallow map --------------------------
	if(!mapLocked)
	{
		pthread_mutex_lock(&shallowMapCS);
		mapPointsTransformed.clear();
		keyFramesTransformed.clear();
		for(unsigned int i=0;i<mpMap->vpKeyFrames.size();i++)
		{
			predConvert->setPosSE3_globalToDrone(predConvert->frontToDroneNT * mpMap->vpKeyFrames[i]->se3CfromW);
			TooN::Vector<6> CamPos = TooN::makeVector(predConvert->x, predConvert->y, predConvert->z, predConvert->roll, predConvert->pitch, predConvert->yaw);
			CamPos = filter->transformPTAMObservation(CamPos);
			predConvert->setPosRPY(CamPos[0], CamPos[1], CamPos[2], CamPos[3], CamPos[4], CamPos[5]);
			keyFramesTransformed.push_back(predConvert->droneToGlobal);
		}
		TooN::Vector<3> PTAMScales = filter->getCurrentScales();
		TooN::Vector<3> PTAMOffsets = filter->getCurrentOffsets().slice<0,3>();
		for(unsigned int i=0;i<mpMap->vpPoints.size();i++)
		{
			TooN::Vector<3> pos = (mpMap->vpPoints)[i]->v3WorldPos;
			pos[0] *= PTAMScales[0];
			pos[1] *= PTAMScales[1];
			pos[2] *= PTAMScales[2];
			pos += PTAMOffsets;
			mapPointsTransformed.push_back(pos);
		}
		pthread_mutex_unlock(&shallowMapCS);
	}



	// ---------------------- output and render! ---------------------------
	// NOTE: the snprintf-at-offset pattern below deliberately overwrites the
	// tail of the previous write to build one fixed-layout status line.
	ros::Duration timeALL = ros::Time::now() - startedFunc;
	if(isVeryGood) snprintf(charBuf,1000,"\nQuality: best            ");
	else if(isGood) snprintf(charBuf,1000,"\nQuality: good           ");
	else snprintf(charBuf,1000,"\nQuality: lost                       ");
	
	snprintf(charBuf+20,800, "scale: %.3f (acc: %.3f)                            ",filter->getCurrentScales()[0],(double)filter->getScaleAccuracy());
	snprintf(charBuf+50,800, "PTAM time: %i ms                            ",(int)(1000*timeALL.toSec()));
	snprintf(charBuf+68,800, "(%i ms total)  ",(int)(1000*timeALL.toSec()));
	if(mapLocked) snprintf(charBuf+83,800, "m.l. ");
	else snprintf(charBuf+83,800, "     ");
	if(filter->allSyncLocked) snprintf(charBuf+88,800, "s.l. ");
	else snprintf(charBuf+88,800, "     ");


	msg += charBuf;

	if(mpMap->IsGood())
	{
		if(drawUI == UI_DEBUG)
		{
			snprintf(charBuf,1000,"\nPTAM Diffs:              ");
			snprintf(charBuf+13,800, "x: %.3f                          ",diffs[0]);
			snprintf(charBuf+23,800, "y: %.3f                          ",diffs[1]);
			snprintf(charBuf+33,800, "z: %.3f                          ",diffs[2]);
			snprintf(charBuf+43,800, "r: %.2f                          ",diffs[3]);
			snprintf(charBuf+53,800, "p: %.2f                          ",diffs[4]);
			snprintf(charBuf+63,800, "y: %.2f",diffs[5]);
			msg += charBuf;


			snprintf(charBuf,1000,"\nPTAM Pose:              ");
			snprintf(charBuf+13,800, "x: %.3f                          ",PTAMResultTransformed[0]);
			snprintf(charBuf+23,800, "y: %.3f                          ",PTAMResultTransformed[1]);
			snprintf(charBuf+33,800, "z: %.3f                          ",PTAMResultTransformed[2]);
			snprintf(charBuf+43,800, "r: %.2f                          ",PTAMResultTransformed[3]);
			snprintf(charBuf+53,800, "p: %.2f                          ",PTAMResultTransformed[4]);
			snprintf(charBuf+63,800, "y: %.2f",PTAMResultTransformed[5]);
			msg += charBuf;


			snprintf(charBuf,1000,"\nPTAM WiggleDist:              ");
			snprintf(charBuf+18,800, "%.3f                          ",mpMapMaker->lastWiggleDist);
			snprintf(charBuf+24,800, "MetricDist: %.3f",mpMapMaker->lastMetricDist);
			msg += charBuf;
		}
	}

	if(drawUI != UI_NONE)
	{
		// render grid
		predConvert->setPosRPY(filterPosePostPTAM[0], filterPosePostPTAM[1], filterPosePostPTAM[2], filterPosePostPTAM[3], filterPosePostPTAM[4], filterPosePostPTAM[5]);

		//renderGrid(predConvert->droneToFrontNT * predConvert->globaltoDrone);
		//renderGrid(PTAMResultSE3);


		// draw HUD
		//if(mod->getControlSystem()->isControlling())
		{
			myGLWindow->SetupViewport();
			myGLWindow->SetupVideoOrtho();
			myGLWindow->SetupVideoRasterPosAndZoom();

			//glDisable(GL_LINE_SMOOTH);
			glLineWidth(2);
			glBegin(GL_LINES);
			glColor3f(0,0,1);

			// crosshair through the image center
			glVertex2f(0,frameHeight/2);
			glVertex2f(frameWidth,frameHeight/2);

			glVertex2f(frameWidth/2,0);
			glVertex2f(frameWidth/2,frameHeight);

			// 1m lines
			glVertex2f(0.25*frameWidth,0.48*frameHeight);
			glVertex2f(0.25*frameWidth,0.52*frameHeight);
			glVertex2f(0.75*frameWidth,0.48*frameHeight);
			glVertex2f(0.75*frameWidth,0.52*frameHeight);
			glVertex2f(0.48*frameWidth,0.25*frameHeight);
			glVertex2f(0.52*frameWidth,0.25*frameHeight);
			glVertex2f(0.48*frameWidth,0.75*frameHeight);
			glVertex2f(0.52*frameWidth,0.75*frameHeight);

			glEnd();
		}


		myGLWindow->DrawCaption(msg);
	}





	lastPTAMResultRaw = PTAMResultSE3; 
	// ------------------------ LOG --------------------------------------
	// log!
	if(node->logfilePTAM != NULL)
	{
		TooN::Vector<3> scales = filter->getCurrentScalesForLog();
		TooN::Vector<3> sums = TooN::makeVector(0,0,0);
		TooN::Vector<6> offsets = filter->getCurrentOffsets();
		pthread_mutex_lock(&(node->logPTAM_CS));
		// log:
		// - filterPosePrePTAM estimated for videoFrameTimestamp-delayVideo.
		// - PTAMResulttransformed estimated for videoFrameTimestamp-delayVideo. (using imu only for last step)
		// - predictedPoseSpeed estimated for lastNfoTimestamp+filter->delayControl	(actually predicting)
		// - predictedPoseSpeedATLASTNFO estimated for lastNfoTimestamp	(using imu only)
		if(node->logfilePTAM != NULL)
			(*(node->logfilePTAM)) << (isGood ? (isVeryGood ? 2 : 1) : 0) << " " <<
				(mimFrameTime-filter->delayVideo) << " " << filterPosePrePTAM[0] << " " << filterPosePrePTAM[1] << " " << filterPosePrePTAM[2] << " " << filterPosePrePTAM[3] << " " << filterPosePrePTAM[4] << " " << filterPosePrePTAM[5] << " " << filterPosePrePTAM[6] << " " << filterPosePrePTAM[7] << " " << filterPosePrePTAM[8] << " " << filterPosePrePTAM[9] << " " <<
				filterPosePostPTAM[0] << " " << filterPosePostPTAM[1] << " " << filterPosePostPTAM[2] << " " << filterPosePostPTAM[3] << " " << filterPosePostPTAM[4] << " " << filterPosePostPTAM[5] << " " << filterPosePostPTAM[6] << " " << filterPosePostPTAM[7] << " " << filterPosePostPTAM[8] << " " << filterPosePostPTAM[9] << " " << 
				PTAMResultTransformed[0] << " " << PTAMResultTransformed[1] << " " << PTAMResultTransformed[2] << " " << PTAMResultTransformed[3] << " " << PTAMResultTransformed[4] << " " << PTAMResultTransformed[5] << " " << 
				scales[0] << " " << scales[1] << " " << scales[2] << " " << 
				offsets[0] << " " << offsets[1] << " " << offsets[2] << " " << offsets[3] << " " << offsets[4] << " " << offsets[5] << " " <<
				sums[0] << " " << sums[1] << " " << sums[2] << " " << 
				videoFramePing << std::endl;

		pthread_mutex_unlock(&(node->logPTAM_CS));
	}

	myGLWindow->swap_buffers();
	myGLWindow->HandlePendingEvents();

}
Exemple #7
0
// One-time EGL initialization: acquires the display, queries extensions,
// binds the requested API (ES or desktop GL), and selects the EGLConfig
// that best matches the requested surface parameters.
void AndroidEGL::InitEGL(APIVariant API)
{
	// make sure we only do this once (it's optionally done early for cooker communication)
	static bool bAlreadyInitialized = false;
	if (bAlreadyInitialized)
	{
		return;
	}
	bAlreadyInitialized = true;

	check(PImplData->eglDisplay == EGL_NO_DISPLAY);	// FIX: missing statement terminator
	PImplData->eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
	checkf(PImplData->eglDisplay, TEXT(" eglGetDisplay error : 0x%x "), eglGetError());
	
	EGLBoolean result = eglInitialize(PImplData->eglDisplay, 0, 0);
	checkf( result == EGL_TRUE, TEXT("eglInitialize error: 0x%x "), eglGetError());	// FIX: message typo "elgInitialize"

	// Get the EGL Extension list to determine what is supported
	FString Extensions = ANSI_TO_TCHAR( eglQueryString( PImplData->eglDisplay, EGL_EXTENSIONS));

	FPlatformMisc::LowLevelOutputDebugStringf( TEXT("EGL Extensions: \n%s" ), *Extensions );

	bSupportsKHRCreateContext = Extensions.Contains(TEXT("EGL_KHR_create_context"));
	bSupportsKHRSurfacelessContext = Extensions.Contains(TEXT("EGL_KHR_surfaceless_context"));

	if (API == AV_OpenGLES)
	{
		result = eglBindAPI(EGL_OPENGL_ES_API);
	}
	else if (API == AV_OpenGLCore)
	{
		result = eglBindAPI(EGL_OPENGL_API);
	}
	else
	{
		checkf( 0, TEXT("Attempt to initialize EGL with unexpected API type"));	// FIX: message typo "unedpected"
	}

	checkf( result == EGL_TRUE, TEXT("eglBindAPI error: 0x%x "), eglGetError());

#if ENABLE_CONFIG_FILTER

	// First call counts matching configs, second call retrieves them.
	EGLConfig* EGLConfigList = NULL;
	result = eglChooseConfig(PImplData->eglDisplay, Attributes, NULL, 0, &PImplData->eglNumConfigs);
	if (result)
	{
		int NumConfigs = PImplData->eglNumConfigs;
		EGLConfigList = new EGLConfig[NumConfigs];
		result = eglChooseConfig(PImplData->eglDisplay, Attributes, EGLConfigList, NumConfigs, &PImplData->eglNumConfigs);
	}
	if (!result)
	{
		ResetInternal();
	}

	checkf(result == EGL_TRUE , TEXT(" eglChooseConfig error: 0x%x"), eglGetError());

	checkf(PImplData->eglNumConfigs != 0  ,TEXT(" eglChooseConfig num EGLConfigLists is 0 . error: 0x%x"), eglGetError());

	// Score every returned config against the requested parameters and keep
	// the best (lowest score wins).
	int ResultValue = 0 ;
	bool haveConfig = false;
	int64 score = LONG_MAX;
	for (uint32_t i = 0; i < PImplData->eglNumConfigs; i++)
	{
		int64 currScore = 0;
		int r, g, b, a, d, s, sb, sc, nvi;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_RED_SIZE, &ResultValue); r = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_GREEN_SIZE, &ResultValue); g = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_BLUE_SIZE, &ResultValue); b = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_ALPHA_SIZE, &ResultValue); a = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_DEPTH_SIZE, &ResultValue); d = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_STENCIL_SIZE, &ResultValue); s = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_SAMPLE_BUFFERS, &ResultValue); sb = ResultValue;
		eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_SAMPLES, &ResultValue); sc = ResultValue;

		// Optional, Tegra-specific non-linear depth buffer, which allows for much better
		// effective depth range in relatively limited bit-depths (e.g. 16-bit)
		int bNonLinearDepth = 0;
		if (eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_DEPTH_ENCODING_NV, &ResultValue))
		{
			bNonLinearDepth = (ResultValue == EGL_DEPTH_ENCODING_NONLINEAR_NV) ? 1 : 0;
		}

		// Favor EGLConfigLists by RGB, then Depth, then Non-linear Depth, then Stencil, then Alpha
		currScore = 0;
		currScore |= ((int64)FPlatformMath::Min(FPlatformMath::Abs(sb - PImplData->Parms.sampleBuffers), 15)) << 29;
		currScore |= ((int64)FPlatformMath::Min(FPlatformMath::Abs(sc - PImplData->Parms.sampleSamples), 31)) << 24;
		currScore |= FPlatformMath::Min(
						FPlatformMath::Abs(r - PImplData->Parms.redSize) +
						FPlatformMath::Abs(g - PImplData->Parms.greenSize) +
						FPlatformMath::Abs(b - PImplData->Parms.blueSize), 127) << 17;
		currScore |= FPlatformMath::Min(FPlatformMath::Abs(d - PImplData->Parms.depthSize), 63) << 11;
		currScore |= FPlatformMath::Min(FPlatformMath::Abs(1 - bNonLinearDepth), 1) << 10;
		currScore |= FPlatformMath::Min(FPlatformMath::Abs(s - PImplData->Parms.stencilSize), 31) << 6;
		currScore |= FPlatformMath::Min(FPlatformMath::Abs(a - PImplData->Parms.alphaSize), 31) << 0;

#if ENABLE_EGL_DEBUG
		LogConfigInfo(EGLConfigList[i]);
#endif

		if (currScore < score || !haveConfig)
		{
			PImplData->eglConfigParam	= EGLConfigList[i];
			PImplData->DepthSize = d;		// store depth/stencil sizes
			haveConfig	= true;
			score		= currScore;
			eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[i], EGL_NATIVE_VISUAL_ID, &ResultValue);PImplData->NativeVisualID = ResultValue;
		}
	}
	check(haveConfig);
	delete[] EGLConfigList;
#else

	// No filtering: take the first matching config.
	EGLConfig EGLConfigList[1];
	if(!( result = eglChooseConfig(PImplData->eglDisplay, Attributes, EGLConfigList, 1,  &PImplData->eglNumConfigs)))
	{
		ResetInternal();
	}

	checkf(result == EGL_TRUE , TEXT(" eglChooseConfig error: 0x%x"), eglGetError());

	checkf(PImplData->eglNumConfigs != 0  ,TEXT(" eglChooseConfig num EGLConfigLists is 0 . error: 0x%x"), eglGetError());
	PImplData->eglConfigParam	= EGLConfigList[0];
	int ResultValue = 0 ;
	eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[0], EGL_DEPTH_SIZE, &ResultValue); PImplData->DepthSize = ResultValue;
	eglGetConfigAttrib(PImplData->eglDisplay, EGLConfigList[0], EGL_NATIVE_VISUAL_ID, &ResultValue);PImplData->NativeVisualID = ResultValue;
#endif
}
Exemple #8
0
// Creates the main window surface for the given native window, queries its
// dimensions, and creates an auxiliary pbuffer surface of the same size.
// No-op if a surface already exists (possible early initialization).
void AndroidEGL::CreateEGLSurface(ANativeWindow* InWindow)
{
	// due to possible early initialization, don't redo this
	if (PImplData->eglSurface != EGL_NO_SURFACE)
	{
		return;
	}

	//need ANativeWindow
	PImplData->eglSurface = eglCreateWindowSurface(PImplData->eglDisplay, PImplData->eglConfigParam,InWindow, NULL);

	if(PImplData->eglSurface == EGL_NO_SURFACE )
	{
		// NOTE: the checkf condition is always false inside this branch; it
		// exists to report the EGL error code before ResetInternal() runs.
		checkf(PImplData->eglSurface != EGL_NO_SURFACE, TEXT("eglCreateWindowSurface error : 0x%x"), eglGetError());
		ResetInternal();
	}

	// On some Android devices, eglChooseConfigs will lie about valid configurations (specifically 32-bit color)
	/*	if (eglGetError() == EGL10.EGL_BAD_MATCH)
	{
	Logger.LogOut("eglCreateWindowSurface FAILED, retrying with more restricted context");

	// Dump what's already been initialized
	cleanupEGL();

	// Reduce target color down to 565
	eglAttemptedParams.redSize = 5;
	eglAttemptedParams.greenSize = 6;
	eglAttemptedParams.blueSize = 5;
	eglAttemptedParams.alphaSize = 0;
	initEGL(eglAttemptedParams);

	// try again
	eglSurface = eglCreateWindowSurface(PImplData->eglDisplay, eglConfig, surface, null);
	}

	*/
	// Cache the surface dimensions; both queries must succeed.
	EGLBoolean result = EGL_FALSE;
	if (!( result =  ( eglQuerySurface(PImplData->eglDisplay, PImplData->eglSurface, EGL_WIDTH, &PImplData->eglWidth) && eglQuerySurface(PImplData->eglDisplay, PImplData->eglSurface, EGL_HEIGHT, &PImplData->eglHeight) ) ) )
	{
		ResetInternal();
	}

	checkf(result == EGL_TRUE, TEXT("eglQuerySurface error : 0x%x"), eglGetError());

	// Auxiliary off-screen pbuffer surface; the placeholder 1x1 dimensions
	// are overwritten with the real window size just below.
	EGLint pbufferAttribs[] =
	{
		EGL_WIDTH, 1,
		EGL_HEIGHT, 1,
		EGL_TEXTURE_TARGET, EGL_NO_TEXTURE,
		EGL_TEXTURE_FORMAT, EGL_NO_TEXTURE,
		EGL_NONE
	};

	// patch in the actual window dimensions (attrib values at indices 1 and 3)
	pbufferAttribs[1] = PImplData->eglWidth;
	pbufferAttribs[3] = PImplData->eglHeight;

	PImplData->auxSurface = eglCreatePbufferSurface(PImplData->eglDisplay, PImplData->eglConfigParam, pbufferAttribs);
	if(PImplData->auxSurface== EGL_NO_SURFACE )
	{
		// Same pattern as above: always-failing checkf reports the error code.
		checkf(PImplData->auxSurface != EGL_NO_SURFACE, TEXT("eglCreatePbufferSurface error : 0x%x"), eglGetError());
		ResetInternal();
	}
}
// called every time a new frame is available.
// needs to be able to 
// - (finally) roll forward filter
// - query it's state 
// - add a PTAM observation to filter.
void LSDWrapper::HandleFrame()
{
	//printf("tracking Frame at ms=%d (from %d)\n",getMS(ros::Time::now()),mimFrameTime-filter->delayVideo);


	// prep data
	msg = "";
	ros::Time startedFunc = ros::Time::now();

	// reset?
	if(resetLSDRequested)
		ResetInternal();



	// make filter thread-safe.
	// --------------------------- ROLL FORWARD TIL FRAME. This is ONLY done here. ---------------------------
	pthread_mutex_lock( &filter->filter_CS );
	//filter->predictUpTo(mimFrameTime,true, true);
	TooN::Vector<10> filterPosePreLSD = filter->getPoseAtAsVec(mimFrameTime_workingCopy-filter->delayVideo,true);
	pthread_mutex_unlock( &filter->filter_CS );

	// ------------------------ do PTAM -------------------------
	myGLWindow->SetupViewport();
	myGLWindow->SetupVideoOrtho();
	myGLWindow->SetupVideoRasterPosAndZoom();



	// 1. transform with filter
	TooN::Vector<6> LSDPoseGuess = filter->backTransformLSDObservation(filterPosePreLSD.slice<0,6>());
	// 2. convert to se3
	predConvert->setPosRPY(LSDPoseGuess[0], LSDPoseGuess[1], LSDPoseGuess[2], LSDPoseGuess[3], LSDPoseGuess[4], LSDPoseGuess[5]);
	// 3. multiply with rotation matrix	
	TooN::SE3<> LSDPoseGuessSE3 = predConvert->droneToFrontNT * predConvert->globaltoDrone;


	boost::unique_lock<boost::recursive_mutex> waitLock(imageStream->getBuffer()->getMutex());
		while (!fullResetRequested && !(imageStream->getBuffer()->size() > 0)) {
			notifyCondition.wait(waitLock);
		}
		waitLock.unlock();
		
	
	// Track image and get current pose estimate
	/***Note: image is of type imagedata from lsd-slam change it***/
	TooN::SE3<> LSDResultSE3;
	newImageCallback(mimFrameBW_workingCopy.data, mimFrameBW_workingCopy.timestamp);

	LSDResultSE3 = monoOdometry->getCurrentPoseEstimate();
	
	ros::Duration timeLSD= ros::Time::now() - startedLSD;

	TooN::Vector<6> LSDResultSE3TwistOrg = LSDResultSE3.ln();

	node->publishTf(LSDResultSE3, mimFrameTimeRos_workingCopy, mimFrameSEQ_workingCopy,"cam_front");


	// 1. multiply from left by frontToDroneNT.
	// 2. convert to xyz,rpy
	predConvert->setPosSE3_globalToDrone(predConvert->frontToDroneNT * LSDResultSE3);
	TooN::Vector<6> LSDResult = TooN::makeVector(predConvert->x, predConvert->y, predConvert->z, predConvert->roll, predConvert->pitch, predConvert->yaw);

	// 3. transform with filter
	TooN::Vector<6> LSDResultTransformed = filter->transformLSDObservation(LSDResult);

	//Init failed code removed



	// --------------------------- assess result ------------------------------
	bool isGood = true;
	bool diverged = false;
	
	// calculate absolute differences.
	TooN::Vector<6> diffs = LSDResultTransformed - filterPosePreLSD.slice<0,6>();
	for(int i=0;1<1;i++) diffs[i] = abs(diffs[i]);

	if(filter->getNumGoodLSDObservations() < 10 && monoOdometry->IsGood())
	{
		isGood = true;
	}

	//If the last tracking step result is lost
	else if(lsdTracker->lastStepResult == LOST)
		isGood = false;
		diverged = true;

	else
	{
Exemple #10
0
// Main worker-thread loop (headless variant: GL window creation is commented
// out): waits for the video stream, then condition-variable-driven frame
// processing via HandleFrame() until keepRunning is cleared.
void PTAMWrapper::run()
{
	std::cout << "Waiting for Video" << std::endl;

	// wait for first image; consume it and wait for a second one so that
	// the frame buffers are guaranteed to hold a fully-written image.
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms
	newImageAvailable = false;
	while(!newImageAvailable)
		usleep(100000);	// sleep 100ms

	// read image height and width
	frameWidth = mimFrameBW.size().x;
	frameHeight = mimFrameBW.size().y;

	ResetInternal();


	snprintf(charBuf,200,"Video resolution: %d x %d",frameWidth,frameHeight);
	ROS_INFO("%s", charBuf);	// never pass a runtime buffer as the format string
	node->publishCommand(std::string("u l ")+charBuf);

	// create window
//    myGLWindow = new GLWindow2(CVD::ImageRef(frameWidth,frameHeight), "PTAM Drone Camera Feed", this);
//	myGLWindow->set_title("PTAM Drone Camera Feed");

	// NOTE(review): the "hein N" couts below look like leftover debug traces;
	// kept to preserve behavior -- consider removing or demoting to a debug log.
    std::cout << "hein 0" << std::endl;
	changeSizeNextRender = true;
	if(frameWidth < 640)
		desiredWindowSize = CVD::ImageRef(frameWidth*2,frameHeight*2);
	else
		desiredWindowSize = CVD::ImageRef(frameWidth,frameHeight);


	boost::unique_lock<boost::mutex> lock(new_frame_signal_mutex);

	std::cout << "hein 1" << std::endl;

	while(keepRunning)
	{
        std::cout << "hein 2" << std::endl;
		if(newImageAvailable)
		{
			newImageAvailable = false;

			// copy to working copy
			mimFrameBW_workingCopy.copy_from(mimFrameBW);
			mimFrameTime_workingCopy = mimFrameTime;
			mimFrameSEQ_workingCopy = mimFrameSEQ;
			mimFrameTimeRos_workingCopy = mimFrameTimeRos;

			std::cout << "hein 3" << std::endl;
			// release lock and do the work-intensive stuff.....
			lock.unlock();

			HandleFrame();


			if(changeSizeNextRender)
			{
				//myGLWindow->set_size(desiredWindowSize);
				changeSizeNextRender = false;
			}
            std::cout << "hein 4" << std::endl;
			// get lock again
			lock.lock();
		}
		else
			new_frame_signal.wait(lock);
	}

	lock.unlock();
	//delete myGLWindow;
}
Exemple #11
0
// Public, thread-safe reset: serializes on MutexLock, then delegates to
// the unsynchronized ResetInternal().
void TEffect::Reset()
{
	lock_guard<mutex> Lock(MutexLock);
	ResetInternal();
}