Example #1
 bool operator()(I1 begin1, S1 end1, I2 begin2, S2 end2,
     C pred_ = C{}, P1 proj1_ = P1{}, P2 proj2_ = P2{}) const
 {
     auto &&pred = as_function(pred_);
     auto &&proj1 = as_function(proj1_);
     auto &&proj2 = as_function(proj2_);
     for(; begin2 != end2; ++begin1)
     {
         if(begin1 == end1 || pred(proj2(*begin2), proj1(*begin1)))
             return false;
         if(!pred(proj1(*begin1), proj2(*begin2)))
             ++begin2;
     }
     return true;
 }
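A minimal standalone sketch (not taken from the library's sources): the loop above has the same observable behaviour as std::includes from <algorithm>, which reports whether every element of one sorted range also occurs in another sorted range.

#include <algorithm>
#include <cassert>
#include <vector>

int main()
{
    std::vector<int> haystack{1, 2, 3, 4, 5, 6};
    std::vector<int> needle{2, 4, 6};
    // true: every element of needle appears in haystack (both ranges sorted)
    assert(std::includes(haystack.begin(), haystack.end(),
                         needle.begin(), needle.end()));
}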
Example #2
bool collides(Entity* const entityOne, Entity* const entityTwo)
{//Uses the separating axis theorem (SAT) to detect collisions between two entities
	//Projection getProjection(const sf::Vector2f&, const Square&);
	std::vector<sf::Vector2f> axes1(entityOne->getVertexCount());
	std::vector<sf::Vector2f> axes2(entityTwo->getVertexCount());

	for (int i(0); i < entityOne->getVertexCount(); ++i)
	{//Loop through the first shape and get all normals to each side
		int index = ((i + 1) == entityOne->getVertexCount()) ? 0 : i + 1;

		axes1[i] = entityOne->getNormal(i, index);
	}

	for (int i(0); i < entityTwo->getVertexCount(); ++i)
	{//Loop through the second shape and get all normals to each side
		int index = ((i + 1) == entityTwo->getVertexCount()) ? 0 : i + 1;

		axes2[i] = entityTwo->getNormal(i, index);
	}

	for (int i(0); i < axes1.size(); ++i)
	{//Project both shapes onto each of shape1's axes and check for a gap
		sf::Vector2f normal(axes1[i]);

		SATProjection proj1(getProjection(normal, entityOne)); //min and max of the first shape's projection onto the axis
		SATProjection proj2(getProjection(normal, entityTwo)); //min and max of the second shape's projection onto the axis


		if (!overlaps(proj1, proj2))
			return false;
	}

	for (int i(0); i < axes2.size(); ++i)
	{//Project both shapes onto each of shape2's axes and check for a gap
		sf::Vector2f normal(axes2[i]);

		SATProjection proj1(getProjection(normal, entityOne)); //min and max of the first shape's projection onto the axis
		SATProjection proj2(getProjection(normal, entityTwo)); //min and max of the second shape's projection onto the axis


		if (!overlaps(proj1, proj2))
			return false;
	}

	return true;
}
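The example above relies on helpers that are not shown (SATProjection, getProjection, overlaps). A sketch of what they might look like, assuming SATProjection simply stores the minimum and maximum dot products of the vertices with the axis; the names and signatures below are hypothetical.

#include <SFML/System/Vector2.hpp>
#include <vector>

struct SATProjection { float min, max; };

// Project a polygon's vertices onto an axis and keep the extreme values.
SATProjection project(const sf::Vector2f& axis, const std::vector<sf::Vector2f>& vertices)
{
    float d = axis.x * vertices[0].x + axis.y * vertices[0].y;
    SATProjection p{d, d};
    for (const sf::Vector2f& v : vertices)
    {
        d = axis.x * v.x + axis.y * v.y;
        if (d < p.min) p.min = d;
        if (d > p.max) p.max = d;
    }
    return p;
}

// Two intervals on the axis overlap unless one ends before the other begins.
bool overlaps(const SATProjection& a, const SATProjection& b)
{
    return a.max >= b.min && b.max >= a.min;
}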
Example #3
 bool lexicographical_compare(I1 first1, S1 last1, I2 first2, S2 last2,
                              Comp comp_ = Comp{}, Proj1 proj1_ = Proj1{},
                              Proj2 proj2_ = Proj2{}) {
   auto&& comp = __stl2::as_function(comp_);
   auto&& proj1 = __stl2::as_function(proj1_);
   auto&& proj2 = __stl2::as_function(proj2_);
   for (; first1 != last1 && first2 != last2; ++first1, ++first2) {
     if (comp(proj1(*first1), proj2(*first2))) {
       return true;
     }
     if (comp(proj2(*first2), proj1(*first1))) {
       return false;
     }
   }
   return first1 == last1 && first2 != last2;
 }
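Usage sketch: the projection-free standard equivalent is std::lexicographical_compare, shown here on plain strings.

#include <algorithm>
#include <cassert>
#include <string>

int main()
{
    std::string a = "apple", b = "apricot";
    // "apple" < "apricot" because 'p' < 'r' at the first differing position
    assert(std::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end()));
}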
Example #4
            static bool four_iter_impl(I1 begin1, S1 end1, I2 begin2, S2 end2, C pred_, P1 proj1_,
                P2 proj2_)
            {
                auto &&pred = as_function(pred_);
                auto &&proj1 = as_function(proj1_);
                auto &&proj2 = as_function(proj2_);
                // shorten sequences as much as possible by lopping off any equal parts
                for(; begin1 != end1 && begin2 != end2; ++begin1, ++begin2)
                    if(!pred(proj1(*begin1), proj2(*begin2)))
                        goto not_done;
                return begin1 == end1 && begin2 == end2;
            not_done:
                // begin1 != end1 && begin2 != end2 && *begin1 != *begin2
                auto l1 = distance(begin1, end1);
                auto l2 = distance(begin2, end2);
                if(l1 != l2)
                    return false;

                // For each element in [f1, l1) see if there are the same number of
                //    equal elements in [f2, l2)
                for(I1 i = begin1; i != end1; ++i)
                {
                    // Have we already counted the number of *i in [f1, l1)?
                    for(I1 j = begin1; j != i; ++j)
                        if(pred(proj1(*j), proj1(*i)))
                            goto next_iter;
                    {
                        // Count number of *i in [f2, l2)
                        iterator_difference_t<I1> c2 = 0;
                        for(I2 j = begin2; j != end2; ++j)
                            if(pred(proj1(*i), proj2(*j)))
                                ++c2;
                        if(c2 == 0)
                            return false;
                        // Count number of *i in [i, l1) (we can start with 1)
                        iterator_difference_t<I1> c1 = 1;
                        for(I1 j = next(i); j != end1; ++j)
                            if(pred(proj1(*i), proj1(*j)))
                                ++c1;
                        if(c1 != c2)
                            return false;
                    }
            next_iter:;
                }
                return true;
            }
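Example #4 is the classic four-iterator permutation test. A minimal usage sketch with the standard equivalent, the four-argument std::is_permutation (C++14):

#include <algorithm>
#include <cassert>
#include <vector>

int main()
{
    std::vector<int> a{1, 2, 2, 3};
    std::vector<int> b{3, 2, 1, 2};
    // true: b contains the same elements as a, with the same multiplicities
    assert(std::is_permutation(a.begin(), a.end(), b.begin(), b.end()));
}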
Example #5
void reconstruct(Mat& K, Mat& R, Mat& T, vector<Point2f>& p1, vector<Point2f>& p2, Mat& structure)
{
    Mat proj1(3, 4, CV_32FC1);
    Mat proj2(3, 4, CV_32FC1);

    // First camera at the world origin: [I|0]
    proj1(Range(0, 3), Range(0, 3)) = Mat::eye(3, 3, CV_32FC1);
    proj1.col(3) = Mat::zeros(3, 1, CV_32FC1);

    // Second camera: [R|T]
    R.convertTo(proj2(Range(0, 3), Range(0, 3)), CV_32FC1);
    T.convertTo(proj2.col(3), CV_32FC1);

    Mat fK;
    K.convertTo(fK, CV_32FC1);
    proj1 = fK*proj1;
    proj2 = fK*proj2;

    triangulatePoints(proj1, proj2, p1, p2, structure);
}
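cv::triangulatePoints writes 4xN homogeneous coordinates into structure. A sketch of the normalisation a caller typically performs afterwards (the helper name is mine; CV_32F is assumed, matching the float inputs above):

#include <opencv2/core/core.hpp>

void homogeneousToEuclideanInPlace(cv::Mat& structure)
{
    // Divide each column by its fourth component to obtain Euclidean 3D points.
    for (int i = 0; i < structure.cols; ++i)
    {
        float w = structure.at<float>(3, i);
        for (int r = 0; r < 4; ++r)
            structure.at<float>(r, i) /= w;
    }
}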
Example #6
            T operator()(I1 begin1, S1 end1, I2 begin2, S2 end2, T init, BOp1 bop1_ = BOp1{},
                BOp2 bop2_ = BOp2{}, P1 proj1_ = P1{}, P2 proj2_ = P2{}) const
            {
                auto &&bop1 = as_function(bop1_);
                auto &&bop2 = as_function(bop2_);
                auto &&proj1 = as_function(proj1_);
                auto &&proj2 = as_function(proj2_);

                for (; begin1 != end1 && begin2 != end2; ++begin1, ++begin2)
                  init = bop1(init, bop2(proj1(*begin1), proj2(*begin2)));
                return init;
            }
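Example #6 folds two ranges with a pair of binary operations, i.e. a generalised inner product. A usage sketch with the standard std::inner_product from <numeric> (which takes only the first range's bounds):

#include <cassert>
#include <numeric>
#include <vector>

int main()
{
    std::vector<int> a{1, 2, 3};
    std::vector<int> b{4, 5, 6};
    // 1*4 + 2*5 + 3*6 = 32
    assert(std::inner_product(a.begin(), a.end(), b.begin(), 0) == 32);
}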
Example #7
 tagged_pair<tag::in1(I1), tag::in2(I2)>
 operator()(I1 begin1, S1 end1, I2 begin2, S2 end2, C pred_ = C{}, P1 proj1_ = P1{},
     P2 proj2_ = P2{}) const
 {
     auto &&pred = as_function(pred_);
     auto &&proj1 = as_function(proj1_);
     auto &&proj2 = as_function(proj2_);
     for(; begin1 != end1 &&  begin2 != end2; ++begin1, ++begin2)
         if(!pred(proj1(*begin1), proj2(*begin2)))
             break;
     return {begin1, begin2};
 }
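Examples #7, #9 and #11 are variations of mismatch. A short usage sketch with the standard four-iterator std::mismatch (C++14):

#include <algorithm>
#include <cassert>
#include <string>

int main()
{
    std::string a = "rangers", b = "ranges";
    // The ranges agree on "range" and diverge at 'r' vs 's'.
    auto result = std::mismatch(a.begin(), a.end(), b.begin(), b.end());
    assert(*result.first == 'r' && *result.second == 's');
}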
Example #8
            T operator()(I1 begin1, S1 end1, I2 begin2, T init, BOp1 bop1_ = BOp1{},
                BOp2 bop2_ = BOp2{}, P1 proj1_ = P1{}, P2 proj2_ = P2{}) const
            {
                auto &&bop1 = invokable(bop1_);
                auto &&bop2 = invokable(bop2_);
                auto &&proj1 = invokable(proj1_);
                auto &&proj2 = invokable(proj2_);

                for (; begin1 != end1; ++begin1, ++begin2)
                  init = bop1(init, bop2(proj1(*begin1), proj2(*begin2)));
                return init;
            }
Example #9
 std::pair<I1, I2>
 operator()(I1 begin1, S1 end1, I2 begin2, C pred_ = C{}, P1 proj1_ = P1{},
     P2 proj2_ = P2{}) const
 {
     auto &&pred = as_function(pred_);
     auto &&proj1 = as_function(proj1_);
     auto &&proj2 = as_function(proj2_);
     for(; begin1 != end1; ++begin1, ++begin2)
         if(!pred(proj1(*begin1), proj2(*begin2)))
             break;
     return {begin1, begin2};
 }
Example #10
File: includes.hpp Project: respu/cmcstl2
  bool includes(I1 first1, S1 last1, I2 first2, S2 last2, Comp comp_ = Comp{},
                Proj1 proj1_ = Proj1{}, Proj2 proj2_ = Proj2{}) {
    auto&& comp = __stl2::as_function(comp_);
    auto&& proj1 = __stl2::as_function(proj1_);
    auto&& proj2 = __stl2::as_function(proj2_);

    while (true) {
      if (first2 == last2) {
        return true;
      }
      if (first1 == last1) {
        return false;
      }
      if (comp(proj2(*first2), proj1(*first1))) {
        return false;
      }
      if (!comp(proj1(*first1), proj2(*first2))) {
        ++first2;
      }
      ++first1;
    }
  }
Example #11
File: mismatch.hpp Project: respu/cmcstl2
  mismatch(I1 first1, S1 last1, I2 first2, Pred pred_ = Pred{},
           Proj1 proj1_ = Proj1{}, Proj2 proj2_ = Proj2{}) {
    auto&& pred = __stl2::as_function(pred_);
    auto&& proj1 = __stl2::as_function(proj1_);
    auto&& proj2 = __stl2::as_function(proj2_);

    for (; first1 != last1; ++first1, ++first2) {
      if (!pred(proj1(*first1), proj2(*first2))) {
        break;
      }
    }
    return {first1, first2};
  }
Example #12
	PxGeometry& Camera::getPixelFrustum(FDreal pixelXSize, FDreal pixelYSize) {
		
		if (pixelFrustum.isValid()) {
			return pixelFrustum;
		}

		Vector3 proj1(-pixelXSize / 2, -pixelYSize / 2, 1);
		Vector3 proj2(-pixelXSize / 2, pixelYSize / 2, 1);
		Vector3 proj3(pixelXSize / 2, -pixelYSize / 2, 1);
		Vector3 proj4(pixelXSize / 2, pixelYSize / 2, 1);

		fdmath::Matrix44 projInverse;
		fdmath::gluInvertMatrix44(projection, projInverse);

		FDreal len = -100.0f;
		Vector3 view1 = projInverse.transform(proj1).getNormalized() * len;
		Vector3 view2 = projInverse.transform(proj2).getNormalized() * len;
		Vector3 view3 = projInverse.transform(proj3).getNormalized() * len;
		Vector3 view4 = projInverse.transform(proj4).getNormalized() * len;

		static const PxVec3 convexVerts[] = {PxVec3(0,0,0), view1, 
				view2, view3, view4};

		PhysicsSystem* physics = FreeThread__getWorld().
				getSystem<PhysicsSystem>();

		PxConvexMeshDesc convexDesc;
		convexDesc.points.count     = 5;
		convexDesc.points.stride    = sizeof(PxVec3);
		convexDesc.points.data      = convexVerts;
		convexDesc.flags            = PxConvexFlag::eCOMPUTE_CONVEX;
		convexDesc.vertexLimit      = 256;

		PxDefaultMemoryOutputStream buf;
		if (!physics->cooking->cookConvexMesh(convexDesc, buf)) {
			FD_THROW(GenericException("Unable to cook convex pixel mesh!"));
		}
		PxDefaultMemoryInputData input(buf.getData(), buf.getSize());
		PxConvexMesh* convexMesh = physics->physics->createConvexMesh(input);

		pixelFrustum = PxConvexMeshGeometry(convexMesh);

		return pixelFrustum;
	}
Example #13
File: TestProjection.cpp Project: ALX5/PJS
cv::Mat TestProjection::test(double userX, double userY, double userZ, 
        const char* filename) {

    //Coordinates of the projection in the real world
    /*cv::Point3f p11(-480, 735, -420);
    cv::Point3f p12(0, 935, 0);
    cv::Point3f p13(0, 220, 0);
    cv::Point3f p14(-480, 240, -420);
    Plane3d proj1(p11, p12, p13, p14);

    cv::Point3f p21(0, 935, 0);
    cv::Point3f p22(480, 735, -420);
    cv::Point3f p23(480, 240, -420);
    cv::Point3f p24(0, 220, 0);
    Plane3d proj2(p21, p22, p23, p24);*/

    cv::Point3f p11(-590, 725, -350);
    cv::Point3f p12(0, 955, 0);
    cv::Point3f p13(0, 200, 0);
    cv::Point3f p14(-590, 227, -350);
    Plane3d proj1(p11, p12, p13, p14);

    cv::Point3f p21(0, 955, 0);
    cv::Point3f p22(567, 755, -350);
    cv::Point3f p23(567, 227, -350);
    cv::Point3f p24(0, 200, 0);
    Plane3d proj2(p21, p22, p23, p24);

    std::vector<Plane3d> planes;
    planes.push_back(proj1);
    planes.push_back(proj2);

    Projection proj(planes);

    //    proj.print();

    //Create the user with the obtained projection coordinates
    User u(proj);

    //Update his position
    u.updatePosition(userX, userY, userZ);
    //    u.print();

    //Create the distorted-corrected plane pairs, using the projections
    //on the user's view plane
    //Plane 1
    //****************************************************************************************************
    Plane2d p1 = u.getProjectedPlanes().at(0).to2d();
    Plane2d p2(cv::Point2f(0, 0), cv::Point2f(480, 0), cv::Point2f(480, 540), cv::Point2f(0, 540));
//    Plane2d p2(cv::Point2f(0, 0), cv::Point2f(230, 0), cv::Point2f(230, 520), cv::Point2f(0, 520));
//    Plane2d p2(cv::Point2f(0, 0), cv::Point2f(270, 0), cv::Point2f(270, 405), cv::Point2f(0, 405));
    //****************************************************************************************************
    //Invert the plane y coordinates
    Plane2d inv1 = p1.yInverted();
    //Move it so that it's closer to the target plane
    cv::Vec2f dist = pjs::distance(inv1, p2);
    Plane2d pp1(cv::Point2f(inv1.getPoint(0).x - dist[0], inv1.getPoint(0).y - dist[1]),
            cv::Point2f(inv1.getPoint(1).x - dist[0], inv1.getPoint(1).y - dist[1]),
            cv::Point2f(inv1.getPoint(2).x - dist[0], inv1.getPoint(2).y - dist[1]),
            cv::Point2f(inv1.getPoint(3).x - dist[0], inv1.getPoint(3).y - dist[1]));

    //Plane 2
    //****************************************************************************************************
    Plane2d p3 = u.getProjectedPlanes().at(1).to2d();
    Plane2d p4(cv::Point2f(0, 0), cv::Point2f(480, 0), cv::Point2f(480, 540), cv::Point2f(0, 540));
//    Plane2d p4(cv::Point2f(0, 0), cv::Point2f(230, 0), cv::Point2f(230, 520), cv::Point2f(0, 520));
//    Plane2d p4(cv::Point2f(0, 0), cv::Point2f(270, 0), cv::Point2f(270, 405), cv::Point2f(0, 405));
    //****************************************************************************************************
    //Invert the plane y coordinates
    Plane2d inv2 = p3.yInverted();
    //Move it so that it's closer to the target plane
    dist = pjs::distance(inv2, p4);
    Plane2d pp3(cv::Point2f(inv2.getPoint(0).x - dist[0], inv2.getPoint(0).y - dist[1]),
            cv::Point2f(inv2.getPoint(1).x - dist[0], inv2.getPoint(1).y - dist[1]),
            cv::Point2f(inv2.getPoint(2).x - dist[0], inv2.getPoint(2).y - dist[1]),
            cv::Point2f(inv2.getPoint(3).x - dist[0], inv2.getPoint(3).y - dist[1]));



    //***********************
    //Load the target image
    //***********************    
    cv::Mat img = cv::imread(filename, CV_LOAD_IMAGE_COLOR);
    if (!img.data) {
        std::cout << " --(!) Error reading image" << std::endl;
        throw std::exception();
    }

    //Helper object
    Utils utils;

    //Divide the image in two
    //    std::vector<cv::Mat> images = utils.divideImageInTwo(img);

    //Build the surfaces with their reference planes and their corresponding
    //image
    Surface s1(pp1, p2);
    Surface s2(pp3, p4);

    std::vector<Surface*> surfaces;
    surfaces.push_back(&s1);
    surfaces.push_back(&s2);

    int originX;
    int padding;
    int screenWidth = 1280;
    int screenHeight = 800;
    //TODO recursive position correction
    int width1 = s1.getWidth();
    int width2 = s2.getWidth();
    int diffW = width1 - width2;
    if (diffW < 0) {
        originX = screenWidth / 2 - width1;
        padding = 0;
    } else {
        originX = 0 + screenWidth / 2 - width1;
        padding = 0;
    }

    //1st position correction
    cv::Point2f origin(originX, 0);
    s1.correctBBPosition(origin);
    cv::Point2f s1ur = s1.getUpperRightCorner();    
    s2.correctPosition(s1ur);

    cv::Point2f upperLeft = s2.getUpperLeftCorner();
    cv::Point2f upperRight = s2.getUpperRightCorner();
    double topY;
    if (upperLeft.y < upperRight.y) {
        topY = upperLeft.y;
    } else {
        topY = upperRight.y;
    }
    cv::Size size = utils.getFinalSize(surfaces);
    int diffH = screenHeight - size.height;
    //2nd position correction if necessary (if second plane is still outside)
    if (!(topY < 0)) {
        topY = 0;
    }
    cv::Point2f newOrigin(originX, -topY + diffH / 2);
    s1.correctBBPosition(newOrigin);
    s1ur = s1.getUpperRightCorner();
    s2.correctPosition(s1ur);

    //    cv::Size size = utils.getFinalSize(surfaces);
    size.width += padding;

    size.width = std::max(screenWidth, size.width);
    size.height = screenHeight;

    cv::Size sizeS1(size.width / 2, size.height);

    s1.setSize(sizeS1);
    s2.setSize(size);

    std::vector<cv::Mat> images = utils.divideImageInTwo(img);

    s1.setImage(images.at(0));
    s2.setImage(images.at(1));

    s1.applyHomography();
    s2.applyHomography();
    //        s1.addTransparency();
    //        s2.addTransparency();

    cv::Mat finalImage = utils.getImageFromSurfaces(surfaces);

    surfaces.clear();

    return finalImage;
}
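The Surface helpers used above (setImage, applyHomography, ...) are not shown. As an illustration only, applyHomography presumably wraps a perspective warp of the kind sketched below; the function name and parameters are hypothetical.

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

cv::Mat warpToPlane(const cv::Mat& image,
                    const std::vector<cv::Point2f>& srcQuad,  // 4 corners in the source image
                    const std::vector<cv::Point2f>& dstQuad,  // 4 corners of the target plane
                    const cv::Size& outputSize)
{
    cv::Mat H = cv::getPerspectiveTransform(srcQuad, dstQuad);
    cv::Mat warped;
    cv::warpPerspective(image, warped, H, outputSize);
    return warped;
}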
Example #14
int main ()
{
	SingleParticle2dx::ConfigContainer* config = SingleParticle2dx::ConfigContainer::Instance();
	
	int n = config->getParticleSize();
	
	config->setProjectionMethod(4);
	config->setCacheProjections(false);
	config->setParallelProjection(false);
	config->setProjectionMaskingMethod(0);
	config->setRefinementMethod(0);
	config->setTrialAngleGenerator(4);
	config->setBackprojectionMethod(0);
	config->setLPProjectionRadius(n);
	
	SingleParticle2dx::DataStructures::Reconstruction3d rec3d(n,n,n);
	
	SingleParticle2dx::Utilities::UtilityFunctions::generateInitialModelForInitRef(rec3d);
	
	rec3d.scale(1/1.34);
	
	SingleParticle2dx::Utilities::UtilityFunctions::setProgressBar( 33 );
	
	SingleParticle2dx::DataStructures::ParticleContainer c_dummy;
	rec3d.forceProjectionPreparation(c_dummy);
	
	SingleParticle2dx::DataStructures::Projection2d proj1(n,n);
	SingleParticle2dx::DataStructures::Projection2d proj2(n,n);
	SingleParticle2dx::DataStructures::Projection2d proj3(n,n);
	
	SingleParticle2dx::DataStructures::Orientation o1(0,0,0);
	SingleParticle2dx::DataStructures::Orientation o2(0,90,0);
	SingleParticle2dx::DataStructures::Orientation o3(0,90,90);
	
	rec3d.calculateProjection(o1, proj1);
	rec3d.calculateProjection(o2, proj2);
	rec3d.calculateProjection(o3, proj3);
	
	if(config->getShowSights())
	{	
		proj1.setMidddleTarget();
		proj2.setMidddleTarget();
		proj3.setMidddleTarget();
	}
	
	SingleParticle2dx::Utilities::UtilityFunctions::setProgressBar( 66 );
	
	std::string cont_folder_name = config->getContainerName() + "/Div_output/";
	std::string filename_p1 = cont_folder_name + "init_topview.mrc";
	std::string filename_p2 = cont_folder_name + "init_sideview1.mrc";
	std::string filename_p3 = cont_folder_name + "init_sideview2.mrc";
	
	proj1.writeToFile(filename_p1);
	proj2.writeToFile(filename_p2);
	proj3.writeToFile(filename_p3);
	
	SingleParticle2dx::Utilities::UtilityFunctions::generateImageOutput(filename_p1, "Top View", config->getScriptName(), false, false);
	SingleParticle2dx::Utilities::UtilityFunctions::generateImageOutput(filename_p2, "Side View X", config->getScriptName(), false, false);
	SingleParticle2dx::Utilities::UtilityFunctions::generateImageOutput(filename_p3, "Side View Y", config->getScriptName(), false, false);
	
	rec3d.writeToFile(config->getContainerName() + "/Rec_3d/Init3DFromMRC.map");
	SingleParticle2dx::Utilities::UtilityFunctions::generateImageOutput(config->getContainerName() + "/Rec_3d/Init3DFromMRC.map", "Initial 3D Reference", config->getScriptName(), true, true);
	
	SingleParticle2dx::Utilities::UtilityFunctions::setProgressBar( 100 );
	
	return 0;
}
Example #15
/**
 * @brief Calculates the viewpoint of the newFrame relative to the oldFrames using pcl::estimateRigidTransformationSVD and RANSAC.
 * @return True if a good position has been found.
 */
bool utils::pclRigidTransform(const std::vector<KeyframeContainer>& oldFrames,KeyframeContainer& newFrame)
{
	//fill point clouds with all matches and another set with the best matches
	pcl::PointCloud<pcl::PointXYZ> cloud1,cloud2;
	std::vector<cv::Mat_<double> > points2D,points3D;
	pcl::PointCloud<pcl::PointXYZ> completeCloud1,completeCloud2;
	std::vector<cv::Mat_<double> > completePoints2D,completePoints3D;

	//precalculate the extended projection matrices
	cv::Mat_<double> proj2(3,4,0.0);
	std::vector<cv::Mat_<double> > proj1Collected;
	for(uint i=0;i<oldFrames.size();++i)
	{
		cv::Mat_<double> proj1E;
		if(oldFrames[i].projectionMatrix.rows>0 && oldFrames[i].projectionMatrix.cols>0)
		{
			proj1E=cv::Mat_<double>(4,4,0.0);
			for(int x=0;x<3;x++) for(int y=0;y<4;y++)
			proj1E(x,y)=oldFrames[i].projectionMatrix(x,y);
			proj1E(3,3)=1;
		}

		proj1Collected.push_back(proj1E);
	}

	//save all the matched points and the best matches
	for(uint me=0;me<newFrame.matches.size();++me)
	{
		//search for the best match to the current keypoint by comparing match quality
		uint currentBestMatchIndex=0;
		double currentBestQuality=INFINITY;
		uint tmpIndex=0;
		for(uint you=0;you<newFrame.matches[me].size();++you)
		{
			if(currentBestQuality>newFrame.matches[me][you].second)
			{
				//find the Frame that the keypoint belongs to
				tmpIndex=0;
				while(oldFrames[tmpIndex].ID!=newFrame.matches[me][you].first.val[0])
					++tmpIndex;

				//if this frame hasn't been flagged invalid use this keypoint
				if(!oldFrames[tmpIndex].invalid)
				{
					currentBestMatchIndex=you;
					currentBestQuality=newFrame.matches[me][you].second;
				}
			}
		}

		//calculate all the points from the matches
		for(uint you=0;you<newFrame.matches[me].size();++you)
		{
			//we only know the ID of the frame and need to find the index
			uint currentFrame1Index=0;
			while(oldFrames[currentFrame1Index].ID!=newFrame.matches[me][you].first.val[0])
				++currentFrame1Index;

			//add points only if the frame is valid
			if(!oldFrames[currentFrame1Index].invalid)
			{
				//get the 2D points from both frames
				cv::Point2f p1=oldFrames[currentFrame1Index].keypoints[newFrame.matches[me][you].first.val[1]].pt;
				cv::Point2f p2=newFrame.keypoints[me].pt;
				//get the depth information from both frames
				float depth1=oldFrames[currentFrame1Index].depthImg.image.at<float>(p1.y,p1.x);
				float depth2=newFrame.depthImg.image.at<float>(p2.y,p2.x);

				//if both depths are available and the projection matrix is valid
				if(isnan(depth1)==0 && isnan(depth2)==0 && proj1Collected[currentFrame1Index].rows>0 && proj1Collected[currentFrame1Index].cols>0)
				{
					//calculate the 2D and 3D point of the first frame
					pcl::PointXYZ tmpPoint1;
					cv::Mat_<double> tmpPoint2D(2,1,0.0);
					tmpPoint2D(0,0)=p1.x;
					tmpPoint2D(1,0)=p1.y;
					cv::Mat_<double> tmpPointHomo3D=reprojectImagePointTo3D(tmpPoint2D,oldFrames[currentFrame1Index].cameraCalibration,proj1Collected[currentFrame1Index],depth1);
					//convert 3D point to pcl format
					tmpPoint1.x=tmpPointHomo3D(0,0);
					tmpPoint1.y=tmpPointHomo3D(1,0);
					tmpPoint1.z=tmpPointHomo3D(2,0);

					//calculate the 3D point of the second frame
					pcl::PointXYZ tmpPoint2;
					cv::Mat_<double> tmpP2D(3,1,1.0);
					tmpP2D(0,0)=p2.x;
					tmpP2D(1,0)=p2.y;
					tmpP2D=utils::reprojectImagePointTo2DNormalised(tmpP2D,newFrame.cameraCalibration);
					//convert 3D point to pcl format
					tmpPoint2.x=tmpP2D(0,0)*depth2;
					tmpPoint2.y=tmpP2D(1,0)*depth2;
					tmpPoint2.z=depth2;

					//put the match in the complete point cloud
					completeCloud1.push_back(tmpPoint1);
					completePoints3D.push_back(tmpPointHomo3D);
					completeCloud2.push_back(tmpPoint2);
					completePoints2D.push_back(tmpP2D);

					//if you is the best match put it into the corresponding clouds
					if(you==currentBestMatchIndex)
					{
						cloud1.push_back(tmpPoint1);
						points3D.push_back(tmpPointHomo3D);
						cloud2.push_back(tmpPoint2);
						points2D.push_back(tmpP2D);
					}
				}
			}
		}
	}

#ifndef NDEBUG
	ROS_INFO("cloud size for ransac: %lu",cloud1.points.size());
#endif

	//RANSAC only if there are some points available
	if(cloud1.size()>20)
	{
		Eigen::Matrix4f transformation;
		const double REPROJECTION_THRESHOLD_SQUARE=1e-4;
		const double DESIRED_CONFIDENCE_LOG=std::log(0.01);
		const int MAX_ITERATIONS=200;
		double iterationsNeeded=MAX_ITERATIONS;
		int iteration=0;
		std::vector<int> inliers;
		std::vector<int> bestInliers;
		std::vector<int> randomIndices;
		cv::RNG Rander(std::time(NULL));
		cv::Mat_<double> tP;
		cv::Mat_<double> proj2(3,4,0.0);

		//do adaptive RANSAC on the best match clouds
		while((double)iteration<std::ceil(iterationsNeeded))
		{
			//get random indices
			randomIndices.clear();
			while(randomIndices.size()<3)
			{
				int newIndex=Rander.uniform(0,(int) cloud1.points.size());
				for(uint ind=0;ind<randomIndices.size();ind++)
					if(randomIndices[ind]==newIndex) newIndex=-1;
				if(newIndex>=0) randomIndices.push_back(newIndex);
			}

			//estimate the transform
			pcl::estimateRigidTransformationSVD(cloud1,randomIndices,cloud2,randomIndices,transformation);

			//convert projection matrix to opencv format
			for(int x=0;x<3;x++) for(int y=0;y<4;y++)
				proj2(x,y)=transformation(x,y);

			//estimate the inliers
			inliers.clear();
			for(int i=0;i<(int)points3D.size();i++)
			{
				tP=proj2*points3D[i];
				tP/=tP(2,0);
				tP-=points2D[i];
				if(tP(0,0)*tP(0,0)+tP(1,0)*tP(1,0)<REPROJECTION_THRESHOLD_SQUARE)
					inliers.push_back(i);
			}

			//save the solution if it's better than before
			if(inliers.size()>bestInliers.size())
			{
				bestInliers=inliers;
				iterationsNeeded=DESIRED_CONFIDENCE_LOG/std::log(1-std::pow(((double)bestInliers.size())/((double)cloud1.points.size()),3));
			}
			iteration++;
		}

#ifndef NDEBUG
		ROS_INFO("inlier ratio: %f",((double)bestInliers.size())/((double)cloud1.points.size()));
#endif

		//calculate the final solution from the complete point clouds only if enough inliers have been found and the inlier ratio is high enough
		if(bestInliers.size()>9 && ((double)bestInliers.size())/((double)cloud1.points.size())>0.1)
		{
			//reestimate the best transformation from the reduced point cloud
			pcl::estimateRigidTransformationSVD(cloud1,bestInliers,cloud2,bestInliers,transformation);

			for(int x=0;x<3;x++) for(int y=0;y<4;y++)
				proj2(x,y)=transformation(x,y);

			//find all inliers in the complete point cloud
			inliers.clear();
			for(int i=0;i<(int)completePoints3D.size();i++)
			{
				tP=proj2*completePoints3D[i];
				tP/=tP(2,0);
				tP-=completePoints2D[i];
				if(tP(0,0)*tP(0,0)+tP(1,0)*tP(1,0)<REPROJECTION_THRESHOLD_SQUARE)
					inliers.push_back(i);
			}

			//estimate the transform from the complete cloud
			pcl::estimateRigidTransformationSVD(completeCloud1,inliers,completeCloud2,inliers,transformation);

			for(int x=0;x<3;x++) for(int y=0;y<4;y++)
				proj2(x,y)=transformation(x,y);

			newFrame.projectionMatrix=proj2;
			return true;
		}
	}

	return false;
}
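The adaptive iteration bound recomputed inside the RANSAC loop above follows the standard termination formula N = log(1 - confidence) / log(1 - w^s), with w the current inlier ratio and s = 3 sampled correspondences (DESIRED_CONFIDENCE_LOG = log(0.01) corresponds to 0.99 confidence). A standalone sketch (the function name is mine):

#include <cmath>

// Number of RANSAC iterations needed so that, with the given confidence,
// at least one sample of sampleSize correspondences is outlier-free.
double ransacIterationBound(double inlierRatio, double confidence = 0.99, int sampleSize = 3)
{
    return std::log(1.0 - confidence) / std::log(1.0 - std::pow(inlierRatio, sampleSize));
}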