Exemple #1
0
void PGRKinect::SetIDAndMatrix(int id, cv::Mat_<double> kinect_intr, cv::Mat_<double> pgr_intr){
	//set kinect intrinsic matrix
	ConvertKinect2D3D->setCameraParameters(kinect_intr, Kinect::Width, Kinect::Height);
	//set point grey intrinsic matrix
	ConvertPGR2D3D->setCameraParameters(pgr_intr, PGRCamera::Width, PGRCamera::Height);
	//set id
	ID = id;
	//load rotation and translation
	cv::Mat_<double> R, t, F;
	std::ostringstream PGRKinectfile;
	PGRKinectfile << "./Calibration/PGRKinect_" << ID << ".xml";
	cv::FileStorage cvfs(PGRKinectfile.str(), CV_STORAGE_READ);
	cv::FileNode node(cvfs.fs, NULL);
	cv::read(node["rotation"], R);
	cv::read(node["translation"], t);
	cv::read(node["f_matrix"], F);
	//convert to float
	R.convertTo(Rotation_Host, CV_32F);
	t.convertTo(Translation_Host, CV_32F);
	//set parameter to depth estimator
	DepthEstimator->setParameter(kinect_intr, pgr_intr, R, t, F);

	//calculate epipolar constraint
	DepthEstimator->calcEpiLine();
	
}
	// Save the ground-truth matrices: wraps the two caller-owned 4x4 double
	// buffers (camera / external) without copying and writes them to an
	// OpenCV XML/YAML file under the keys "cameraMatrix" / "externalMatrix".
    __declspec(dllexport) void saveUnityXML(const char* fileName, double* camera, double* external)
    {
		cv::Mat camMat(4, 4, CV_64F, camera);
		cv::Mat extMat(4, 4, CV_64F, external);

		cv::FileStorage storage(fileName, CV_STORAGE_WRITE);
		cv::write(storage, "cameraMatrix", camMat);
		cv::write(storage, "externalMatrix", extMat);
    }
Exemple #3
0
// Interactive capture + calibration for one PointGrey camera and one Kinect.
// In the capture loop, 's' saves a chessboard image from both devices plus
// the Kinect depth map (as XML); 'q' leaves the loop. Afterwards the saved
// data is used to calibrate the PGR<->Kinect pair.
static void PGRKinectCalibration(){ 

	SinglePGRCamera pgr;
	SingleKinect kinect;
	PGRKinect pgr_kinect;
	cv::Mat_<double> depth_matrix = cv::Mat::zeros(kinect.GetSize()->height, kinect.GetSize()->width, CV_64F); 
	// NOTE(review): ShouldRun starts false, so the capture loop below never
	// executes and only the calibration from previously saved files runs —
	// confirm this is intentional (set to true to re-enable capture).
	bool ShouldRun = false;
	char key;
	int count = 0;
	
	while(ShouldRun){
		kinect.UpdateAll();
		pgr.Update();
		cv::imshow("PGRImage", *pgr.GetImage());
		key = cv::waitKey(1);
		cv::imshow("KinectImage", *kinect.GetColorImage());
		key = cv::waitKey(1);
		if(key == 's'){
			// Save the PGR image
			std::ostringstream PGRChessName;
			PGRChessName <<"./Calibration/PointGrey_0/chess"<< count << ".bmp";
			cv::imwrite(PGRChessName.str(), *pgr.GetImage());
			std::cout << PGRChessName.str() << std::endl;
			// Save the Kinect image
			std::ostringstream KinectChessName;
			KinectChessName <<"./Calibration/Kinect_0/chess"<< count << ".bmp";
			cv::imwrite(KinectChessName.str(), *kinect.GetColorImage());
			std::cout << KinectChessName.str() << std::endl;
			// Save the depth map (note: depth metadata is indexed (x, y))
			for(int y=0; y<kinect.GetSize()->height; y++){
				for(int x=0; x<kinect.GetSize()->width; x++){
					depth_matrix.at<double>(y, x) = (*kinect.GetDepthMD())(x, y);
				}
			}
			std::ostringstream filename;
			filename <<"./Calibration/Depth_0/depth"<< count << ".xml";
			cv::FileStorage cvfs(filename.str(), CV_STORAGE_WRITE);
			cv::write(cvfs, "depth_matrix",depth_matrix); 
			std::cout << filename.str() <<std::endl;
			count++;
		}
		if(key == 'q')
			ShouldRun = false;
	}
//	pgr.Calibration();
	// Calibrate the pair from the saved chessboard/depth files.
	pgr_kinect.SetKinect(&kinect);
	pgr_kinect.SetPGRCamera(&pgr);
	pgr_kinect.Calibration();
}
// Load per-display calibration data (window size and the two transformation
// matrices) from each file in DISP_INFO_FILENAMES, and build the cumulative
// horizontal window offsets. Exits the process if no display loads at all.
void MultiCursorAppCpp::loadCalibData()
{
	// Display 0 starts at x = 0; each later display is appended to the right.
	windowOffsetX.push_back(0);

	// Load each display's information file
	for (size_t i = 0; i < DISP_INFO_FILENAMES.size(); ++i)
	{
		FileStorage cvfs(DISP_INFO_FILENAMES[i], CV_STORAGE_READ);
		FileNode node(cvfs.fs, NULL);

		// Load window size
		int winWidth = node["WindowWidth"];
		int winHeight = node["WindowHeight"];
		FileNode fn = node[string("mat_array")];
		
		// Load transformation matrices
		Mat TK2D, TD2P;
		read(fn[0], TK2D);	// Kinect depth camera -> display plane
		*TK2D.ptr<float>(0, 3) *= -1;	// undo the left-right mirroring
		read(fn[1], TD2P);	// display plane -> display pixel image

		if (winWidth > 0 && winHeight > 0 && !TK2D.empty() && !TD2P.empty())
		{
			VEC_WIN_WIDTH.push_back(winWidth);
			VEC_WIN_HEIGHT.push_back(winHeight);
			TKinect2Display.push_back(TK2D);
			TDisplay2Pixel.push_back(TD2P);

			// BUGFIX: was windowOffsetX[i - 1], which underflows (size_t wrap)
			// on the first iteration and indexes the wrong slot whenever an
			// earlier display failed to load. The next offset is always the
			// last stored offset plus this window's width.
			int offsetX = windowOffsetX.back() + winWidth;
			windowOffsetX.push_back(offsetX);
			
			cout << "Succeeded to load display[" << i << "]" << endl;
			cout << "(width, height) = " << winWidth << ", " << winHeight << endl;
			// BUGFIX: was indexed with the file index i, which runs past the
			// end of these vectors if any earlier display failed to load;
			// report the entry that was just stored instead.
			cout << "TKinect2Display: " << endl << TKinect2Display.back() << endl;
			cout << "TDisplay2Pixel: " << endl << TDisplay2Pixel.back() << endl;
		}
		else
		{
			cout << "Failed to load display[" << i << "]" << endl;
		}
	}

	if (VEC_WIN_WIDTH.size() <= 0)
	{
		cout << "Error(loadCalibData): No display information was loaded" << endl;
		exit(0);
	}

}
	// Load the 4x4 perspective-projection and external matrices saved by the
	// calibration step and copy them into the caller-provided flat arrays
	// (16 doubles each, row-major).
	__declspec(dllexport) void loadPerspectiveMatrix(double projectionMatrix[], double externalMatrix[])
    {
		cv::Mat projMat(4, 4, CV_64F);
		cv::Mat extMat(4, 4, CV_64F);

		cv::FileStorage storage("Calibration/calibration.xml", CV_STORAGE_READ);
		storage["cameraMatrix"] >> projMat;
		storage["externalMatrix"] >> extMat;

		// 16 = 4x4 elements, flattened.
		for(int idx = 0; idx < 16; ++idx)
		{
			projectionMatrix[idx] = projMat.at<double>(idx);
			externalMatrix[idx] = extMat.at<double>(idx);
		}
	}
	// Load the external parameters (projector pose relative to the camera):
	// a 3x3 rotation into R[9] and a 3x1 translation into T[3], row-major.
	DLLExport void loadExternalParam(double R[], double T[])
    {
		cv::Mat rot(3, 3, CV_64F);
		cv::Mat trans(3, 1, CV_64F);

		cv::FileStorage storage("Calibration/calibration.xml", CV_STORAGE_READ);
		storage["R"] >> rot;
		storage["T"] >> trans;

		for(int idx = 0; idx < 9; ++idx)
		{
			R[idx] = rot.at<double>(idx);
		}
		for(int idx = 0; idx < 3; ++idx)
		{
			T[idx] = trans.at<double>(idx);
		}
	}
	// Load the camera intrinsic matrix (3x3 into projectionMatrix[9]) and the
	// distortion coefficients (5 values into dist[5]) from the calibration file.
	DLLExport void loadCameraParam(double projectionMatrix[], double dist[])
    {
		cv::Mat intrinsics(3, 3, CV_64F);
		cv::Mat distCoeffs(1, 5, CV_64F);

		cv::FileStorage storage("Calibration/calibration.xml", CV_STORAGE_READ);
		storage["cam_K"] >> intrinsics;
		storage["cam_dist"] >> distCoeffs;

		for(int idx = 0; idx < 9; ++idx)
		{
			projectionMatrix[idx] = intrinsics.at<double>(idx);
		}
		for(int idx = 0; idx < 5; ++idx)
		{
			dist[idx] = distCoeffs.at<double>(idx);
		}
	}
pcl::PointCloud<pcl::PointXYZRGB> Transform3dPoint::convLocalToWorld(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud){
	// Per-DIND parameter file — change this filename for each DIND host.
	cv::FileStorage storage("receivedParam_DIND105-PC.xml", CV_STORAGE_READ);
	cv::FileNode root(storage.fs, NULL);
	cv::FileNode paramSeq = root[std::string("param3D_array")];

	// Read every stored 3D transformation matrix, preserving order.
	std::vector<cv::Mat> transforms;
	for (int idx = 0; idx < (int)paramSeq.size(); ++idx) {
		cv::Mat mat;
		cv::read(paramSeq[idx], mat);
		transforms.push_back(mat);
	}

	// Apply the transforms in sequence, modifying the input cloud in place.
	for (const auto& rt : transforms) {
		pcl::transformPointCloud(*cloud, *cloud, convMatToEigenMat(rt));
	}

	return *cloud;
}
Exemple #9
0
// Load saved HSV threshold settings (center/range for Hue, Saturation and
// Value) for the RED / YELLOW / GREEN color sets from an OpenCV XML/YAML
// file, mark each set as updated, then refresh the trackbars and the mask
// image to reflect the currently selected set.
void TunerBody::openSetting(std::string fileName)
{
  /* open file storage */
  cv::FileStorage cvfs(fileName, CV_STORAGE_READ);

  /* read data from file */
  cv::FileNode topNode(cvfs.fs, NULL);
  {
    // RED: nested maps Hue/Saturation/Value, each with center and range.
    cv::FileNode nd_red = topNode[std::string("RED")];
    {
      cv::FileNode nd_hue = nd_red["Hue"];
      Red_set.hue.center  = nd_hue["center"];
      Red_set.hue.range   = nd_hue["range"];
    }

    {
      cv::FileNode nd_sat = nd_red["Saturation"];
      Red_set.sat.center  = nd_sat["center"];
      Red_set.sat.range   = nd_sat["range"];
    }

    {
      cv::FileNode nd_val = nd_red["Value"];
      Red_set.val.center  = nd_val["center"];
      Red_set.val.range   = nd_val["range"];
    }

    Red_set.isUpdated = true;
  }

  {
    // YELLOW: same layout as RED.
    cv::FileNode nd_yellow = topNode[std::string("YELLOW")];
    {
      cv::FileNode nd_hue   = nd_yellow["Hue"];
      Yellow_set.hue.center = nd_hue["center"];
      Yellow_set.hue.range  = nd_hue["range"];
    }

    {
      cv::FileNode nd_sat   = nd_yellow["Saturation"];
      Yellow_set.sat.center = nd_sat["center"];
      Yellow_set.sat.range  = nd_sat["range"];
    }

    {
      cv::FileNode nd_val   = nd_yellow["Value"];
      Yellow_set.val.center = nd_val["center"];
      Yellow_set.val.range  = nd_val["range"];
    }

    Yellow_set.isUpdated = true;
  }

  {
    // GREEN: same layout as RED.
    cv::FileNode nd_green = topNode[std::string("GREEN")];
    {
      cv::FileNode nd_hue  = nd_green["Hue"];
      Green_set.hue.center = nd_hue["center"];
      Green_set.hue.range  = nd_hue["range"];
    }

    {
      cv::FileNode nd_sat  = nd_green["Saturation"];
      Green_set.sat.center = nd_sat["center"];
      Green_set.sat.range  = nd_sat["range"];
    }

    {
      cv::FileNode nd_val  = nd_green["Value"];
      Green_set.val.center = nd_val["center"];
      Green_set.val.range  = nd_val["range"];
    }

    Green_set.isUpdated = true;
  }

  /* set trackbar position to current status */
  cv::setTrackbarPos("H", windowName, Selected_set->hue.range);
  cv::setTrackbarPos("S", windowName, Selected_set->sat.range);
  cv::setTrackbarPos("V", windowName, Selected_set->val.range);

  /* set mask image to current status */
  cv::Mat hsv_img;
  cv::cvtColor(src_img, hsv_img, cv::COLOR_BGR2HSV);
  colorTrack(hsv_img,
             Selected_set->hue.center,
             Selected_set->sat.center,
             Selected_set->val.center,
             Selected_set->hue.range,
             Selected_set->sat.range,
             Selected_set->val.range,
             &mask);

} /* void TunerBody::openSetting() */
Exemple #10
0
// Save the HSV threshold settings for the RED / YELLOW / GREEN sets to an
// OpenCV XML/YAML file. Any set that was never tuned (isUpdated == false)
// is first filled with default center/range values derived from fixed
// lower/upper bounds. Hue is in degrees (0-360); red wraps around 0, hence
// the "+360 ... % 360" arithmetic below.
void TunerBody::saveResult(std::string fileName)
{

  if (Red_set.isUpdated == false) {
    /*
      ========== Red : Default values ==========
      340               < Hue < 50 (circled)
      DEFAULT_SAT_LOWER < Sat < DEFAULT_SAT_UPPER
      DEFAULT_VAL_LOWER < Val < DEFAULT_VAL_UPPER
    */
    Red_set.hue.center = ( (((360 + 50) - 340 ) / 2 ) + 340) % 360;
    Red_set.hue.range  = (((360 + 50) - 340 ) / 2 );
    Red_set.sat.center = ((DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2 ) + DEFAULT_SAT_LOWER;
    Red_set.sat.range  = (DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2;
    Red_set.val.center = ((DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2 ) + DEFAULT_VAL_LOWER;
    Red_set.val.range  = (DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2;
    std::cout << "Red is default setting" << std::endl;
  }

  if (Yellow_set.isUpdated == false) {
    /*
      ========== Yellow : Default values ==========
      50                < Hue < 70
      DEFAULT_SAT_LOWER < Sat < DEFAULT_SAT_UPPER
      DEFAULT_VAL_LOWER < Val < DEFAULT_VAL_UPPER
     */
    Yellow_set.hue.center = ( ((70 - 50 ) / 2 ) + 50) % 360;
    Yellow_set.hue.range  = ((70 - 50 ) / 2 );
    Yellow_set.sat.center = ((DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2 ) + DEFAULT_SAT_LOWER;
    Yellow_set.sat.range  = (DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2;
    Yellow_set.val.center = ((DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2 ) + DEFAULT_VAL_LOWER;
    Yellow_set.val.range  = (DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2;
    std::cout << "Yellow is default setting" << std::endl;
  }

  if (Green_set.isUpdated == false) {
    /*
      ========== Green : Default values ==========
      80                < Hue < 190
      DEFAULT_SAT_LOWER < Sat < DEFAULT_SAT_UPPER
      DEFAULT_VAL_LOWER < Val < DEFAULT_VAL_UPPER
     */
    Green_set.hue.center = ( ((190 - 80 ) / 2 ) + 80) % 360;
    Green_set.hue.range  = ((190 - 80 ) / 2 );
    Green_set.sat.center = ((DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2 ) + DEFAULT_SAT_LOWER;
    Green_set.sat.range  = (DEFAULT_SAT_UPPER - DEFAULT_SAT_LOWER) / 2;
    Green_set.val.center = ((DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2 ) + DEFAULT_VAL_LOWER;
    Green_set.val.range  = (DEFAULT_VAL_UPPER - DEFAULT_VAL_LOWER) / 2;
    std::cout << "Green is default setting" << std::endl;
  }

  /* open file storage */
  cv::FileStorage cvfs(fileName, CV_STORAGE_WRITE);

  /* write data to file: one map per color, one nested map per channel */
  {
    cv::WriteStructContext st_red(cvfs, "RED", CV_NODE_MAP);
    {
      cv::WriteStructContext st_hue(cvfs, "Hue", CV_NODE_MAP);
      cv::write(cvfs, "center", Red_set.hue.center);
      cv::write(cvfs, "range", Red_set.hue.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Saturation", CV_NODE_MAP);
      cv::write(cvfs, "center", Red_set.sat.center);
      cv::write(cvfs, "range", Red_set.sat.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Value", CV_NODE_MAP);
      cv::write(cvfs, "center", Red_set.val.center);
      cv::write(cvfs, "range", Red_set.val.range);
    }
  }

  {
    cv::WriteStructContext st_yellow(cvfs, "YELLOW", CV_NODE_MAP);
    {
      cv::WriteStructContext st_hue(cvfs, "Hue", CV_NODE_MAP);
      cv::write(cvfs, "center", Yellow_set.hue.center);
      cv::write(cvfs, "range", Yellow_set.hue.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Saturation", CV_NODE_MAP);
      cv::write(cvfs, "center", Yellow_set.sat.center);
      cv::write(cvfs, "range", Yellow_set.sat.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Value", CV_NODE_MAP);
      cv::write(cvfs, "center", Yellow_set.val.center);
      cv::write(cvfs, "range", Yellow_set.val.range);
    }
  }

  {
    cv::WriteStructContext st_green(cvfs, "GREEN", CV_NODE_MAP);
    {
      cv::WriteStructContext st_hue(cvfs, "Hue", CV_NODE_MAP);
      cv::write(cvfs, "center", Green_set.hue.center);
      cv::write(cvfs, "range", Green_set.hue.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Saturation", CV_NODE_MAP);
      cv::write(cvfs, "center", Green_set.sat.center);
      cv::write(cvfs, "range", Green_set.sat.range);
    }

    {
      cv::WriteStructContext st_hue(cvfs, "Value", CV_NODE_MAP);
      cv::write(cvfs, "center", Green_set.val.center);
      cv::write(cvfs, "range", Green_set.val.range);
    }
  }

} /* void TunerBody::saveResult() */
Exemple #11
0
// Interactive capture + calibration for Kinect/PointGrey pairs. In the
// capture loop, 's' saves a chessboard image from each device plus the
// Kinect depth map (as XML); 'q' leaves the loop. Afterwards each pair is
// calibrated. Currently only pair index 1 is processed (i in [1, 2)).
static void KinectCalibration(){
	MultiKinectHandler mulkin;
	MultiPGRHandler multi_pgr;
	//multi_pgr.SwitchCameraID(0, 1);

	//mulkin.ShowAllImage();
	// Calibrate the PGR intrinsics first.
	for(int i=1; i<2; i++){
		multi_pgr.GetPGRCamera(i)->Calibration();
	}

	cv::Mat_<double> depth_matrix = cv::Mat::zeros(480, 640, CV_64F);
	// NOTE(review): ShouldRun starts false, so the capture loop is skipped and
	// only the final pair calibration runs — confirm this is intentional.
	bool ShouldRun = false;
	int count = 0;
	char key;
	while(ShouldRun){
		mulkin.GetContext()->WaitAndUpdateAll();
		for(int i=1; i<2; i++){
			std::ostringstream kinect_image;
			std::ostringstream pgr_image;
			mulkin.GetKinect(i)->UpdateAllMember();
			multi_pgr.GetPGRCamera(i)->Update();
			kinect_image <<"Kinect_" << i;
			cv::imshow(kinect_image.str(), *mulkin.GetKinect(i)->GetColorImage());
			pgr_image << "PointGrey_" << i;
			cv::imshow(pgr_image.str(), *multi_pgr.GetPGRCamera(i)->GetDistortedImage());
			//depth image
			/*mulkin.GetKinect(i)->CreateDepthImage();
			std::ostringstream depth_image;
			depth_image << "Depth_"<<i;
			cv::imshow(depth_image.str(), *mulkin.GetKinect(i)->GetDepthImage());*/
		}
		key = cv::waitKey(1);
		if(key == 's'){
			for(int i=1; i<2; i++){
				std::ostringstream PGRChessName;
				std::ostringstream KinectChessName;
				PGRChessName << "./Calibration/PointGrey_"<< i << "/chess" << count << ".bmp";
				cv::imwrite(PGRChessName.str(), *multi_pgr.GetPGRCamera(i)->GetDistortedImage());
				std::cout << PGRChessName.str() << std::endl;
				// BUGFIX: the path used "Kinect_<i>./chess", inserting a stray
				// "." component; the separator is "/chess" as used elsewhere.
				KinectChessName << "./Calibration/Kinect_" << i << "/chess" << count << ".bmp";
				cv::imwrite(KinectChessName.str(), *mulkin.GetKinect(i)->GetColorImage());
				std::cout << KinectChessName.str() << std::endl;
				// Save the depth map (depth metadata is indexed (x, y)).
				for(int y=0; y<480; y++){
					for(int x=0; x<640; x++){
						depth_matrix.at<double>(y, x) = (*mulkin.GetKinect(i)->GetDepthMD())(x, y);
					}
				}
				std::ostringstream filename;
				filename <<"./Calibration/Depth_" << i << "/depth" << count << ".xml";
				cv::FileStorage cvfs(filename.str(), CV_STORAGE_WRITE);
				cv::write(cvfs, "depth_matrix", depth_matrix);
				std::cout << filename.str() <<std::endl;
			}
			count++;
		}
		if(key == 'q')
			ShouldRun = false;
	}
	// Calibrate each PGR/Kinect pair from the saved data.
	PGRKinect pgr_kinect[2];
	for(int i=1; i<2; i++){
		pgr_kinect[i].SetKinect(mulkin.GetKinect(i));
		pgr_kinect[i].SetPGRCamera(multi_pgr.GetPGRCamera(i));
		pgr_kinect[i].SetIDAndMatrix(i);
		pgr_kinect[i].Calibration();
	}
}
Exemple #12
0
// Interactive capture + calibration for two Kinects. '0'/'1' save a
// chessboard image and depth map for the corresponding Kinect, 's' saves
// both at once, 'q' leaves the loop; afterwards the multi-Kinect
// calibration is run from the saved files.
static void MultiKinectCalibration(){
	MultiKinectHandler mulkin;
	//MultiPGRHandler multi_pgr;
	//multi_pgr.SwitchCameraID(0, 1);
	
	cv::Mat_<double> depth_matrix = cv::Mat::zeros(480, 640, CV_64F);
	// NOTE(review): ShouldRun starts false, so the capture loop is skipped and
	// only the final calibration runs — confirm this is intentional.
	bool ShouldRun = false;
	int count = 0;
	int count_0 = 0;
	int count_1 = 0;
	char key;
	while(ShouldRun){
		mulkin.GetContext()->WaitAndUpdateAll();
		for(int i=0; i<2; i++){
			std::ostringstream kinect_image;
			std::ostringstream depth_image;
			mulkin.GetKinect(i)->UpdateAllMember();
			mulkin.GetKinect(i)->CreateDepthImage();
			//multi_pgr.GetPGRCamera(i)->Update();
			kinect_image <<"Kinect_" << i;
			cv::imshow(kinect_image.str(), *mulkin.GetKinect(i)->GetColorImage());
			//key = cv::waitKey(1);
			depth_image << "Depth_"<<i;
			cv::imshow(depth_image.str(), *mulkin.GetKinect(i)->GetDepthImage());
		}
		key = cv::waitKey(1);
		if(key == '0'){
			// Save color + depth for Kinect 0 only.
			std::ostringstream KinectChessName;
			KinectChessName << "./MultiKinectCalibration/Kinect_0/chess"<< count_0<<".bmp";
			cv::imwrite(KinectChessName.str(), *mulkin.GetKinect(0)->GetColorImage());
			std::cout << KinectChessName.str() << std::endl;
			for(int y=0; y<480; y++){
				for(int x=0; x<640; x++){
					depth_matrix.at<double>(y, x) = (*mulkin.GetKinect(0)->GetDepthMD())(x, y);
				}
			}
			std::ostringstream filename;
			filename <<"./MultiKinectCalibration/Depth_0/depth" << count_0 << ".xml";
			cv::FileStorage cvfs(filename.str(), CV_STORAGE_WRITE);
			cv::write(cvfs, "depth_matrix", depth_matrix);
			std::cout << filename.str() <<std::endl;
			count_0++;
		}
		if(key == '1'){
			// Save color + depth for Kinect 1 only.
			std::ostringstream KinectChessName;
			KinectChessName << "./MultiKinectCalibration/Kinect_1/chess"<< count_1<<".bmp";
			cv::imwrite(KinectChessName.str(), *mulkin.GetKinect(1)->GetColorImage());
			std::cout << KinectChessName.str() << std::endl;
			for(int y=0; y<480; y++){
				for(int x=0; x<640; x++){
					depth_matrix.at<double>(y, x) = (*mulkin.GetKinect(1)->GetDepthMD())(x, y);
				}
			}
			std::ostringstream filename;
			filename <<"./MultiKinectCalibration/Depth_1/depth" << count_1 << ".xml";
			cv::FileStorage cvfs(filename.str(), CV_STORAGE_WRITE);
			cv::write(cvfs, "depth_matrix", depth_matrix);
			std::cout << filename.str() <<std::endl;
			count_1++;
		}
		if(key == 's'){
			// Save color + depth for both Kinects under ./Calibration/.
			for(int i=0; i<2; i++){
				std::ostringstream PGRChessName;
				std::ostringstream KinectChessName;
				PGRChessName << "./Calibration/PointGrey_"<< i << "/chess" << count << ".bmp";
				//cv::imwrite(PGRChessName.str(), *multi_pgr.GetPGRCamera(i)->GetImage());
				std::cout << PGRChessName.str() << std::endl;
				// BUGFIX: the path used "Kinect_<i>./chess", inserting a stray
				// "." component; the separator is "/chess" as used elsewhere.
				KinectChessName << "./Calibration/Kinect_" << i << "/chess" << count << ".bmp";
				cv::imwrite(KinectChessName.str(), *mulkin.GetKinect(i)->GetColorImage());
				std::cout << KinectChessName.str() << std::endl;
				for(int y=0; y<480; y++){
					for(int x=0; x<640; x++){
						depth_matrix.at<double>(y, x) = (*mulkin.GetKinect(i)->GetDepthMD())(x, y);
					}
				}
				std::ostringstream filename;
				filename <<"./Calibration/Depth_" << i << "/depth" << count << ".xml";
				cv::FileStorage cvfs(filename.str(), CV_STORAGE_WRITE);
				cv::write(cvfs, "depth_matrix", depth_matrix);
				std::cout << filename.str() <<std::endl;
			}
			count++;
		}
		if(key == 'q'){
			ShouldRun = false;
			//int i=1;
			//cv::imwrite("calibration_kinect.bmp", *mulkin.GetKinect(1)->GetColorImage());
			////cv::imwrite("calibration_PGR.bmp", *multi_pgr.GetPGRCamera(i)->GetImage());
			//std::ostringstream depth_image;
			//depth_image << "Depth_"<<1;
			//depth_image <<".bmp";
			//cv::imwrite(depth_image.str(), *mulkin.GetKinect(1)->GetDepthImage());
		}
	}
	mulkin.MultiKinectCalibration();
}
////////// main function /////////////
// Registers a sequence of point-cloud CSV files with ICP, chains the
// estimated rigid transforms into camera poses, optionally writes the
// results (PCD/CSV/XML) and displays them with OpenGL/GLUT.
// Relies on file-scope globals defined elsewhere in this file: fileTotal,
// rows, cols, fileCount, filedir, dir, outdir, cubeSize, cubeSize_,
// model_rows, data_rows, allpoints, window_w, window_h, window1, ...
int main(int argc, char** argv){
	
	cubeSize_ = cubeSize/2.;

	int i=0, j=0, k=0;
	clock_t start_time_total,end_time_total;
	clock_t start_time[fileTotal];
	clock_t end_time[fileTotal];

	//char * filename[fileTotal];
	// buffers for the parallelized per-file processing
	char * model_filename[fileTotal];
	char * data_filename[fileTotal];

	//Mat shape[fileTotal];
	Mat shape_reg[fileTotal];
	//Mat shape_temp[fileTotal];
	Mat shape_fixed[fileTotal];

	// per-file model/data shapes and the intermediate transform chain
	Mat model_shape[fileTotal];
	Mat data_shape[fileTotal];
	Mat shape_temp[fileTotal][fileTotal];

	Mat my_model_corr[fileTotal];
	int myIndex[fileTotal][rows];
	float myDist[fileTotal][rows];
	RT<float> my_rt[fileTotal];

	Mat_<float> model_mean;

	// number of rows in the model file
	//model_rows = 16128;
	// number of rows in the data file
	//data_rows = 16128;
	
	start_time_total = clock();
	cout << "-------------" << endl;
	cout << "ICP Algorithm" << endl;
	cout << "-------------" << endl;

// NOTE(review): fileCount appears to be a file-scope global used as the OpenMP
// loop index; OpenMP privatizes the parallel-for iteration variable, so each
// thread gets its own copy — confirm no other code reads it concurrently.
#pragma omp parallel for
	for(fileCount=0;fileCount<fileTotal;fileCount++)
	{

#pragma region // --- load the point-cloud CSV files into cv::Mat ---
		if(fileCount>=1){
			///model
			// CSV file name
			// NOTE(review): sizeof(char *) over-allocates here; sizeof(char)
			// was likely intended (harmless, but worth confirming). The
			// buffers are also never freed before main returns.
			model_filename[fileCount] = (char *)malloc(sizeof(char *) * 100);
			//sprintf(model_filename[fileCount],"%s/%s/%d.csv",filedir,dir,fileCount);
			sprintf(model_filename[fileCount],"%s/%s/points%02d.csv",filedir,dir,fileCount);
			// row count of the CSV file
			model_rows[fileCount] = rows;
			// read the CSV file
			model_shape[fileCount] = csvread(model_filename[fileCount], model_rows[fileCount], cols);
			// print the file name to the console
			//cout << "model点群データファイル名 " << model_filename[fileCount] << endl;
		}
		///data
		// CSV file name
		data_filename[fileCount] = (char *)malloc(sizeof(char *) * 100);
		//sprintf(data_filename[fileCount],"%s/%s/%d.csv",filedir,dir,(fileCount+1));
		sprintf(data_filename[fileCount],"%s/%s/points%02d.csv",filedir,dir,(fileCount+1));
		// row count of the CSV file
		data_rows[fileCount] = rows;
		// read the CSV file
		data_shape[fileCount] = csvread(data_filename[fileCount], data_rows[fileCount], cols);
		// print the file name to the console
		cout << "点群データファイル名 " << data_filename[fileCount] << endl;
#pragma endregion 

		if(fileCount>=1){

#pragma region // --- registration by ICP ---
#if 1 // --- run ICP ---
			// start timing this file's registration
			start_time[fileCount] = clock();
			cout << "\t標準ICP開始" << endl;
			//ICP with flann search and unit quaternion method
			//cout << "kd-tree探索+クォータニオンにより[R/t]を推定します" << endl << endl;
			ClosestPointFlann model_shape_flann (model_shape[fileCount]);
			RT_L2 <float, SolveRot_eigen<float>> rt_solver;
			ICP <ClosestPointFlann> icp (model_shape_flann, rt_solver);

			icp.set(data_shape[fileCount]);
			icp.reg(100, 1.0e-6);

			// stop timing
			end_time[fileCount] = clock();
			//cout << "icp result : [R/t] =" << endl << (icp.rt) << endl << endl;
			cout << "\t" << data_filename[fileCount] << "  icp error =" << icp.dk << endl;
			cout << "\t" << data_filename[fileCount] << "  実行時間 = " << (float)(end_time[fileCount] - start_time[fileCount])/CLOCKS_PER_SEC << "秒" << endl << endl;
			
			// copy the ICP outputs into this thread's local buffers
			//my_model_corr[fileCount] = Mat::zeros(rows, cols, CV_32F);
			my_model_corr[fileCount].create(rows, cols, CV_32F);
			icp.model_corr.copyTo(my_model_corr[fileCount]);
			icp.rt.copyTo(my_rt[fileCount]);
			for(int k=0;k<data_rows[fileCount];k++){
				myIndex[fileCount][k] = icp.index[k];
				myDist[fileCount][k] = icp.distance[k];
			}

#else // --- ICP disabled: pass the data through unregistered ---
			shape_reg[fileCount] = data_shape[fileCount];
#endif
#pragma endregion
		}else{
			shape_reg[fileCount] = data_shape[fileCount];
		}
	}

#pragma region // --- coordinate transformation ---
	// mean of the reference (first) shape, used to center all clouds
	reduce(shape_reg[0], model_mean, 0, CV_REDUCE_AVG);

#pragma omp parallel for private(i,j,k)
	for(fileCount=0;fileCount<fileTotal;fileCount++)
	{
		if(fileCount>=1){
			// apply the estimated [R|t] chain to data_shape,
			// after initializing shape_temp
			for(k=0;k<fileTotal;k++)
			{
				shape_temp[fileCount][k] = cv::Mat::zeros(data_rows[fileCount], cols, CV_32F);
			}
			shape_temp[fileCount][fileCount] = data_shape[fileCount];
			// walk the chain backwards: cloud fileCount is brought into frame 0
			// by applying my_rt[fileCount], my_rt[fileCount-1], ..., my_rt[1]
			for(k=0;k<fileCount;k++)
			{
				shape_temp[fileCount][fileCount-(k+1)] = my_rt[(fileCount-k)].transform(shape_temp[fileCount][fileCount-k]);
			}
			shape_reg[fileCount] = shape_temp[fileCount][0];
		}

		// center every registered cloud on the reference mean
		shape_fixed[fileCount] = shape_reg[fileCount] - repeat(model_mean, shape_reg[fileCount].rows, 1);
		
		/*
		//メモリ割り当て
		points[fileCount] = (GLfloat *)malloc(sizeof(float)*data_rows[fileCount]*cols);
		//座標値をGLpointsに入れる
		for(i=0;i<data_rows[fileCount];i++){
			for(j=0;j<cols;j++){
				points[fileCount][i*cols+j] = shape_fixed[fileCount].at<float>(i,j);
			}
		}*/
#pragma endregion
	}

#pragma region // --- hand the data to OpenGL ---
	
	// allocate the flat vertex buffer
	allpoints = (GLfloat *)malloc(sizeof(float)*rows*fileTotal*cols);
	for(fileCount=0;fileCount<fileTotal;fileCount++)
	{
		// copy the coordinates into allpoints
		for(int i=0;i<rows;i++){
			for(int j=0;j<cols;j++){
				allpoints[fileCount*rows*cols+i*cols+j] = shape_fixed[fileCount].at<float>(i,j);
			}
		}
	}
#pragma endregion

	
#pragma region // --- compute the camera RT chain ---
	// camera i pose is accumulated from the pairwise ICP transforms:
	// R_i = R_{i-1} * r_i^T, t_i = t_i * R_{i-1}^T + t_{i-1}
	Mat cameraRT[fileTotal];
	Mat cameraR[fileTotal];
	Mat cameraT[fileTotal];
	cameraRT[0] = Mat::eye(4,4,CV_32F);
	cameraR[0] = Mat::eye(3,3,CV_32F);
	cameraT[0] = Mat::zeros(1,3,CV_32F);
	for(i=1;i<fileTotal;i++){
		cameraRT[i] = Mat::eye(4,4,CV_32F);
		cameraR[i] = Mat::eye(3,3,CV_32F);
		cameraT[i] = Mat::zeros(1,3,CV_32F);
		
		// my_rt is a 4x4 [R|t]: rows 0-2 hold R, row 3 holds t
		Mat r = my_rt[i].operator()(Range(0,3),Range(0,3));
		cameraR[i] = cameraR[i-1]*r.t();
		Mat t = my_rt[i].operator()(Range(3,4),Range(0,3));
		cameraT[i] = t*cameraR[i-1].t() + cameraT[i-1];
		
		// pack R into the upper-left 3x3 of the 4x4 pose matrix
		cameraRT[i].at<float>(0,0) = cameraR[i].at<float>(0,0);
		cameraRT[i].at<float>(0,1) = cameraR[i].at<float>(0,1);
		cameraRT[i].at<float>(0,2) = cameraR[i].at<float>(0,2);
		cameraRT[i].at<float>(1,0) = cameraR[i].at<float>(1,0);
		cameraRT[i].at<float>(1,1) = cameraR[i].at<float>(1,1);
		cameraRT[i].at<float>(1,2) = cameraR[i].at<float>(1,2);
		cameraRT[i].at<float>(2,0) = cameraR[i].at<float>(2,0);
		cameraRT[i].at<float>(2,1) = cameraR[i].at<float>(2,1);
		cameraRT[i].at<float>(2,2) = cameraR[i].at<float>(2,2);
		
		// pack t into row 3
		cameraRT[i].at<float>(3,0) = cameraT[i].at<float>(0,0);
		cameraRT[i].at<float>(3,1) = cameraT[i].at<float>(0,1);
		cameraRT[i].at<float>(3,2) = cameraT[i].at<float>(0,2);
	}
#pragma endregion

// --- data output ---
#if FILEOUTPUT

	///////////////////////////////
	// write out all point clouds together
	// pcd
	//
	FILE *outfp;
	char outfilename[100];
	sprintf(outfilename,"%s/%s/result_xyz.pcd",outdir,dir);
	outfp = fopen(outfilename,"w");
	if(outfp == NULL){
		printf("%sファイルが開けません\n",outfilename);
		return -1;
	}
	// packed RGB values for the PCD color field
	int red = 255*256*256;
	int green = 255*256*256 + 255*256;
	int white = 255*256*256 + 255*256 + 255;
	fprintf(outfp,"# .PCD v.7 - Point Cloud Data file format\nVERSION .7\nFIELDS x y z rgb\nSIZE 4 4 4 4\nTYPE F F F F\nCOUNT 1 1 1 1\nWIDTH %d\nHEIGHT 1\nVIEWPOINT 0 0 0 1 0 0 0\nPOINTS %d\nDATA ascii\n", rows*fileTotal, rows*fileTotal);
	for(i=0;i<fileTotal;i++){
		for(j=0;j<data_rows[i];j++){
			fprintf(outfp,"%f %f %f %d\n", shape_reg[i].at<float>(j,0), shape_reg[i].at<float>(j,1), shape_reg[i].at<float>(j,2), green+(int)floor(255.*(i+1)/fileTotal));
		}
	}
	fclose(outfp);

	///////////////////////////////
	// write out all point clouds together
	// csv (space-separated)
	//
	sprintf(outfilename,"%s/%s/allpoints.csv",outdir,dir);
	outfp = fopen(outfilename,"w");
	if(outfp == NULL){
		printf("%sファイルが開けません\n",outfilename);
		return -1;
	}
	for(i=0;i<fileTotal;i++){
		for(j=0;j<data_rows[i];j++){
			fprintf(outfp,"%f %f %f\n", shape_reg[i].at<float>(j,0), shape_reg[i].at<float>(j,1), shape_reg[i].at<float>(j,2));
		}
	}
	fclose(outfp);

	///////////////////////////////
	// write out all point clouds together
	// result_xyz.csv (comma-separated)
	//
	sprintf(outfilename,"%s/%s/result_xyz_icp.csv",outdir,dir);
	outfp = fopen(outfilename,"w");
	if(outfp == NULL){
		printf("%sファイルが開けません\n",outfilename);
		return -1;
	}
	for(i=0;i<fileTotal;i++){
		for(j=0;j<data_rows[i];j++){
			fprintf(outfp,"%f,%f,%f\n", shape_reg[i].at<float>(j,0), shape_reg[i].at<float>(j,1), shape_reg[i].at<float>(j,2));
		}
	}
	fclose(outfp);

	//////////////////////////////////
	// write out Corr (correspondences), Index (correspondence element
	// indices), and Distance (correspondence distances)
	//
	FILE *outfp_corr;
	char outfilename_corr[100];

	for(fileCount=1;fileCount<fileTotal;fileCount++){

		/// Index file
		sprintf(outfilename_corr,"%s/%s/index%02d.csv",outdir,dir,(fileCount));
		outfp_corr = fopen(outfilename_corr,"w");
		if(outfp_corr == NULL){
			printf("%sファイルが開けません\n",outfilename_corr);
			return -1;
		}
		for(j=0;j<data_rows[fileCount];j++){
			fprintf(outfp_corr,"%d\n", myIndex[fileCount][j]);
		}
		fclose(outfp_corr);

		/// Distance file
		sprintf(outfilename_corr,"%s/%s/dist%02d.csv",outdir,dir,(fileCount));
		outfp_corr = fopen(outfilename_corr,"w");
		if(outfp_corr == NULL){
			printf("%sファイルが開けません\n",outfilename_corr);
			return -1;
		}
		for(j=0;j<data_rows[fileCount];j++){
			fprintf(outfp_corr,"%f\n", myDist[fileCount][j]);
		}
		fclose(outfp_corr);
	}
	
	for(fileCount=0;fileCount<fileTotal;fileCount++){
		
		if(fileCount<(fileTotal-1)){
			/// Corr point-cloud file: points of cloud fileCount reordered by
			/// the correspondence indices of the NEXT cloud's ICP run
			sprintf(outfilename_corr,"%s/%s/corr%02d.csv",outdir,dir,(fileCount+1));
			outfp_corr = fopen(outfilename_corr,"w");
			if(outfp_corr == NULL){
				printf("%sファイルが開けません\n",outfilename_corr);
				return -1;
			}

			for(j=0;j<data_rows[fileCount];j++){
				//fprintf(outfp_corr,"%f %f %f\n", my_model_corr[fileCount].at<float>(j,0), my_model_corr[fileCount].at<float>(j,1), my_model_corr[fileCount].at<float>(j,2));
				fprintf(outfp_corr,"%f %f %f\n", shape_reg[fileCount].at<float>(myIndex[fileCount+1][j],0), shape_reg[fileCount].at<float>(myIndex[fileCount+1][j],1), shape_reg[fileCount].at<float>(myIndex[fileCount+1][j],2));
			}
			fclose(outfp_corr);
		}else{
			/// Corr point-cloud file: last cloud written in original order
			sprintf(outfilename_corr,"%s/%s/corr%02d.csv",outdir,dir,(fileCount+1));
			outfp_corr = fopen(outfilename_corr,"w");
			if(outfp_corr == NULL){
				printf("%sファイルが開けません\n",outfilename_corr);
				return -1;
			}

			for(j=0;j<data_rows[fileCount];j++){
				//fprintf(outfp_corr,"%f %f %f\n", my_model_corr[fileCount].at<float>(j,0), my_model_corr[fileCount].at<float>(j,1), my_model_corr[fileCount].at<float>(j,2));
				fprintf(outfp_corr,"%f %f %f\n", shape_reg[fileCount].at<float>(j,0), shape_reg[fileCount].at<float>(j,1), shape_reg[fileCount].at<float>(j,2));
			}
			fclose(outfp_corr);
		}
	}

	/////////////////////
	// write out RT
	//
	// assign the identity transform to my_rt[0]
	//Mat rt0 = (Mat_<float>(4,4) << 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1);
	Mat rt0 = Mat::eye(4,4,CV_32F);
	rt0.copyTo(my_rt[0]);
	// Open File Storage
	char rtfilename[100];
	sprintf(rtfilename,"%s/%s/rt.xml",outdir,dir);
	cv::FileStorage	cvfs(rtfilename,CV_STORAGE_WRITE);
	cv::WriteStructContext ws(cvfs, "mat_rt", CV_NODE_SEQ);	// create node
	for(int i=0; i<fileTotal; i++){
		cv::write(cvfs,"",cameraRT[i]);
	}
	cvfs.release();

#endif

//--- display with OpenGL ---
#if GLVIEW
	
	// --- GLUT initialize ---
	initFlag();
	initParam();

	//window1
	glutInit(&argc, argv);
	glutInitWindowPosition(0, 0);
	glutInitWindowSize(window_w, window_h);
	glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );

	window1 = glutCreateWindow("Window1");
	glutMouseFunc(mouse);
	glutMotionFunc(drag);
	glutPassiveMotionFunc(passive);
	glutMouseWheelFunc ( MouseWheel ) ;// mouse-wheel callback
	glutDisplayFunc(disp);
	glutIdleFunc(myGlutIdle);
	glutKeyboardFunc(glut_keyboard);
	// NOTE(review): this second glutIdleFunc call replaces myGlutIdle with
	// animate — confirm that only animate is the intended idle callback.
	glutIdleFunc(animate);
	glClearColor(0.0, 0.0, 0.0, 0.5); // background color
	// glutMainLoop does not return; the code below only runs when GLVIEW is off
	glutMainLoop();
#endif
	
	// stop the total timer and report
	end_time_total = clock();
	cout << "-------------" << endl;
	cout << "    Finish   " << endl;
	cout << "-------------" << endl;
	cout << "プログラム実行時間 = " << (float)(end_time_total - start_time_total)/CLOCKS_PER_SEC << "秒" << endl << endl;
	
	//cvNamedWindow ("WaitKey", CV_WINDOW_AUTOSIZE);
	//cvWaitKey(0);
	return 0;
}