Example #1
0
// Captures synchronized frame pairs from two cameras; Enter saves the pair
// as test_left_N.png / test_right_N.png, Esc quits.
int main() {

    // Sequential index appended to every saved image pair.
    static int index = 0;
    // Scratch buffer for building output filenames.
    static char str[100];

    cv::VideoCapture cap1(1);
    cv::VideoCapture cap2(2);

    if (!cap1.isOpened() || !cap2.isOpened()) {
        std::cout << "Cannot open the video cam" << std::endl;
        return -1;
    }

    cap1.set(CV_CAP_PROP_FPS, 23);
    cap2.set(CV_CAP_PROP_FPS, 23);

    double dWidth = cap2.get(CV_CAP_PROP_FRAME_WIDTH);
    double dHeight = cap2.get(CV_CAP_PROP_FRAME_HEIGHT);

    std::cout << "Frame size : " << dWidth << " x " << dHeight << std::endl;

    cv::namedWindow("MyVideo1", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("MyVideo2", CV_WINDOW_AUTOSIZE);

    while (true) {
        cv::Mat frame1;
        cv::Mat frame2;
        const bool success1 = cap1.read(frame1);
        const bool success2 = cap2.read(frame2);

        if (!success1 || !success2) {
            std::cout << "Cannot read a frame from video stream" << std::endl;
            break;
        }

        cv::imshow("MyVideo1", frame1);
        cv::imshow("MyVideo2", frame2);

        int key = cv::waitKey(300);
        if (key == '\r') {  // Enter: save the current pair
            // Fix: message said "smaple" instead of "sample".
            std::cout << "Capturing sample " << (++index) << std::endl;

            // snprintf instead of sprintf: can never overrun str[100].
            std::snprintf(str, sizeof(str), "test_left_%d.png", index);
            cv::imwrite(str, frame1);

            std::snprintf(str, sizeof(str), "test_right_%d.png", index);
            cv::imwrite(str, frame2);
        }

        if (key == 27) {  // Esc: quit
            std::cout << "esc key is pressed by user" << std::endl;
            break;
        }
    }

    return 0;
}
double caplet_lmm(const Date& todaysDate_,
	const Date& settlementDate_,
	const Date& maturity_,
	Rate spot_,
	Rate strike,
	Rate Numeraire, //zero-coupon bond
	//Volatility volatility

	double correl,
	double a,
	double b,
	double c,
	double d
	)

{

	//SavedSettings backup;

	const Size size = 10;

	#if defined(QL_USE_INDEXED_COUPON)
		const Real tolerance = 1e-5;
	#else
		const Real tolerance = 1e-12;
	#endif

	boost::shared_ptr<IborIndex> index = makeIndex();

	boost::shared_ptr<LiborForwardModelProcess> process(new LiborForwardModelProcess(size, index));

	// set-up pricing engine
	const boost::shared_ptr<OptionletVolatilityStructure> capVolCurve = makeCapVolCurve(Settings::instance().evaluationDate());

	Array variances = LfmHullWhiteParameterization(process, capVolCurve).covariance(0.0).diagonal();

	boost::shared_ptr<LmVolatilityModel> volaModel(new LmFixedVolatilityModel(Sqrt(variances),process->fixingTimes()));

	boost::shared_ptr<LmCorrelationModel> corrModel(new LmExponentialCorrelationModel(size, correl));

	boost::shared_ptr<AffineModel> model(new LiborForwardModel(process, volaModel, corrModel));

	const Handle<YieldTermStructure> termStructure = process->index()->forwardingTermStructure();

	boost::shared_ptr<AnalyticCapFloorEngine> engine1(new AnalyticCapFloorEngine(model, termStructure));

	boost::shared_ptr<Cap> cap1(new Cap(process->cashFlows(),std::vector<Rate>(size, strike)));

	cap1->setPricingEngine(engine1);

	return cap1->NPV();

}
// Periodic UI refresh. Behavior depends on the active tab:
//   tab 0 — binarized live camera preview,
//   tab 1 — subscribe to a ROS image topic (blocks in ros::spin()),
//   tab 2 — one-shot snapshots from two extra cameras.
void testNode_MainWindow::timerEvent(QTimerEvent *)
{
    if(ui->tabWidget->currentIndex()==0){
        IplImage* showCamera = cvQueryFrame(m_camera);
        QImage *frame = IplImageToQImage(showCamera);

        ui->threshold_num->setNum(ui->threshold_bar->value());

        *frame = func_Gray(*frame);

        // Threshold comes from the slider when the checkbox is set,
        // otherwise it is recomputed from the frame itself.
        if(ui->check_Threshold->isChecked()){
            threshold = ui->threshold_bar->value();
        }else{
            threshold = Average_Threshold(*frame);
        }

        // Binarize: pixels at or below threshold -> black, rest -> white.
        for(int h=0;h<frame->height();h++){
            for(int w=0;w<frame->width();w++){
                if(qRed(frame->pixel(w,h))<=threshold){
                    frame->setPixel(w,h,QColor(0,0,0).rgb());
                }else{
                    frame->setPixel(w,h,QColor(255,255,255).rgb());
                }
            }
        }
        ui->show_camera->setPixmap(QPixmap::fromImage(*frame));
        // Fix: IplImageToQImage allocates a new QImage on every tick;
        // without this delete the preview leaked one image per timer event.
        delete frame;
    }else if(ui->tabWidget->currentIndex()==1){
        // NOTE(review): ros::spin() never returns until shutdown, so this
        // blocks the Qt event loop; killTimer only runs afterwards.
        ros::NodeHandle n;
        image_transport::ImageTransport it(n);
        image_transport::Subscriber sub = it.subscribe("camera/image", 1, imageCallback);
        ros::spin();
        killTimer(m_nTimerId);
    }else if(ui->tabWidget->currentIndex()==2){
        // NOTE(review): re-opening the capture devices on every timer tick
        // is expensive; consider caching them as members.
        cv::VideoCapture cap(201);
        cap>>cv00;
        cv::VideoCapture cap1(202);
        cap1>>cv01;

        camera0 = MatToQImage(cv00);
        camera1 = MatToQImage(cv01);

        ui->camera01->setPixmap(QPixmap::fromImage(camera0));
        ui->camera02->setPixmap(QPixmap::fromImage(camera1));
    }
}  // Fix: closing brace was missing in the original source.
void Vision::get_array(int faces[6][9])
{

  Mat image0;
  Mat image1;
  Mat image2;
  Mat image3;

  Mat filtered, denoised;


  {
  VideoCapture cap0(0);
  //cap0.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);
  //cap0.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  cap0.set(CV_CAP_PROP_SATURATION, CAMERA_SATURATION);
  //cap0.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  /*
  cap0.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);

  cap0.set(CV_CAP_PROP_HUE, CAMERA_HUE);
  cap0.set(CV_CAP_PROP_GAIN, CAMERA_GAIN);
  cap0.set(CV_CAP_PROP_EXPOSURE, CAMERA_EXPOSURE);
  */
  cap0 >> image0;
    //imshow("Image 0", image0);

  }


  {
  VideoCapture cap1(1);
  //cap1.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);
  //cap1.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  cap1.set(CV_CAP_PROP_SATURATION, CAMERA_SATURATION);
  /*
  cap1.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);
  cap1.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  cap1.set(CV_CAP_PROP_HUE, CAMERA_HUE);
  cap1.set(CV_CAP_PROP_GAIN, CAMERA_GAIN);
  cap1.set(CV_CAP_PROP_EXPOSURE, CAMERA_EXPOSURE);
  */
  cap1 >> image1;
  //imshow("Image 1", image1);

  }


  {

  VideoCapture cap2(2);
  cap2.set(CV_CAP_PROP_SATURATION, CAMERA_SATURATION);
  //cap2.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);
  //cap2.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  /*
  cap2.set(CV_CAP_PROP_HUE, CAMERA_HUE);
  cap2.set(CV_CAP_PROP_GAIN, CAMERA_GAIN);
  cap2.set(CV_CAP_PROP_EXPOSURE, CAMERA_EXPOSURE);
  */
  cap2 >> image2;
  /*imshow("Image 2", image2);
  bilateralFilter(image2, filtered, 3, 75, 75, BORDER_DEFAULT);
  imshow("Filtered", filtered);
  fastNlMeansDenoisingColored(filtered, denoised, 3, 10, 7, 21);
  imshow("Denoised", denoised);*/
  }


  {
  VideoCapture cap3(3);
  //cap3.set(CV_CAP_PROP_BRIGHTNESS, CAMERA_BRIGHTNESS);
  //cap3.set(CV_CAP_PROP_CONTRAST, CAMERA_CONTRAST);
  cap3.set(CV_CAP_PROP_SATURATION, CAMERA_SATURATION);
  /*

  cap3.set(CV_CAP_PROP_HUE, CAMERA_HUE);
  cap3.set(CV_CAP_PROP_GAIN, CAMERA_GAIN);
  cap3.set(CV_CAP_PROP_EXPOSURE, CAMERA_EXPOSURE);
  */
  cap3 >> image3;
  //imshow("Image 3", image3);
 // waitKey(1);
  }




  //all this stuff gets manually filled out
  std::cout <<"Image 0" << std::endl;
  faces[0][1] = assign_color( image0, 363, 62);
  faces[0][2] = assign_color( image0, 293, 94);
  faces[0][5] = assign_color( image0, 227, 58);
  faces[2][0] = assign_color( image0, 330, 156);
  faces[2][1] = assign_color( image0, 395, 114);
  faces[2][2] = assign_color( image0, 445, 85);
  faces[2][3] = assign_color( image0, 327, 235);
  faces[2][6] = assign_color( image0, 321, 293);
  faces[4][0] = assign_color( image0, 140, 94);
  faces[4][1] = assign_color( image0, 193, 119);
  faces[4][2] = assign_color( image0, 256, 156);
  faces[4][5] = assign_color( image0, 265, 237);
  faces[4][8] = assign_color( image0, 270, 294);
/*
  std::cout <<"Image 0 Modified" << std::endl;
  faces[0][1] = assign_color( denoised, 363, 62, 0);
  faces[0][2] = assign_color( denoised, 293, 94, 0);
  faces[0][5] = assign_color( denoised, 227, 58, 0);
  faces[2][0] = assign_color( denoised, 330, 156, 0);
  faces[2][1] = assign_color( denoised, 395, 114, 0);
  faces[2][2] = assign_color( denoised, 445, 85, 0);
  faces[2][3] = assign_color( denoised, 327, 235, 0);
  faces[2][6] = assign_color( denoised, 321, 293, 0);
  faces[4][0] = assign_color( denoised, 140, 94, 0);
  faces[4][1] = assign_color( denoised, 193, 119, 0);
  faces[4][2] = assign_color( denoised, 256, 156, 0);
  faces[4][5] = assign_color( denoised, 265, 237, 0);
  faces[4][8] = assign_color( denoised, 270, 294, 0);
*/

  std::cout << "Image 1" << std::endl;
  faces[3][0] = assign_color( image1, 313, 282 );
  faces[3][1] = assign_color( image1, 319, 227 );
  faces[3][2] = assign_color( image1, 324, 152);
  faces[3][5] = assign_color( image1, 388, 110);
  faces[3][8] = assign_color( image1, 441, 68);
  faces[4][3] = assign_color( image1, 216, 49);
  faces[4][6] = assign_color( image1, 286, 86);
  faces[4][7] = assign_color( image1, 358, 51);
  faces[5][2] = assign_color( image1, 138, 79 );
  faces[5][5] = assign_color( image1, 187, 111 );
  faces[5][6] = assign_color( image1, 259, 282 );
  faces[5][7] = assign_color( image1, 257, 226 );
  faces[5][8] = assign_color( image1, 250, 150);

  std::cout << "Image 2" << std::endl;
  faces[0][0] = assign_color( image2, 191, 13);
  faces[0][3] = assign_color( image2, 246, 47);
  faces[0][6] = assign_color( image2, 320, 81);
  faces[0][7] = assign_color( image2, 385, 45);
  faces[0][8] = assign_color( image2, 422, 19);
  faces[1][0] = assign_color( image2, 163, 73);
  faces[1][1] = assign_color( image2, 216, 111);
  faces[1][2] = assign_color( image2, 284, 149);
  faces[1][5] = assign_color( image2, 287, 224);
  faces[1][8] = assign_color( image2, 288, 278);
  faces[5][0] = assign_color( image2, 356, 148);
  faces[5][1] = assign_color( image2, 420, 103);
  faces[5][3] = assign_color( image2, 349, 224);


  std::cout << "Image 3" << std::endl;
  faces[1][3] = assign_color( image3, 219, 50);
  faces[1][6] = assign_color( image3, 291, 93 );
  faces[1][7] = assign_color( image3, 360, 51 );
  faces[2][5] = assign_color( image3, 182, 109 );
  faces[2][7] = assign_color( image3, 249, 228 );
  faces[2][8] = assign_color( image3, 252, 155 );
  faces[3][3] = assign_color( image3, 385, 109 );
  faces[3][6] = assign_color( image3, 324, 151 );
  faces[3][7] = assign_color( image3, 313, 227 );


  for (int v = 0; v < 6; v++)
  {
      faces[v][4] = v;
  }
 /*
  for (int d = 0; d < 6; d++)
  {
      for (int o = 0; o < 9; o++)
      {
          sum[ faces[d][o] ] += 1;
      }
  }


  for (int h = 0; h < 6; h++)
  {
      if (sum[h] != 9)
      {
            //std::cerr << "Color reading error..." << std::endl;
      }
  }
  */


}
Example #5
0
// Structure-from-motion CLI (Russian-language prompts): optionally runs
// camera calibration, then builds a 3D point cloud from optical flow using
// either one moved camera or a stereo pair, and writes the cloud to .obj.
int main(int argc, char * argv[])
{
	setlocale(LC_ALL, "Russian");
	OpticalFlowMatrices of_matr;
	CameraParams cp;
	char command;
	int number_of_cams;
	puts("Проверка камер");
	presets_aiming();

	printf("Нужна ли калибровка? (y//n): ");
	// Leading space in " %c" skips stray whitespace; checking the return
	// value avoids reading `command` uninitialized on input failure.
	if (scanf(" %c", &command) != 1)
		return 1;
	bool calibration_needed = command == 'y';

	printf("Введите число камер (1//2): ");
	if (scanf("%d", &number_of_cams) != 1)
		return 1;

	if (number_of_cams == 1)
	{
		if (calibration_needed)
		{
			// Run calibration and write the results to a file.
			int board_width, board_height;
			puts("Введите размеры шахматной доски (число углов)");
			printf("Ширина: "); if (scanf("%d", &board_width) != 1) return 1;
			printf("Высота: "); if (scanf("%d", &board_height) != 1) return 1;

			Size board_sz(board_width, board_height);
			cp = camera_calibration(board_sz);

			write_camera_calibration_to_file(cp);
		}
		else
		{ // Read the calibration back from the file.
			read_camera_calibration_from_file(cp);
		}

		// Optical-flow pipeline: take two photos with one camera moved
		// between shots.
		VideoCapture cap(0);

		Mat img0, img1;
		single_camera_aiming(cap);
		cap.read(img0);
		printf("Передвиньте камеру и нажмите любую клавишу");
		single_camera_aiming(cap);
		cap.read(img1);

		// Compute optical flow and build the point cloud.
		Mat point_cloud = single_sfm(img0, img1, cp.CM, cp.D);

		// Save the result to a file.
		save_3d_to_file("[1]point_cloud.obj", point_cloud, "w");
	}

	else if (number_of_cams == 2)
	{
		if (calibration_needed)
		{
			// Run stereo calibration and write the results to a file.
			int board_width, board_height;
			puts("Введите размеры шахматной доски (число углов)");
			printf("Ширина: "); if (scanf("%d", &board_width) != 1) return 1;
			printf("Высота: "); if (scanf("%d", &board_height) != 1) return 1;

			Size board_sz(board_width, board_height);
			of_matr = stereo_calibration(board_sz);

			write_stereo_calibration_to_file(of_matr);
		}
		else
		{ // Read the stereo calibration back from the file.
			read_stereo_calibration_from_file(of_matr);
		}

		// Optical-flow pipeline: one simultaneous photo from each camera.
		VideoCapture cap0(0);
		VideoCapture cap1(1);

		Mat img0, img1;
		aiming(cap0, cap1);
		cap0.read(img0);
		cap1.read(img1);

		// Compute optical flow and build the point cloud.
		Mat point_cloud = stereo_sfm(img0, img1, of_matr);

		// Save the result to a file.
		save_3d_to_file("[2]point_cloud.obj", point_cloud, "w");
	}

	return 0;
}
// Unit test for OctahedronPolygon: 2D triangle containment, construction
// from contours, copy semantics, boolean set operations on pole squares,
// binary (de)serialization round-trip, and caps wider than 90 degrees.
void TestStelSphericalGeometry::testOctahedronPolygon()
{
	// Points strictly inside or on an edge of the CCW triangle (0,0)-(1,0)-(1,1).
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(0.8,0.1,0)));
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(1,0.1,0)));
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(0.5,0.,0)));

	// Check points outside triangle
	QVERIFY(!OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(0.,0.1,0)));
	QVERIFY(!OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(-1.,-1.,0)));

	// Check that the corners are included into the triangle
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(0,0,0)));
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(1,0,0)));
	QVERIFY(OctahedronPolygon::triangleContains2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0), Vec3d(1,1,0)));

	QVERIFY(OctahedronPolygon::isTriangleConvexPositive2D(Vec3d(0,0,0), Vec3d(1,0,0), Vec3d(1,1,0)));

	// Building an OctahedronPolygon from a convex contour must preserve area.
	SubContour contour(smallSquareConvex.getConvexContour());
	OctahedronPolygon splittedSub(contour);
	QCOMPARE(splittedSub.getArea(), smallSquareConvex.getArea());

	//QVector<Vec3d> va = northPoleSquare.getOutlineVertexArray().vertex;
	//QCOMPARE(va.size(),16);
	//va = southPoleSquare.getOutlineVertexArray().vertex;
	//QCOMPARE(va.size(),16);

	// Copy
	OctahedronPolygon splittedSubCopy;
	splittedSubCopy = splittedSub;

	// Copy must be deep: reassigning the source leaves the copy intact.
	QCOMPARE(splittedSub.getArea(), splittedSubCopy.getArea());
	double oldArea = splittedSubCopy.getArea();
	splittedSub = OctahedronPolygon();
	QCOMPARE(splittedSub.getArea(), 0.);
	QCOMPARE(splittedSubCopy.getArea(), oldArea);
	// Intersecting with an empty polygon yields the empty polygon.
	splittedSubCopy.inPlaceIntersection(splittedSub);
	QCOMPARE(splittedSubCopy.getArea(), 0.);

	// Disjoint polygons: intersection empty, union additive, subtraction identity.
	QCOMPARE(southPoleSquare.getArea(), northPoleSquare.getArea());
	QCOMPARE(southPoleSquare.getIntersection(northPoleSquare)->getArea(), 0.);
	QCOMPARE(southPoleSquare.getUnion(northPoleSquare)->getArea(), 2.*southPoleSquare.getArea());
	QCOMPARE(southPoleSquare.getSubtraction(northPoleSquare)->getArea(), southPoleSquare.getArea());

	// Set operations of a polygon with itself (idempotence / annihilation).
	QCOMPARE(northPoleSquare.getIntersection(northPoleSquare)->getArea(), northPoleSquare.getArea());
	QCOMPARE(northPoleSquare.getUnion(northPoleSquare)->getArea(), northPoleSquare.getArea());
	QCOMPARE(northPoleSquare.getSubtraction(northPoleSquare)->getArea(), 0.);

	// Test binary IO
	QByteArray ar;
	QBuffer buf(&ar);
	buf.open(QIODevice::WriteOnly);
	QDataStream out(&buf);
	out << northPoleSquare.getOctahedronPolygon();
	buf.close();
	QVERIFY(!ar.isEmpty());

	// Re-read it
	OctahedronPolygon northPoleSquareRead;
	buf.open(QIODevice::ReadOnly);
	QDataStream in(&buf);
	in >> northPoleSquareRead;
	buf.close();
	// Round-trip must preserve area and overlap the original exactly.
	QVERIFY(!northPoleSquareRead.isEmpty());
	QCOMPARE(northPoleSquareRead.getArea(), northPoleSquare.getArea());
	QVERIFY(northPoleSquareRead.intersects(northPoleSquare.getOctahedronPolygon()));

	// Spherical cap with aperture > 90 deg
	SphericalCap cap1(Vec3d(0,0,1), std::cos(95.*M_PI/180.));
	qDebug() << "---------------------";
	OctahedronPolygon northCapPoly = cap1.getOctahedronPolygon();
	qDebug() << "---------------------";
	qDebug() << northCapPoly.getArea() << OctahedronPolygon::getAllSkyOctahedronPolygon().getArea()/2;
	// A cap wider than a hemisphere must cover more than half the sky.
	QVERIFY(northCapPoly.getArea()>OctahedronPolygon::getAllSkyOctahedronPolygon().getArea()/2);
}