Example #1
bool computePlaneToPlaneHomography( const Eigen::MatrixXd &points1, const Eigen::MatrixXd &points2, Eigen::MatrixXd &H,
	std::vector<bool> &mask, double reprojectionErrorThreshold )
{
	if( points1.size() != points2.size() )
	{
		throw std::runtime_error( "computePlaneToPlaneHomography: Point lists must be of the same length." );
	}

	if( H.cols() != 3 || H.rows() != 3 )
	{
		H.resize( 3, 3 );
	}

	bool result = false;
		
	// If we have exactly 4 points, compute the homography.
	if( points1.size() == 4 )
	{
		result = compute4PointPlaneToPlaneHomography( points1, points2, H );
	}
	else
	{
		// Otherwise, use RANSAC to remove the outliers.
		Detail::HomographyEstimator estimator(4);
		estimator.setThreshold( reprojectionErrorThreshold );
		result = estimator( points1, points2, H, mask );
	}

	return result;
}
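Example #2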
QVariant EstimateProfileOperationCommand::makeFeeObject(const BigInt &gas, const QVariantMap &request)
{
    Ethereum::Connector::GasEstimator estimator(_provider);
    BigInt price = estimator.getPrice();
    if(request.contains("factor"))
    {
        // "factor" scales the estimated gas price, expressed in percent
        // (e.g. 150 means 1.5x the base price).
        size_t factor = request["factor"].toInt();
        if(factor == 0)
        {
            price = 0;
        }
        else
        {
            price *= factor;
            price /= 100;
        }
    }
    QVariantMap result;

    BigInt fee = gas * price;
    result["gas"] = gas.str().c_str();
    result["fee"] = fee.str().c_str();
    result["price"] = price.str().c_str();
    return result;
}
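For concreteness (illustrative numbers, not from the source): with gas = 21000, a base price of 20 and factor = 150, the adjusted price is 20 × 150 / 100 = 30, and the returned fee is 21000 × 30 = 630000.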
Example #3
int main()
{
	double rate = 0.0;
	double vol = 0.1;
	int N = 3;
	int M = 1;
	int b = 700;
	double maturity = 1.0;
	double initialSpot = 100.0;
	double timeStep = (maturity / (double)N);
	double m = timeStep*(rate - vol*vol / 2.0);
	//std::ofstream fichier("C:\\Users\\Lucien\\Desktop\\e.txt", std::ios::out  | std::ios::trunc);
	for (int i = 0; i < 40; i++)
	{
		double initialSpots[1]{80 + (double)i};
		// False distribution in the estimator
		lognormal f(m, vol*sqrt(timeStep));
		lognormal g(0.0, 5.0*vol);
		normalGen gGenerator(ENG(time(0)), normalDist(0.0, 5.0*vol));
		RandomGenerationTool kit(f, gGenerator, g);
		BlackScholesPathGenerator paths(rate, vol, N, b, M, maturity, kit);
		BlackScholesRate rates(N, maturity, rate);
		AmericanPutPayoff payoff(100.0, maturity, N, b, M);
		Estimator estimator(paths, payoff, rates);
		std::cout << estimator.computePrice(initialSpots) << std::endl;
	}
	
	//fichier.close();
	system("pause");
}
Example #4
int main (int argc, char *argv[]) {
	ope::OPESettings settings;
	settings.minTgtDepth = 0.4f;
	settings.maxObjHeight = 2.5f;

	ope::ObjectPoseEstimator estimator(settings);
	ope::SQParameters sqParams = estimator.run();
		
	std::cin.get(); // swallow any pending newline
	std::cin.get(); // wait for a key press before exiting

	return 0;
}
Example #5
void CCtrlDialog::OnBnClickedMfcbuttonObtainucs()
{
	// TODO: Add your control notification handler code here
	CDepthEstimation estimator(m_pAsmbBody);
	estimator.EstimateUCS();

	CMainFrame *pMain = (CMainFrame *)AfxGetMainWnd();
	pMain->ShowUCSEditor();

	CWnd *pWnd = GetDlgItem(IDC_MFCBUTTON_OBTAINUCS);
	pWnd->EnableWindow(FALSE);
// 	pWnd = GetDlgItem(IDC_MFCBUTTON_DEPTHESTIM);
// 	pWnd->EnableWindow(TRUE);
}
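Example #6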
TEST(ProbWelfordCovarEstimator, num_samples) {
  
  const int n = 10;
  Eigen::VectorXd q = Eigen::VectorXd::Ones(n);
  
  const int n_learn = 10;
  
  stan::math::welford_covar_estimator estimator(n);
  
  for (int i = 0; i < n_learn; ++i)
    estimator.add_sample(q);
  
  EXPECT_EQ(n_learn, estimator.num_samples());
  
}
Example #7
void CCtrlDialog::OnBnClickedMfcbuttonDepthestim()
{
	// TODO: Add your control notification handler code here
	CDepthEstimation estimator(m_pAsmbBody);
	estimator.Estimate();

	CWnd *pWnd = GetDlgItem(IDC_MFCBUTTON_DEPTHESTIM);
	pWnd->EnableWindow(FALSE);

	pWnd = GetDlgItem(IDC_MFCBUTTON_RECONSTRUCTION);
	pWnd->EnableWindow(TRUE);

	CMainFrame *pMain = (CMainFrame *)AfxGetMainWnd();
	CSLDRView * pView = (CSLDRView *)(pMain->GetActiveView());
	pView->Invalidate(); 
}
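Example #8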
int main(int argc, char **argv) // rosrun spheres_localization pose_estimation /home/jrhizor/Desktop/KinFuSnapshots/map.txt SIFT /camera/image_raw
{
  ros::init(argc, argv, "pose_estimation");

  // rosrun spheres_localization pose_estimation /home/jrhizor/Desktop/KinFuSnapshots/map.txt SIFT topicname
  ROS_ASSERT(argc==4);
  std::string map_file_name = argv[1];
  std::string method = argv[2];
  std::string camera_topic = argv[3]; 

  // export GSCAM_CONFIG="v4l2src device=/dev/video1 ! video/x-raw-rgb,framerate=30/1 ! ffmpegcolorspace"
  // rosrun gscam gscam
  // load map
  std::vector<InterestPoint3D> world_map = load_map(map_file_name);
  ROS_ASSERT(!world_map.empty()); // guard: world_map[0] is dereferenced below
  ROS_INFO("Loaded map with %d points.", int(world_map.size()));

  // handle map
  std::vector<cv::KeyPoint> map_keypoints;
  cv::Mat map_desc(world_map.size(), world_map[0].descriptor.size(), CV_32F);
 
  ROS_INFO("Created containers for map information.");

  for(unsigned int i=0; i<world_map.size(); ++i)
  {
    cv::KeyPoint temp_keypoint;
    temp_keypoint.pt = cv::Point2f(0, 0); // value never read

    for(unsigned int j=0; j<world_map[i].descriptor.size(); ++j)
    {
      map_desc.at<float>(i,j) = world_map[i].descriptor[j];
    }

    // map_position_lookup[std::make_pair(temp_keypoint.pt.x, temp_keypoint.pt.y)] = 
    //       pcl::PointXYZ(world_map[i].x,world_map[i].y,world_map[i].z);

    map_keypoints.push_back(temp_keypoint);
  }

  ROS_INFO("Filled containers with map information.");


  PoseEstimator estimator(camera_topic);
  // estimator.run(map_keypoints, map_desc, map_position_lookup, method);
  estimator.run(map_keypoints, map_desc, world_map, method);

  return 0;
} 
Example #9
Stitcher::Status Stitcher::estimateCameraParams()
{
    detail::HomographyBasedEstimator estimator;
    if (!estimator(features_, pairwise_matches_, cameras_))
        return ERR_HOMOGRAPHY_EST_FAIL;

    for (size_t i = 0; i < cameras_.size(); ++i)
    {
        Mat R;
        cameras_[i].R.convertTo(R, CV_32F);
        cameras_[i].R = R;
        LOGLN("Initial intrinsic parameters #" << indices_[i] + 1 << ":\n " << cameras_[i].K());
    }

    bundle_adjuster_->setConfThresh(conf_thresh_);
    if (!(*bundle_adjuster_)(features_, pairwise_matches_, cameras_))
        return ERR_CAMERA_PARAMS_ADJUST_FAIL;

    // Find median focal length and use it as final image scale
    std::vector<double> focals;
    for (size_t i = 0; i < cameras_.size(); ++i)
    {
        LOGLN("Camera #" << indices_[i] + 1 << ":\n" << cameras_[i].K());
        focals.push_back(cameras_[i].focal);
    }

    std::sort(focals.begin(), focals.end());
    if (focals.size() % 2 == 1)
        warped_image_scale_ = static_cast<float>(focals[focals.size() / 2]);
    else // even count: average the two middle focal lengths
        warped_image_scale_ = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f;

    if (do_wave_correct_)
    {
        std::vector<Mat> rmats;
        for (size_t i = 0; i < cameras_.size(); ++i)
            rmats.push_back(cameras_[i].R);
        detail::waveCorrect(rmats, wave_correct_kind_);
        for (size_t i = 0; i < cameras_.size(); ++i)
            cameras_[i].R = rmats[i];
    }

    return OK;
}
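Example #10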
TEST(ProbWelfordVarEstimator, sample_variance) {
  
  const int n = 10;
  const int n_learn = 10;
  
  stan::math::welford_var_estimator estimator(n);
  
  for (int i = 0; i < n_learn; ++i) {
    Eigen::VectorXd q = Eigen::VectorXd::Constant(n, i);
    estimator.add_sample(q);
  }
  
  Eigen::VectorXd var(n);
  estimator.sample_variance(var);
  
  // The samples are 0, 1, ..., 9 in every coordinate, so the sample variance
  // is sum_i (i - 4.5)^2 / (n_learn - 1) = 82.5 / 9 = 55 / 6.
  for (int i = 0; i < n; ++i)
    EXPECT_EQ(55.0 / 6.0, var(i));
  
}
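For reference, a minimal scalar sketch of the running update these Welford estimators are presumed to perform (an illustrative assumption about the stan::math internals, which apply the same recurrence per coordinate with Eigen vectors):

// Illustrative scalar Welford accumulator; not the stan::math implementation.
struct WelfordScalar {
  long n = 0;
  double mean = 0.0, m2 = 0.0;
  void add_sample(double x) {
    ++n;
    double delta = x - mean;   // deviation from the old mean
    mean += delta / n;         // running mean update
    m2 += delta * (x - mean);  // accumulates sum of squared deviations
  }
  double sample_variance() const { return n > 1 ? m2 / (n - 1) : 0.0; }
};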
Example #11
void NormalEstimator::createNormalEstimator() {

	TRACE(logger, "createNormalEstimator: Starting");
	std::unique_ptr<pcl::NormalEstimationOMP<pcl::PointXYZRGB, pcl::Normal>> estimator(new pcl::NormalEstimationOMP<pcl::PointXYZRGB, pcl::Normal>() );
	pcl::search::KdTree<pcl::PointXYZRGB>::Ptr kdTree( new pcl::search::KdTree<pcl::PointXYZRGB>() );
	estimator->setSearchMethod( kdTree );

	float radius = getParam<float>( normalRadius );
	if(radius > 0)
		estimator->setRadiusSearch( radius );
	else {
		int k = getParam<int>( kNN );
		assert(k > 0);
		estimator->setKSearch(k);
	}

	normalEstimator = std::move( estimator );
	TRACE(logger, "createNormalEstimator: Finished");
}
Example #12
int GrayWorldCommand::execute()
{
	illumestimators::GrayWorldEstimator estimator(config.n, config.p, config.sigma);

	SingleIlluminantPipeline::loadIlluminantEstimator(estimator, config.loadFile);

	MetadataStorage storage(config.metaFiles);

	Illum estimate;
	double error;

	if (!SingleIlluminantPipeline::runEstimator(estimate, error, estimator, config.inputFile, storage, config.verbosity)) {
		return -1;
	}

	SingleIlluminantPipeline::saveIlluminantEstimator(estimator, config.saveFile);

	return 0;
}
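Example #13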
TEST(ProbWelfordCovarEstimator, sample_mean) {
  
  const int n = 10;
  const int n_learn = 10;
  
  stan::math::welford_covar_estimator estimator(n);
  
  for (int i = 0; i < n_learn; ++i) {
    Eigen::VectorXd q = Eigen::VectorXd::Constant(n, i);
    estimator.add_sample(q);
  }
  
  Eigen::VectorXd mean(n);
  estimator.sample_mean(mean);
  
  // Mean of the samples 0, 1, ..., 9 is 45 / 10 = 9 / 2.
  for (int i = 0; i < n; ++i)
    EXPECT_EQ(9.0 / 2.0, mean(i));
  
}
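Example #14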
TEST(ProbWelfordCovarEstimator, sample_covariance) {
  
  const int n = 10;
  const int n_learn = 10;
  
  stan::math::welford_covar_estimator estimator(n);
  
  for (int i = 0; i < n_learn; ++i) {
    Eigen::VectorXd q = Eigen::VectorXd::Constant(n, i);
    estimator.add_sample(q);
  }
  
  Eigen::MatrixXd covar(n, n);
  estimator.sample_covariance(covar);
  
  // Every coordinate carries the identical sequence 0, 1, ..., 9, so each
  // covariance entry equals the scalar sample variance 55 / 6.
  for (int i = 0; i < n; ++i)
    for (int j = 0; j < n; ++j)
      EXPECT_EQ(55.0 / 6.0, covar(i, j));
  
}
Example #15
void EMPeriodicGaussian::mergeParams() {

    sort(params_.begin(), params_.end(), paramComparator);
    vector<bool> skip(params_.size(), false);
    // final set of parameters
    vector<Param> refined;
    for(int i=0; i < params_.size(); i++) {
        // if this parameter has not already been merged
        if(!skip[i]) {
            // find longest continuous sequence of parameters 
            // that can be merged into a single parameter
            bool hasMergedOnce = false;
            vector<Param> candidates;
            Param best = params_[i]; 
            candidates.push_back(params_[i]);
            for(int j=(i+1) % params_.size(); j != i; j = (j+1) % params_.size()) {
                candidates.push_back(params_[j]);
                Param estimate = estimator(candidates);
                double squaredError = squaredIntegratedError(candidates, estimate);
                if(sqrt(squaredError) < 5e-3) {
                    hasMergedOnce = true;
                    best = estimate;
                    // inclusive merge of all contiguous indices
                    // (modular step so the merge also works when j wraps past the end)
                    for(int k=i; ; k=(k+1) % (int)params_.size()) {
                        skip[k] = true;
                        if(k == j) break;
                    }
                } else {
                    if(hasMergedOnce) {
                        break;
                    }
                }

            }
            refined.push_back(best);
        }
    }
     
    if(refined.size() != params_.size()) {
        params_ = refined;
    }
}
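Example #16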
TEST(ProbWelfordCovarEstimator, restart) {
  
  const int n = 10;
  Eigen::VectorXd q = Eigen::VectorXd::Ones(n);
  
  const int n_learn = 10;
  
  stan::math::welford_covar_estimator estimator(n);
  
  for (int i = 0; i < n_learn; ++i)
    estimator.add_sample(q);
  
  estimator.restart();
  
  EXPECT_EQ(0, estimator.num_samples());
  
  Eigen::VectorXd mean(n);
  estimator.sample_mean(mean);
  
  for (int i = 0; i < n; ++i)
    EXPECT_EQ(0, mean(i));
  
}
Example #17
void
RecognizerROS<PointT>::initialize (int argc, char ** argv)
{
    n_.reset( new ros::NodeHandle ( "~" ) );
    bool do_sift = true;
    bool do_shot = false;
    bool do_ourcvfh = false;

    float resolution = 0.003f;
    std::string models_dir, training_dir;

    typename GHV<PointT, PointT>::Parameter paramGHV;
    typename GraphGeometricConsistencyGrouping<PointT, PointT>::Parameter paramGgcg;
    typename LocalRecognitionPipeline<flann::L1, PointT, FeatureT >::Parameter paramLocalRecSift;
    typename LocalRecognitionPipeline<flann::L1, PointT, pcl::Histogram<352> >::Parameter paramLocalRecShot;
    typename MultiRecognitionPipeline<PointT>::Parameter paramMultiPipeRec;
    typename SHOTLocalEstimationOMP<PointT, pcl::Histogram<352> >::Parameter paramLocalEstimator;

    paramLocalRecSift.use_cache_ = paramLocalRecShot.use_cache_ = true;
    paramLocalRecSift.save_hypotheses_ = paramLocalRecShot.save_hypotheses_ = true;

    n_->getParam ( "visualize", visualize_);
    n_->getParam ( "models_dir", models_dir);
    n_->getParam ( "training_dir", training_dir);
    n_->getParam ( "chop_z", chop_z_ );
    n_->getParam ( "do_sift", do_sift);
    n_->getParam ( "do_shot", do_shot);
    n_->getParam ( "do_ourcvfh", do_ourcvfh);
    n_->getParam ( "knn_sift", paramLocalRecSift.knn_);
    n_->getParam ( "knn_shot", paramLocalRecShot.knn_);

    int normal_computation_method;
    if( n_->getParam ( "normal_method", normal_computation_method) )
    {
        paramLocalRecSift.normal_computation_method_ =
                paramLocalRecShot.normal_computation_method_ =
                paramMultiPipeRec.normal_computation_method_ =
                paramLocalEstimator.normal_computation_method_ =
                normal_computation_method;
    }

    int icp_iterations;
    if(n_->getParam ( "icp_iterations", icp_iterations))
        paramLocalRecSift.icp_iterations_ = paramLocalRecShot.icp_iterations_ = paramMultiPipeRec.icp_iterations_ = icp_iterations;

    n_->getParam ( "cg_size_thresh", paramGgcg.gc_threshold_);
    n_->getParam ( "cg_size", paramGgcg.gc_size_);
    n_->getParam ( "cg_ransac_threshold", paramGgcg.ransac_threshold_);
    n_->getParam ( "cg_dist_for_clutter_factor", paramGgcg.dist_for_cluster_factor_);
    n_->getParam ( "cg_max_taken", paramGgcg.max_taken_correspondence_);
    n_->getParam ( "cg_max_time_for_cliques_computation", paramGgcg.max_time_allowed_cliques_comptutation_);
    n_->getParam ( "cg_dot_distance", paramGgcg.thres_dot_distance_);
    n_->getParam ( "use_cg_graph", paramGgcg.use_graph_);
    n_->getParam ( "hv_clutter_regularizer", paramGHV.clutter_regularizer_);
    n_->getParam ( "hv_color_sigma_ab", paramGHV.color_sigma_ab_);
    n_->getParam ( "hv_color_sigma_l", paramGHV.color_sigma_l_);
    n_->getParam ( "hv_detect_clutter", paramGHV.detect_clutter_);
    n_->getParam ( "hv_duplicity_cm_weight", paramGHV.w_occupied_multiple_cm_);
    n_->getParam ( "hv_histogram_specification", paramGHV.use_histogram_specification_);
    n_->getParam ( "hv_hyp_penalty", paramGHV.active_hyp_penalty_);
    n_->getParam ( "hv_ignore_color", paramGHV.ignore_color_even_if_exists_);
    n_->getParam ( "hv_initial_status", paramGHV.initial_status_);
    n_->getParam ( "hv_inlier_threshold", paramGHV.inliers_threshold_);
    n_->getParam ( "hv_occlusion_threshold", paramGHV.occlusion_thres_);
    n_->getParam ( "hv_optimizer_type", paramGHV.opt_type_);
    n_->getParam ( "hv_radius_clutter", paramGHV.radius_neighborhood_clutter_);
    n_->getParam ( "hv_radius_normals", paramGHV.radius_normals_);
    n_->getParam ( "hv_regularizer", paramGHV.regularizer_);
    n_->getParam ( "hv_plane_method", paramGHV.plane_method_);
    n_->getParam ( "hv_add_planes", paramGHV.add_planes_);

    int min_plane_inliers;
    if(n_->getParam ( "hv_min_plane_inliers", min_plane_inliers))
        paramGHV.min_plane_inliers_ = static_cast<size_t>(min_plane_inliers);

//        n_->getParam ( "hv_requires_normals", r_.hv_params_.requires_normals_);

    rr_.reset(new MultiRecognitionPipeline<PointT>(paramMultiPipeRec));

    boost::shared_ptr < GraphGeometricConsistencyGrouping<PointT, PointT> > gcg_alg (
                new GraphGeometricConsistencyGrouping<PointT, PointT> (paramGgcg));

    boost::shared_ptr <Source<PointT> > cast_source;
    if (do_sift || do_shot ) // for local recognizers we need this source type / training data
    {
        boost::shared_ptr < RegisteredViewsSource<pcl::PointXYZRGBNormal, PointT, PointT> > src
                (new RegisteredViewsSource<pcl::PointXYZRGBNormal, PointT, PointT>(resolution));
        src->setPath (models_dir);
        src->setModelStructureDir (training_dir);
        src->generate ();
//        src->createVoxelGridAndDistanceTransform(resolution);
        cast_source = boost::static_pointer_cast<RegisteredViewsSource<pcl::PointXYZRGBNormal, PointT, PointT> > (src);
    }

    if (do_sift)
    {
#ifdef HAVE_SIFTGPU
        boost::shared_ptr < SIFTLocalEstimation<PointT, FeatureT > > estimator (new SIFTLocalEstimation<PointT, FeatureT >());
        boost::shared_ptr < LocalEstimator<PointT, FeatureT > > cast_estimator = boost::dynamic_pointer_cast<SIFTLocalEstimation<PointT, FeatureT > > (estimator);
#else
        boost::shared_ptr < OpenCVSIFTLocalEstimation<PointT, FeatureT > > estimator (new OpenCVSIFTLocalEstimation<PointT, FeatureT >);
        boost::shared_ptr < LocalEstimator<PointT, FeatureT > > cast_estimator = boost::dynamic_pointer_cast<OpenCVSIFTLocalEstimation<PointT, FeatureT > > (estimator);
#endif

        boost::shared_ptr<LocalRecognitionPipeline<flann::L1, PointT, FeatureT > > sift_r;
        sift_r.reset (new LocalRecognitionPipeline<flann::L1, PointT, FeatureT > (paramLocalRecSift));
        sift_r->setDataSource (cast_source);
        sift_r->setTrainingDir (training_dir);
        sift_r->setFeatureEstimator (cast_estimator);
        sift_r->initialize (false);

        boost::shared_ptr < Recognizer<PointT> > cast_recog;
        cast_recog = boost::static_pointer_cast<LocalRecognitionPipeline<flann::L1, PointT, FeatureT > > (sift_r);
        std::cout << "Feature Type: " << cast_recog->getFeatureType() << std::endl;
        rr_->addRecognizer (cast_recog);
    }
    if (do_shot)
    {
        boost::shared_ptr<UniformSamplingExtractor<PointT> > uniform_kp_extractor ( new UniformSamplingExtractor<PointT>);
        uniform_kp_extractor->setSamplingDensity (0.01f);
        uniform_kp_extractor->setFilterPlanar (true);
        uniform_kp_extractor->setThresholdPlanar(0.1);
        uniform_kp_extractor->setMaxDistance( 1000.0 ); // for training we want to consider all points (except nan values)

        boost::shared_ptr<KeypointExtractor<PointT> > keypoint_extractor = boost::static_pointer_cast<KeypointExtractor<PointT> > (uniform_kp_extractor);
        boost::shared_ptr<SHOTLocalEstimationOMP<PointT, pcl::Histogram<352> > > estimator (new SHOTLocalEstimationOMP<PointT, pcl::Histogram<352> >(paramLocalEstimator));
        estimator->addKeypointExtractor (keypoint_extractor);

        boost::shared_ptr<LocalEstimator<PointT, pcl::Histogram<352> > > cast_estimator;
        cast_estimator = boost::dynamic_pointer_cast<LocalEstimator<PointT, pcl::Histogram<352> > > (estimator);

        boost::shared_ptr<LocalRecognitionPipeline<flann::L1, PointT, pcl::Histogram<352> > > local;
        local.reset(new LocalRecognitionPipeline<flann::L1, PointT, pcl::Histogram<352> > (paramLocalRecShot));
        local->setDataSource (cast_source);
        local->setTrainingDir(training_dir);
        local->setFeatureEstimator (cast_estimator);
        local->initialize (false);

        uniform_kp_extractor->setMaxDistance( chop_z_ ); // restore the distance limit for recognition (training used no restriction)

        boost::shared_ptr<Recognizer<PointT> > cast_recog;
        cast_recog = boost::static_pointer_cast<LocalRecognitionPipeline<flann::L1, PointT, pcl::Histogram<352> > > (local);
        std::cout << "Feature Type: " << cast_recog->getFeatureType() << std::endl;
        rr_->addRecognizer(cast_recog);
    }


    boost::shared_ptr<GHV<PointT, PointT> > hyp_verification_method (new GHV<PointT, PointT>(paramGHV));
    boost::shared_ptr<HypothesisVerification<PointT,PointT> > cast_hyp_pointer = boost::static_pointer_cast<GHV<PointT, PointT> > (hyp_verification_method);
    rr_->setHVAlgorithm( cast_hyp_pointer );
    rr_->setCGAlgorithm( gcg_alg );

    vis_pc_pub_ = n_->advertise<sensor_msgs::PointCloud2>( "sv_recogniced_object_instances", 1 );
    recognize_  = n_->advertiseService ("sv_recognition", &RecognizerROS::recognizeROS, this);

    it_.reset(new image_transport::ImageTransport(*n_));
    image_pub_ = it_->advertise("sv_recogniced_object_instances_img", 1, true);
}
Example #18
void ModuleServices::estimateSongLength()
{
	SongLengthEstimator estimator(&module);
	estimatedSongLength = estimator.estimateSongLengthInSeconds();
}
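Example #19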
int ImageReranker::cvFindHomography( const CvMat* objectPoints, const CvMat* imagePoints,
                                   CvMat* __H, int method, double ransacReprojThreshold,
                                   CvMat* mask )
{
    const double confidence = 0.995;
    const int maxIters = 400;
    const double defaultRANSACReprojThreshold = 3;
    bool result = false;
    Ptr<CvMat> m, M, tempMask;

    double H[9];
    CvMat matH = cvMat( 3, 3, CV_64FC1, H );
    int count;

    CV_Assert( CV_IS_MAT(imagePoints) && CV_IS_MAT(objectPoints) );

    count = MAX(imagePoints->cols, imagePoints->rows);
    CV_Assert( count >= 4 );
    if( ransacReprojThreshold <= 0 )
        ransacReprojThreshold = defaultRANSACReprojThreshold;

    m = cvCreateMat( 1, count, CV_64FC2 );
    cvConvertPointsHomogeneous( imagePoints, m );

    M = cvCreateMat( 1, count, CV_64FC2 );
    cvConvertPointsHomogeneous( objectPoints, M );

    if( mask )
    {
        CV_Assert( CV_IS_MASK_ARR(mask) && CV_IS_MAT_CONT(mask->type) &&
            (mask->rows == 1 || mask->cols == 1) &&
            mask->rows*mask->cols == count );
    }
    if( mask || count > 4 )
        tempMask = cvCreateMat( 1, count, CV_8U );
    if( !tempMask.empty() )
        cvSet( tempMask, cvScalarAll(1.) );

    CvHomographyEstimator estimator(4);
    if( count == 4 )
        method = 0;

    // The original assert(method == CV_RANSAC) would fire for the minimal
    // 4-point case, where method has just been reset to 0; branch instead.
    if( method == CV_RANSAC )
        result = estimator.runRANSAC( M, m, &matH, tempMask, ransacReprojThreshold, confidence, maxIters );
    else
        result = estimator.runKernel( M, m, &matH ) > 0; // exact solve for 4 points

    if( result && count > 4 )
    {
        icvCompressPoints( (CvPoint2D64f*)M->data.ptr, tempMask->data.ptr, 1, count );
        count = icvCompressPoints( (CvPoint2D64f*)m->data.ptr, tempMask->data.ptr, 1, count );
        M->cols = m->cols = count;
        if( method == CV_RANSAC )
            estimator.runKernel( M, m, &matH );
        estimator.refine( M, m, &matH, 10 );
    }

    if( result )
        cvConvert( &matH, __H );

    if( mask && tempMask )
    {
        if( CV_ARE_SIZES_EQ(mask, tempMask) )
           cvCopy( tempMask, mask );
        else
           cvTranspose( tempMask, mask );
    }

    return (int)result;
}
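A minimal call sketch for the wrapper above (hypothetical data, and a hypothetical default-constructed ImageReranker; assumes OpenCV's legacy C types from calib3d):

void rerankExample()
{
    // Five correspondences packed as 1xN CV_64FC2 headers over stack data
    // (cvMat creates a header without copying).
    double obj_data[10] = { 0,0,  1,0,  1,1,  0,1,  0.5,0.5 };
    double img_data[10] = { 10,10,  20,11,  21,21,  9,20,  15,16 };
    CvMat objectPoints = cvMat( 1, 5, CV_64FC2, obj_data );
    CvMat imagePoints  = cvMat( 1, 5, CV_64FC2, img_data );

    double h[9];
    CvMat H = cvMat( 3, 3, CV_64FC1, h );

    ImageReranker reranker; // assumed default-constructible
    int ok = reranker.cvFindHomography( &objectPoints, &imagePoints, &H,
                                        CV_RANSAC, 3.0, NULL );
    (void)ok; // 1 on success, 0 on failure
}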