Exemple #1
0
// Prepares the SVM training data for this RGB individual: for each channel
// set (B, G, R, combined RGB, and the SVD variant) the MED-based and
// SVD-based descriptor matrices are concatenated side by side and converted
// to CV_32FC1, the element type OpenCV's SVM training expects.
//
// labels: class label written for every training row; per the error message
//         below, 0 = "sospechoso" (suspicious), 1 = "no sospechoso".
// Returns 0 on success, -1 when labels is neither 0 nor 1.
int RGB::prepareDataToTrainSVM (int labels)
{
    hconcat (_V_des_RGB_10_MEDmedBlue,  _V_des_RGB_10_SVDmedBlue,  _B_MED_TO_TRAIN);
    _B_MED_TO_TRAIN.convertTo(_B_MED_TO_TRAIN,CV_32FC1);

    hconcat (_V_des_RGB_10_MEDmedGreen, _V_des_RGB_10_SVDmedGreen, _G_MED_TO_TRAIN);
    _G_MED_TO_TRAIN.convertTo(_G_MED_TO_TRAIN,CV_32FC1);

    hconcat (_V_des_RGB_10_MEDmedRed,   _V_des_RGB_10_SVDmedRed,   _R_MED_TO_TRAIN);
    _R_MED_TO_TRAIN.convertTo(_R_MED_TO_TRAIN,CV_32FC1);

    hconcat (_V_des_RGB_10_MEDmedRGB,   _V_des_RGB_10_SVDmedRGB,   _RGB_MED_TO_TRAIN);
    _RGB_MED_TO_TRAIN.convertTo(_RGB_MED_TO_TRAIN,CV_32FC1);

    hconcat (_V_des_RGB_10_MEDmedSVD,   _V_des_RGB_10_SVDmedSVD,   _RGB_SVD_TO_TRAIN);
    _RGB_SVD_TO_TRAIN.convertTo(_RGB_SVD_TO_TRAIN,CV_32FC1);

    // One label per training sample (one per row of the concatenated data),
    // stored as a single CV_32F row vector.
    if (labels == 1)
    {
        _LABELS_TO_SVM = Mat::ones (1, _RGB_MED_TO_TRAIN.rows,CV_32F);
    }
    else if (labels == 0)
    {
        _LABELS_TO_SVM = Mat::zeros(1, _RGB_MED_TO_TRAIN.rows,CV_32F);
    }
    else
    {
        // Spanish: "labels must be 0 or 1. 0: suspicious, 1: not suspicious".
        cout << "labels debe ser o 0 ó 1. 0: sospechoso, 1: no sospechoso" << endl;
        return -1;
    }
    return 0;
}
Exemple #2
0
// Kok Lim Low's linearization
// Linearizes the point-to-plane ICP error around the identity and solves the
// resulting 6-DoF least-squares problem A * [rpy; t] = b.
//
// Src: one source point per row (first 3 doubles: x, y, z).
// Dst: one destination sample per row; the first 3 doubles are the point and
//      the next 3 (read via ptr + 3) its surface normal.
// rpy: output small-angle rotation (roll/pitch/yaw).
// t:   output translation.
static void minimizePointToPlaneMetric(Mat Src, Mat Dst, Vec3d& rpy, Vec3d& t)
{
  //Mat sub = Dst - Src;
  Mat A = Mat(Src.rows, 6, CV_64F);   // per-point rows [axis | normal]
  Mat b = Mat(Src.rows, 1, CV_64F);   // per-point residuals
  Mat rpy_t;

#if defined _OPENMP
#pragma omp parallel for
#endif
  // Safe to parallelize: iteration i writes only row i of A and b.
  for (int i=0; i<Src.rows; i++)
  {
    const Vec3d srcPt(Src.ptr<double>(i));
    const Vec3d dstPt(Dst.ptr<double>(i));
    const Vec3d normals(Dst.ptr<double>(i) + 3);
    const Vec3d sub = dstPt - srcPt;
    // Rotation part of the linearized constraint: p x n.
    const Vec3d axis = srcPt.cross(normals);

    *b.ptr<double>(i) = sub.dot(normals);
    hconcat(axis.reshape<1, 3>(), normals.reshape<1, 3>(), A.row(i));
  }

  // SVD decomposition tolerates a rank-deficient A (degenerate geometry).
  cv::solve(A, b, rpy_t, DECOMP_SVD);
  rpy_t.rowRange(0, 3).copyTo(rpy);
  rpy_t.rowRange(3, 6).copyTo(t);
}
// Horizontally concatenates two arrays by delegating to the
// pointer + count overload of hconcat().
void cv::hconcat(InputArray src1, InputArray src2, OutputArray dst)
{
    CV_INSTRUMENT_REGION();

    Mat pair[2];
    pair[0] = src1.getMat();
    pair[1] = src2.getMat();
    hconcat(pair, 2, dst);
}
Exemple #4
0
void getHistomPlusColoFeatures(const Mat& image, Mat& features) {
  // TODO
  Mat feature1, feature2;
  getHistogramFeatures(image, feature1);
  getColorFeatures(image, feature2);
  hconcat(feature1.reshape(1, 1), feature2.reshape(1, 1), features);
}
// Trains the kernel-based photo/sketch matcher:
//   1. extracts descriptors for every training photo and sketch,
//   2. builds the gallery (Kg) and probe (Kp) kernel matrices from the
//      first half of the training set,
//   3. computes the regression matrix R = Kg * (Kp' Kp)^-1 * Kp',
//   4. projects the second half of the training pairs to form T2 (one
//      column per projected sample, two samples per identity) and learns
//      a PCA (99% variance) + LDA subspace on it.
void Kernel::compute()
{
  
  int n = trainingPhotos.size();
  
  for(int i=0; i<n; i++){
    trainingPhotosDescriptors.push_back(extractDescriptors(trainingPhotos[i]));
    trainingSketchesDescriptors.push_back(extractDescriptors(trainingSketches[i]));
  }
  
  Kp = Mat::zeros(n/2,n/2,CV_32F);
  Kg = Mat::zeros(n/2,n/2,CV_32F);
  
  // Pairwise kernel values over the first half of the training set.
  for(int i=0; i<n/2; i++)
    for(int j=0; j<n/2; j++){
      Kg.at<float>(i,j) = this->calcKernel(trainingPhotosDescriptors[i], trainingPhotosDescriptors[j]);
      Kp.at<float>(i,j) = this->calcKernel(trainingSketchesDescriptors[i],trainingSketchesDescriptors[j]);
    }
    
    // Least-squares mapping from probe (sketch) to gallery (photo) space.
    R = Kg*((Kp).t()*Kp).inv()*(Kp).t();
  
  vector<int> _classes;
  
  // Second half of the training set: stack projected sketch/photo pairs as
  // columns of T2.  NOTE(review): the first sample is push_back'ed rather
  // than hconcat'ed -- verify this yields the intended column layout for
  // the very first sketch projection.
  for(int i=n/2; i<n; i++){
    if(T2.empty()){
      T2.push_back(this->projectProbeIntern(trainingSketchesDescriptors[i]));
      hconcat(T2,projectGalleryIntern(trainingPhotosDescriptors[i]),T2);
    }
    else{
      hconcat(T2,projectProbeIntern(trainingSketchesDescriptors[i]),T2);
      hconcat(T2,projectGalleryIntern(trainingPhotosDescriptors[i]),T2);
    }
    // Both the sketch and the photo column belong to identity i.
    _classes.push_back(i);
    _classes.push_back(i);
  }
  
  // PCA keeping 99% of the variance, data as columns; LDA on the projected
  // (transposed, rows-as-samples) data.
  this->pca.computeVar(T2, Mat(), CV_PCA_DATA_AS_COL, 0.99);
  this->mean = pca.mean.clone();
  //cout << mean.size() << endl;
  //cout << pca.eigenvectors.size() << endl;
  //cout << T2.size() << endl;
  Mat T2_pca = pca.eigenvectors*T2;
  T2_pca = T2_pca.t();
  //cout << T2_pca.size() << endl; 
  lda.compute(T2_pca, _classes);
  //cout << lda.eigenvectors().size() << endl;
}
// Horizontally concatenates every Mat contained in _src.  An empty input
// vector is forwarded as a null pointer with a zero count, matching the
// contract of the pointer + count overload.
void cv::hconcat(InputArray _src, OutputArray dst)
{
    CV_INSTRUMENT_REGION();

    std::vector<Mat> src;
    _src.getMatVector(src);
    if (src.empty())
        hconcat(static_cast<const Mat*>(0), 0, dst);
    else
        hconcat(&src[0], src.size(), dst);
}
// Assembles the 3x4 camera projection matrix P = [A*R | -(A*R)*C], where A
// is the calibration matrix, R the rotation and C the camera centre.
Mat Camera::computePMatrix() {
    Mat AR = computeCalibrationMatrix() * computeRotationMatrix();
    printMat("A", computeCalibrationMatrix());
    printMat("R", computeRotationMatrix());
    Mat ARC = AR * computeCameraCenterMatrix(false);
    Mat p;
    // mul(-1) is used here to negate ARC element-wise before appending it as
    // the translation column.  NOTE(review): `-ARC` would state the intent
    // directly -- confirm Mat::mul with a scalar argument behaves as a plain
    // negation in this OpenCV version.
    hconcat(AR, ARC.mul(-1), p);
    printMat("P", p);
    return p;
}
Exemple #8
0
// Builds a feature row for a grayscale character image by concatenating the
// (optionally resized / float-converted / mean-centred) pixel values with an
// LBP spatial histogram.  The use* flags below select the preprocessing
// stages; with the hard-coded values the pipeline is: keep size, convert to
// [0,1] float, subtract the mean, then append the LBP histogram.
void getGrayPlusLBP(const Mat& grayChar, Mat& features)
{
  // TODO: check channnels == 1
  SHOW_IMAGE(grayChar, 0);
  SHOW_IMAGE(255 - grayChar, 0);

  // resize to uniform size, like 20x32
  bool useResize = false;
  bool useConvert = true;
  bool useMean = true;
  bool useLBP = true;

  Mat char_mat;
  if (useResize) {
    char_mat.create(kGrayCharHeight, kGrayCharWidth, CV_8UC1);
    resize(grayChar, char_mat, char_mat.size(), 0, 0, INTER_LINEAR);
  }
  else {
    // No resize: char_mat is a shallow header over the caller's image.
    char_mat = grayChar;
  }
  SHOW_IMAGE(char_mat, 0);

  // convert to float
  Mat float_img;
  if (useConvert) {
    float scale = 1.f / 255;
    char_mat.convertTo(float_img, CV_32FC1, scale, 0);
  }
  else {
    float_img = char_mat;
  }
  SHOW_IMAGE(float_img, 0);

  // cut from mean, it can be optional

  Mat mean_img;
  if (useMean) {
    // In-place mean subtraction; note this writes through to char_mat when
    // useConvert is false (shared data).
    float_img -= mean(float_img);
    mean_img = float_img;
  }
  else {
    mean_img = float_img;
  }
  SHOW_IMAGE(mean_img, 0);

  // use lbp to get features, it can be changed to other
  Mat originImage = mean_img.clone();
  Mat lbpimage = libfacerec::olbp(mean_img);
  SHOW_IMAGE(lbpimage, 0);
  lbpimage = libfacerec::spatial_histogram(lbpimage, kCharLBPPatterns, kCharLBPGridX, kCharLBPGridY);

  // 32x20 + 16x16
  // Final row vector: [flattened mean-centred pixels | LBP histogram].
  hconcat(mean_img.reshape(1, 1), lbpimage.reshape(1, 1), features);
}
Exemple #9
0
// Demo driver: trains a one-class SVM on the HSV descriptors of INDIV3 and
// evaluates it against the descriptors of INDIV1 (expected to be rejected,
// hence the all-zero labels).  Paths are relative to the build directory.
int main(int argc, char *argv[])
{
    HSV INDIV1("INDIV1","../../zImages/INDV1","../outHSV");
    HSV INDIV3("INDIV3","../../zImages/INDV3","../outHSV");

    INDIV1.listAllFiles();
    INDIV3.listAllFiles();

    cout << INDIV1._fileList.size() << " Images of " << "INDIV1" << endl;
    cout << INDIV3._fileList.size() << " Images of " << "INDIV3" << endl;

    INDIV1.obtainDescriptors(10);
    INDIV3.obtainDescriptors(10);

    // TRAIN SVM WITH INDIV3
    INDIV3.trainSVMOneClass();

    // TEST SVM
    // Test matrices: MED- and SVD-based descriptors side by side, converted
    // to CV_32FC1 as the SVM expects.
    Mat HSV_SVD;
    hconcat (INDIV1._V_des_HSV_10_MEDmedSVD, INDIV1._V_des_HSV_10_SVDmedSVD, HSV_SVD);
    HSV_SVD.convertTo(HSV_SVD,CV_32FC1);

    Mat HSV_MED;
    hconcat (INDIV1._V_des_HSV_10_MEDmedHSV, INDIV1._V_des_HSV_10_SVDmedHSV, HSV_MED);
    HSV_MED.convertTo(HSV_MED,CV_32FC1);

    // INDIV1 is not the trained individual, so every sample is labelled 0.
    Mat INDIV1Labels = Mat::zeros(1,HSV_MED.rows,CV_32F);

    // Model files written by trainSVMOneClass() above.
    string nameDir = INDIV3._dirOutSVM + "/" + INDIV3._indivName + "_SVM_HSV_SVD.xml";
    const char *cstr = nameDir.c_str();
//    cout << cstr << endl;
    INDIV1.testSVMOneClass(HSV_SVD, cstr, "SVD", INDIV1Labels);

    // c_str() must be re-taken after nameDir is reassigned.
    nameDir = INDIV3._dirOutSVM + "/" + INDIV3._indivName + "_SVM_HSV_MED.xml";
    cstr = nameDir.c_str();
//    cout << cstr << endl;
    INDIV1.testSVMOneClass(HSV_MED, cstr, "MED", INDIV1Labels);

    cout << "END" << endl;
    return 0;
}
Exemple #10
0
    /* Post: compute _model with given points and return number of found models */
    int runKernel( InputArray _m1, InputArray _m2, OutputArray _model ) const
    {
        // Estimate the pose of the object points (_m1) observed as image
        // points (_m2), using the member intrinsics / distortion / flags.
        // (The previous version materialized _m1/_m2 via getMat() into two
        // locals that were never used; those dead copies are removed.)
        bool correspondence = solvePnP( _m1, _m2, cameraMatrix, distCoeffs,
                                            rvec, tvec, useExtrinsicGuess, flags );

        // Pack the rotation and translation vectors side by side into the
        // single model matrix expected by the robust-estimation framework.
        Mat _local_model;
        hconcat(rvec, tvec, _local_model);
        _local_model.copyTo(_model);

        // solvePnP returns a bool: 1 model found on success, 0 otherwise.
        return correspondence;
    }
Exemple #11
0
// normalized value concatenation
// gray patch has problem when contrast of object patch is flipped, gradient is more robust
// Concatenates the available patch channels ("gray", "gradient", "normal",
// "depth") into a single 1-row CV_32F feature vector.  Each present channel
// is flattened with reshape(1,1); subsequent channels are prepended via
// hconcat(channel, feat, feat), so the final layout is the reverse of the
// processing order here.  Always returns true.
bool ObjPatchMatcher::ComputePatchFeat(MatFeatureSet& patches, Mat& feat) {
	feat.release();
	feat.create(0, 0, CV_32F);
	if(patches.find("gray") != patches.end()) {
		Scalar mean_, std_;
		//normalize(patches["gray"], patches["gray"], 1, 0, NORM_MINMAX);
		// NOTE(review): mean_/std_ are computed but unused; the "-0" below is
		// a leftover of a disabled mean/std normalization (see comments).
		meanStdDev(patches["gray"], mean_, std_);
		Mat patch_ = (patches["gray"]-0);///std_.val[0];
		if(feat.empty()) patch_.reshape(1,1).copyTo(feat);
		else hconcat(patch_.reshape(1, 1), feat, feat);
	}
	if(patches.find("gradient") != patches.end()) {
		normalize(patches["gradient"], patches["gradient"], 1, 0, NORM_MINMAX);
		Scalar mean_, std_;
		meanStdDev(patches["gradient"], mean_, std_);
		Mat patch_ = (patches["gradient"]-0);///std_.val[0];
		if(feat.empty()) patch_.reshape(1,1).copyTo(feat);
		else hconcat(patch_.reshape(1, 1), feat, feat);
	}
	if(patches.find("normal") != patches.end()) {
		Scalar mean_, std_;
		meanStdDev(patches["normal"], mean_, std_);
		//Mat patch_ = (patches["normal"]-mean_.val);
		if(feat.empty()) patches["normal"].reshape(1,1).copyTo(feat);
		else hconcat(patches["normal"].reshape(1,1), feat, feat);
	}
	if(patches.find("depth") != patches.end()) {
		//if(feat.empty()) patches["normal"].reshape(1,1).copyTo(feat);
		//else 
		Scalar mean_, std_;
		//normalize(patches["depth"], patches["depth"], 1, 0, NORM_MINMAX);
		// Depth is the only channel that is mean-centred before use.
		meanStdDev(patches["depth"], mean_, std_);
		Mat patch_ = (patches["depth"]-mean_.val[0]);///std_.val[0];
		// NOTE(review): unlike the other branches this hconcat's without an
		// empty-feat guard -- confirm "depth" never appears as the only key.
		hconcat(patch_.reshape(1,1), feat, feat);
	}

	return true;
}
// Computes a column-symmetry score map for the image (serial variant).
// For every candidate mirror axis i in the left half, the columns left of
// the axis are flipped onto the columns right of it, the overlapped columns
// are summed, and the per-column sums are stored into score.col(i).  The
// image and the score map are then flipped horizontally and the same pass
// scores axes in the right half.  Odd-width input is padded by duplicating
// the last column so the width is even.
Mat calculateImageSymmetryScore(const Mat& image) {
    Mat gray_img ;
    
    if (image.channels() == 3) {
        cvtColor(image, gray_img, CV_BGR2GRAY);
    } else {
        gray_img = image.clone();
    }
    
//    gray_img = magicEqualHist(gray_img);
//    imshow("histed",gray_img);
    if (gray_img.cols %2 == 1) {
        hconcat(gray_img, gray_img.col(gray_img.cols-1), gray_img);
    }
    
    gray_img.convertTo(gray_img, CV_32FC1);

    // score is cols x cols: column i holds the reduced sums for axis i.
    Mat score(gray_img.cols,gray_img.cols,CV_32FC1,Scalar::all(0));
    Mat temp(1,1,CV_32FC1), base(1,1,CV_32FC1),colsum(1,1,CV_32FC1);
    
    
    // First pass: axes in the left half of the image.
    for (int i = 0 ; i < gray_img.cols/2; i++) {
        temp = gray_img.colRange(0, i+1).clone();
        flip(temp, temp, 1);
        base = gray_img.colRange(i+1, gray_img.cols).clone();
        // Overlay the mirrored left part onto the right part.
        base.colRange(0, temp.cols) += temp;
        reduce(base, colsum, 0, CV_REDUCE_SUM,CV_32FC1);
        colsum =  colsum.t();
        colsum.copyTo(score.colRange(i, i+1).rowRange(0, colsum.rows));
    }
    
    // Second pass: mirror everything and score axes in the right half.
    flip(score, score, 1);
    flip(gray_img, gray_img, 1);
    
    for (int i = 0 ; i < gray_img.cols/2; i++) {
        temp = gray_img.colRange(0, i+1).clone();
        flip(temp, temp, 1);
        base = gray_img.colRange(i+1, gray_img.cols).clone();
        base.colRange(0, temp.cols) += temp;
        reduce(base, colsum, 0, CV_REDUCE_SUM,CV_32FC1);
        colsum =  colsum.t();
        colsum.copyTo(score.colRange(i, i+1).rowRange(0, colsum.rows));
    }
    
    // Undo the mirroring so score columns line up with the original image.
    flip(score, score, 1);
    gray_img.release();
    base.release();
    temp.release();
    return score;
}
Exemple #13
0
// Circularly shifts matrix x by k = (width, height) in place, in the spirit
// of MATLAB's circshift.  A negative component shifts by |k| directly; a
// positive component is converted to the equivalent (size - k) split point.
void CircShift(Mat &x, Size k){
  int cx, cy;
  if (k.width < 0)
    cx = -k.width;
  else
    cx = x.cols - k.width;

  if (k.height < 0)
    cy = -k.height;
  else
    cy = x.rows - k.height;

  Mat q0(x, Rect(0, 0, cx, cy));   // Top-Left - Create a ROI per quadrant
  Mat q1(x, Rect(cx, 0, x.cols - cx, cy));  // Top-Right
  Mat q2(x, Rect(0, cy, cx, x.rows - cy));  // Bottom-Left
  Mat q3(x, Rect(cx, cy, x.cols - cx, x.rows - cy)); // Bottom-Right

  Mat tmp1, tmp2;                           // swap quadrants (Top-Left with Bottom-Right)
  // hconcat copies the quadrant data out of x, so reassembling into x via
  // vconcat is safe even though q0..q3 alias x's storage.
  hconcat(q3, q2, tmp1);
  hconcat(q1, q0, tmp2);
  vconcat(tmp1, tmp2, x);

}
Exemple #14
0
//Transforms::Transforms(Mat &R, Mat &T, double baseline) :_baseline(baseline) {
// Loads the camera and camera->dartboard extrinsics from their YAML files
// and builds the two homogeneous 4x4 transforms used by the pipeline.
//
// baseline: stereo baseline stored in _baseline.
// cam_extrinsics_filename: YAML with "R"/"T" of the right->left camera pose.
// cam_dartboard_extrinsics_filename: YAML with "R"/"T" of the left camera
//                                    -> dartboard pose.
Transforms::Transforms(double baseline, string cam_extrinsics_filename, string cam_dartboard_extrinsics_filename) :_baseline(baseline) {
  // add baseline, configuration in a yaml file
  // BUG FIX: the camera extrinsics were previously opened with
  // cam_dartboard_extrinsics_filename, so both transforms were read from the
  // same file and cam_extrinsics_filename was silently ignored.
  FileStorage cam_extrinsics(cam_extrinsics_filename, FileStorage::READ);
  FileStorage cam_dartboard_extrinsics(cam_dartboard_extrinsics_filename, FileStorage::READ);

  Mat cR, cT, cdR, cdT;
  cam_extrinsics["R"] >> cR;
  cam_extrinsics["T"] >> cT;
  cam_dartboard_extrinsics["R"] >> cdR;
  cam_dartboard_extrinsics["T"] >> cdT;

  Mat scratch;
  // Homogeneous bottom row [0 0 0 1]; Mat wraps the stack array, which is
  // only used within this constructor.
  double h[4] = {0,0,0,1};
  Mat H = Mat(1,4, CV_64FC1, &h);

  // Set up camera extrinsics: [R | T] over [0 0 0 1].
  hconcat(cR, cT, scratch);
  vconcat(scratch, H, _rightToLeftCamTransform);

  // Set up cam dartboard extrinsics
  hconcat(cdR, cdT, scratch);
  vconcat(scratch, H, _leftCamToDartboardTransform);
}
/* Step 0: Check square matrix 
 * check if CostMatrix is square, if not, we have to add dummy row/col with their
 * entries are 0
 */
void HungarianAlgorithm::step_zero(int* step){ 
	
//	showCostMatrix(step);
//	adding row
 	if(CostMatrix.cols > CostMatrix.rows){ 
		Mat ZeroRow = Mat(1,CostMatrix.cols,CV_64FC1) ; 
		vconcat(CostMatrix,ZeroRow,CostMatrix); 
	}
	if(CostMatrix.cols < CostMatrix.rows){ 
		Mat ZeroCol = Mat(CostMatrix.rows,1,CV_64FC1); 
		hconcat(CostMatrix,ZeroCol,CostMatrix); 
	}
	showCostMatrix(step);
	*step = 1; 	 

}
Exemple #16
0
// Builds the combined appearance model: for every training sample the
// weighted shape displacement (deltaS.row(i) * w) is concatenated with its
// texture parameters (b_g.row(i)) into one appearance row.  PCA over the
// stacked rows yields the appearance basis, and cSet stores each sample
// projected into that basis.
void AAM::makeApearanceModel()
{
    for(int i=0; i<deltaS.rows; i++)
    {
        // w balances shape units against texture units in the joint vector.
        Mat shapeParams=deltaS.row(i)*this->w;
        Mat textureParams=b_g.row(i);
        Mat appearanceRow;
        // hconcat requires matching element types.
        shapeParams.convertTo(shapeParams, textureParams.type());
        //cout<<"shape: "<<shapeParams.type()<<"  "<<shapeParams.rows<<"  "<<shapeParams.dims<<endl;
        //cout<<"texture: "<<textureParams.type()<<"  "<<textureParams.rows<<"  "<<textureParams.dims<<endl;
        hconcat(shapeParams, textureParams, appearanceRow);
        this->appearance.push_back(appearanceRow);
    }
    cout<<"appearance: "<<this->appearance.row(0)<<endl;
    this->appearancePCA(appearance, Mat(), CV_PCA_DATA_AS_ROW);
    this->cSet=appearancePCA.project(appearance);
    cout<<"cSet: "<<cSet.row(0)<<endl;
}
// Computes a per-column symmetry score map of the image (parallel variant).
// Color input is reduced to grayscale first; an odd width is made even by
// mirroring the last column, and the scoring itself runs in parallel via
// the SymmetryScore_tbb body.
Mat calculateImageSymmetryScore(const Mat& image) {
    Mat gray;
    if (image.channels() != 3)
        gray = image.clone();
    else
        cvtColor(image, gray, CV_BGR2GRAY);

    // The symmetry kernel expects an even width: duplicate the last column.
    if (gray.cols % 2 != 0)
        hconcat(gray, gray.col(gray.cols - 1), gray);

    gray.convertTo(gray, CV_32FC1);

    Mat score(gray.cols, gray.cols, CV_32FC1, Scalar::all(0));
    parallel_for_(Range(0, gray.cols), SymmetryScore_tbb(gray, score));

    return score;
}
Exemple #18
0
// Rearranges (possibly multi-channel) image blocks into columns, mimicking
// MATLAB's im2col.  Each channel plane is processed by im2colstep() and the
// per-channel block matrices are concatenated side by side; the final
// transpose puts one block per column of the returned matrix.
//
// blockSize / stepSize: {rows, cols} of the sliding block and its stride
// (both must have exactly two entries, enforced by the assert).
cv::Mat im2col_general(cv::Mat& InImg, vector<int>& blockSize, vector<int>& stepSize){
	assert(blockSize.size() == 2 && stepSize.size() == 2); 
	
	int channels = InImg.channels(); 
	vector<cv::Mat> layers;
	
	// Split a multi-channel image into planes; a single-channel image is
	// used as-is.
	if(channels > 1)
		split(InImg, layers);
	else
		layers.push_back(InImg);
		
	cv::Mat AllBlocks = im2colstep(layers[0], blockSize, stepSize);
	
	// size_t index avoids the signed/unsigned comparison warning of the
	// original int loop; behavior is otherwise unchanged.
	for (size_t i = 1; i < layers.size(); ++i){ 
	 	hconcat(AllBlocks, im2colstep(layers[i], blockSize, stepSize), AllBlocks); 
	} 
	
	return AllBlocks.t();
}
Exemple #19
0
// Builds a feature row by concatenating an LBP spatial histogram with a
// projection histogram of the Otsu-binarized character image.
//
// FIX: the original converted the input to grayscale twice, into identical
// `grayImage` and `greyImage` buffers; the second conversion is removed and
// the single grayscale image is reused (olbp does not modify its input).
void getLBPplusHistFeatures(const Mat& image, Mat& features) {
  Mat grayImage;
  cvtColor(image, grayImage, CV_RGB2GRAY);

  // Local binary pattern image -> spatial histogram (64 patterns, 8x4 grid).
  Mat lbpimage = libfacerec::olbp(grayImage);
  Mat lbp_hist = libfacerec::spatial_histogram(lbpimage, 64, 8, 4);

  // Otsu binarization, then a histogram over the thresholded image.
  Mat img_threshold;
  threshold(grayImage, img_threshold, 0, 255,
    CV_THRESH_OTSU + CV_THRESH_BINARY);
  Mat histomFeatures = getHistogram(img_threshold);

  // Final feature vector: [lbp_hist | histomFeatures] as one row.
  hconcat(lbp_hist.reshape(1, 1), histomFeatures.reshape(1, 1), features);
}
Exemple #20
0
// Builds a feature row by concatenating an LBP spatial histogram with a
// projection histogram of the Otsu-binarized character image.
//
// FIX: the original converted the input to grayscale twice, into identical
// `grayImage` and `greyImage` buffers; the redundant second conversion is
// removed and the single grayscale image is reused (olbp does not modify
// its input).
void getLBPplusHistFeatures(const Mat& image, Mat& features) {
  Mat grayImage;
  cvtColor(image, grayImage, CV_RGB2GRAY);

  // Local binary pattern image -> spatial histogram (64 patterns, 8x4 grid).
  Mat lbpimage = libfacerec::olbp(grayImage);
  Mat lbp_hist = libfacerec::spatial_histogram(lbpimage, 64, 8, 4);

  // Otsu binarization, then a histogram over the thresholded image.
  Mat img_threshold;
  threshold(grayImage, img_threshold, 0, 255,
    CV_THRESH_OTSU + CV_THRESH_BINARY);
  Mat histomFeatures = getHistogram(img_threshold);

  // Final feature vector: [lbp_hist | histomFeatures] as one row.
  hconcat(lbp_hist.reshape(1, 1), histomFeatures.reshape(1, 1), features);
}
// Triangulates the initial 3D points from the first two views.
//
// R, t:       relative pose of the second camera (converted to CV_32F in
//             place, since they are passed by value).
// points_1/2: matched 2D points in view 1 / view 2.
// points4D:   output homogeneous points (4 x N).
// mask3D:     per-view 2D track table; views >= 2 get the (-1,-1) sentinel.
//
// NOTE(review): relies on file-scope globals camIntrinsic (intrinsics),
// projMatr0 (projection of the first camera) and m (number of views),
// presumably defined elsewhere in this file -- verify before reuse.
void triangulate_init( cv::Mat R, cv::Mat t,const std::vector< cv::Point2f > points_1,
        const std::vector< cv::Point2f > points_2,cv::Mat& points4D,std::vector< cv::Point2f > mask3D[]){

    cv::Mat projMatr(3,4,CV_32F);

    cv::Mat Rt(3,4,CV_32F);  // Rt = [R | t]
    R.convertTo(R,CV_32F);
    t.convertTo(t,CV_32F);
    hconcat(R, t, Rt);  // Rt concatenate
    // Second camera projection: K * [R | t].
    projMatr = camIntrinsic * Rt;

    points4D=cv::Mat_<float>(4,points_1.size());
    cv::triangulatePoints(projMatr0, projMatr, points_1, points_2, points4D);
    // Record the 2D observations for views 0 and 1; pad later views with a
    // sentinel coordinate meaning "not observed yet".
    cv::Point2f x(-1,-1);
    for(int i = 0;i<points4D.cols;i++){
        mask3D[0].push_back(points_1[i]);
        mask3D[1].push_back(points_2[i]);
        for (int j=2;j<m;j++)
            mask3D[j].push_back(x);
    }

}
Exemple #22
0
void
P_From_KRt(const Matx33d &K, const Matx33d &R, const Vec3d &t, Matx34d &P)
{
    hconcat( K*R, K*t, P );
}
// Locates an eye centre by iterative k-means colour segmentation.
// Pixels are clustered in (B,G,R,x,y) space; each iteration keeps only the
// darker of two colour clusters, and pixels that survive many rounds vote
// into index_img.  A horizontal symmetry score over the zoomed, gray-
// weighted vote map gives the x coordinate; the y coordinate is the mass
// centre of the vote layer whose symmetry axis best matches it.
//
// coordinateWeight: weight of the (x,y) terms appended to the colour
//   features (trades colour compactness vs. spatial compactness).
// kmeansIterations / kmeansRepeats: outer refinement rounds / k-means
//   restarts per round.  blurSize: median-blur kernel and erosion size.
void findEyeCenterByColorSegmentation(const Mat& image, Point2f & eyeCoord, float coordinateWeight, int kmeansIterations, int kmeansRepeats, int blurSize)  {
    
    Mat img, gray_img;
    Mat colorpoints, kmeansPoints;
    
    img = equalizeImage(image);
    
    medianBlur(img, img, blurSize);
    // Inverted grayscale: dark (pupil) pixels become bright weights.
    cvtColor(image, gray_img, CV_BGR2GRAY);
    gray_img = imcomplement(gray_img);
    // One row per pixel, one column per colour channel.
    vector<Mat> layers(3);
    split(img, layers);
    for (int i = 0 ; i < layers.size(); i++) {
        layers[i] = layers[i].reshape(1,1).t();
    }
    hconcat(layers, colorpoints);
    
    // add coordinates
    colorpoints.convertTo(colorpoints, CV_32FC1);
    Mat coordinates = matrixPointCoordinates(img.rows,img.cols,false) *coordinateWeight;
    hconcat(colorpoints, coordinates, kmeansPoints);
    
    // locIndex maps each surviving sample back to its original pixel index;
    // column 0 is initialized with 0..area-1, later columns track survivors.
    Mat locIndex(img.size().area(),kmeansIterations,CV_32FC1,Scalar::all(-1));
    linspace(0, img.size().area(), 1).copyTo(locIndex.col(0));
    Mat index_img(img.rows,img.cols,CV_32FC1,Scalar::all(0));
    Mat bestLabels, centers, clustered , colorsum , minColorPtIndex;
    for(int it = 1 ; it < kmeansIterations ; it++) {
        if (kmeansPoints.rows < 2) {
            break;
        }
        // 2-way split; keep the cluster with the smaller colour sum (darker).
        kmeans(kmeansPoints,2,bestLabels,TermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, kmeansRepeats, 0.001),kmeansRepeats,KMEANS_PP_CENTERS,centers);
        reduce(centers.colRange(0, 3), colorsum, 1, CV_REDUCE_SUM);
        
        if (colorsum.at<float>(0) < colorsum.at<float>(1)) {
            
            findNonZero(bestLabels==0, minColorPtIndex);
        }
        else {
            findNonZero(bestLabels==1, minColorPtIndex);
        }
        
        // findNonZero returns (x,y) pairs; the row index is in column 1.
        minColorPtIndex = minColorPtIndex.reshape(1).col(1);
        
        // Propagate the original pixel indices of the surviving samples.
        for (int  i = 0; i <minColorPtIndex.rows ; i ++) {
            locIndex.at<float>(i,it) = locIndex.at<float>(minColorPtIndex.at<int>(i),it-1);
        }
        // Shrink the sample set to the surviving cluster.
        Mat temp;
        for (int  i = 0; i <minColorPtIndex.rows ; i ++) {
            temp.push_back(kmeansPoints.row(minColorPtIndex.at<int>(i)));
        }
        temp.copyTo(kmeansPoints);
        temp.release();
        // Each survivor votes at its original image location.
        for (int i = 0 ; i < minColorPtIndex.rows ; i ++) {
            int r, c;
            ind2sub(locIndex.at<float>(i,it), index_img.cols, index_img.rows, r, c);
            index_img.at<float>(r,c) +=1;
        }
    }
    //    imagesc("layered",mat2gray(index_img));
    // Square the votes to emphasize persistent pixels, then weight by the
    // inverted grayscale so dark regions dominate.
    Mat layerweighted_img = index_img.mul(index_img);
    layerweighted_img = mat2gray(layerweighted_img);
    gray_img.convertTo(gray_img, CV_32FC1,1/255.0);
    Mat composed  = gray_img.mul(layerweighted_img);
    // Zoom for sub-pixel axis resolution in the symmetry score.
    float zoomRatio = 5.0f;
    Mat zoomed;
    imresize(composed, zoomRatio, zoomed);
    Mat score = calculateImageSymmetryScore(zoomed);
//    imagesc("score", score);
    Mat scoresum;
    reduce(score.rowRange(0, zoomed.cols/6), scoresum, 0, CV_REDUCE_SUM,CV_32FC1);
//    plotVectors("scoresum", scoresum.t());
    double minVal , maxVal;
    Point minLoc, maxLoc;
    // Initial horizontal centre: the most symmetric column (unzoomed units).
    minMaxLoc(scoresum,&minVal,&maxVal,&minLoc,&maxLoc);
    float initialHC = (float)maxLoc.x/zoomRatio;
    line(zoomed, Point(maxLoc.x,0), Point(maxLoc.x,zoomed.rows-1), Scalar::all(255));
//    imshow("zoomed", zoomed);
    // Pick the vote-count layer whose own symmetry axis is closest to the
    // initial centre, subject to a plausible pupil-area range.
    int bestx = 0,bestlayer = 0;
    Mat bestIndex_img = index_img >=1;
    minMaxLoc(index_img,&minVal,&maxVal,&minLoc,&maxLoc);
    for (int i = 1 ; i<=maxVal; i++) {
        Mat indexlayer_img = index_img >=i;
        medianBlur(indexlayer_img, indexlayer_img, 5);
        erode(indexlayer_img, indexlayer_img, blurSize);
        erode(indexlayer_img, indexlayer_img, blurSize);
        indexlayer_img = removeSmallBlobs(indexlayer_img);
        
        indexlayer_img = fillHoleInBinary(indexlayer_img);
        indexlayer_img = fillConvexHulls(indexlayer_img);
        Mat score = calculateImageSymmetryScore(indexlayer_img);
        Mat scoresum;
        reduce(score.rowRange(0, indexlayer_img.cols/6), scoresum, 0, CV_REDUCE_SUM,CV_32FC1);
        minMaxLoc(scoresum,&minVal,&maxVal,&minLoc,&maxLoc);
        if (abs(maxLoc.x - initialHC) < abs(bestx - initialHC)) {
            
            // Area sanity check: between 1/6 and 2/5 of the patch.
            if (sum(indexlayer_img)[0]/255 < indexlayer_img.size().area()/5*2 &&
                sum(indexlayer_img)[0]/255 > indexlayer_img.size().area()/6) {
                bestx = maxLoc.x;
                bestlayer = i;
                bestIndex_img = indexlayer_img.clone();
                
            }
            
        }
    }
    
    Point2f massCenter = findMassCenter_BinaryBiggestBlob(bestIndex_img);
    
    
    // x from the symmetry axis, y from the blob mass centre.
    eyeCoord =  Point2f(initialHC,massCenter.y);
}
Exemple #24
0
// Demo / smoke test of the tmat matrix template: exercises construction,
// determinants, arithmetic and unary operators, concatenation, row
// deletion, and string/integer element types.
int main()
{
//#ifdef _DEBUG
//	int iOrg = _CrtSetDbgFlag(_CRTDBG_REPORT_FLAG);	
//	_CrtSetDbgFlag(iOrg | _CRTDBG_LEAK_CHECK_DF);
//#endif
	
	cout << "tmat demo program" << endl;

	// build two 16 element double arrays
	double aa[] = { 1, 2, 3, 4,
				    4, 1, 2, 3,
					3, 4, 1, 2,
					2, 3, 4, 1 };

	double bb[] = { 3, 4, 5, 6,
		            6, 3, 4, 5,
					5, 6, 3, 4,
					4, 5, 6, 3 };

	// instantiate arrays as 4 x 4 matrices
	tmat<double> A(4,4,aa); cout << A << endl;
	tmat<double> B(4,4,bb); cout << B << endl;

	// check determinants
	cout << "det(A) = " << det(A) << endl;
	cout << "det(B) = " << det(B) << endl << endl;

	// exercise the matrix operators
	tmat<double> C;
	C = A + B; cout << "A + B =" << endl << C << endl;
	C = A - B; cout << "A - B =" << endl << C << endl;
	C = A * B; cout << "A * B =" << endl << C << endl;

	// exercise unary operators
	C = transpose(A); cout << "transpose(A) =" << endl << C << endl;
	C = inv(A); cout << "inv(A) =" << endl << C << endl;

	// check error flag
	// hconcat/vconcat return the side-by-side / stacked concatenation;
	// delrow removes row 5 from the stacked result.
	C = hconcat(A, B); cout << "A | B =" << endl << C << endl;
	C = vconcat(A, B); cout << "A , B =" << endl << C << endl;
//	C = A + C; // error here
	delrow(C,5);cout << "A , B row=" << endl << C << endl;

	// string matrices
	// tmat indices appear to be 1-based here.
	tmat<string> S(3,3);  
	S(1,1) = "an";  S(1,2) = "to";  S(1,3) = "if";
	S(2,1) = "or";	S(2,2) = "no";	S(2,3) = "on";
	S(3,1) = "be";	S(3,2) = "am";  S(3,3) = "as";
	cout << S << endl;

	S = transpose(S); cout << S << endl;

	// integer matrices
	tmat<int> H(3,3);
	H(1,1) = 1;  H(1,2) = 0; H(1,3) = 1;
	H(2,1) = 0;  H(2,2) = 1; H(2,3) = 1;
	H(3,1) = 1;  H(3,2) = 0; H(3,3) = 0;
	cout << H << endl;

	// element-wise binary (mod-2) addition with the transpose
	H = bin_add(H, transpose(H));  cout << H << endl;
		


	cout << "end program" << endl;

	return 0;

}// main()
Exemple #25
0
// Fits the Active Appearance Model to an image and returns the located
// landmark points.  Starting from the mean shape, each iteration samples
// the texture under the current shape, projects the shape/texture residual
// into appearance space, predicts a parameter correction via the trained
// regression matrix A, and line-searches the step size k until the texture
// reconstruction error stops decreasing.  Runs a fixed 30 iterations.
vector<Point> AAM::findPoints(Mat image)
{
    float k=10;
    int i=0;
    Mat convHull = this->createConvexHull(this->meanPoints);
    Mat points;
    cout<<"mean shape"<<this->meanShape<<endl;
    this->meanShape.copyTo(points);
    cout<<"initial shape"<<points<<endl;
    Mat deltaG;
    double normE0;
    Mat c;
    do
    {
        cout<<"iteration "<<i<<endl;
        cout<<"start Points: "<<points<<endl;
        k=1;
        //this->drawPoints(image, points, QString::number(i).toStdString());
        // Texture residual against the mean texture.
        Mat textureSampled=this->getTetureInShape(image, points);
        textureSampled.convertTo(textureSampled, CV_32S);
        this->meanTexture.convertTo(meanTexture, CV_32S);
        Mat deltaT = textureSampled - this->meanTexture;
        points.convertTo(points, meanShape.type());
        Mat deltaPoint = points-meanShape;
       // cout<<"deltaPoint: "<<deltaPoint<<endl;
        // w balances shape units against texture units (as in training).
        deltaPoint=deltaPoint*this->w;
       // cout<<"deltaPoint: "<<deltaPoint<<endl;
        Mat b=this->textureVariationPCA.project(deltaT);
       // cout<<"b for texture: "<<b<<endl;
        // Joint appearance vector [weighted shape | texture params] -> c.
        Mat ap;
        deltaPoint.convertTo(deltaPoint, b.type());
        hconcat(deltaPoint, b, ap);
       // cout<<"appearance: "<<ap<<endl;
        c= this->appearancePCA.project(ap);
       // cout<<"c: "<<c<<endl;
        Mat modelAp=this->appearancePCA.backProject(c);
       // cout<<"model appearance: "<<modelAp<<endl;
        // Extract the texture-parameter columns of the back-projected
        // appearance via the legacy CvMat column-range API.
        CvMat bModelMat;
        CvMat modelApMat=modelAp;
        cvGetCols(&modelApMat, &bModelMat, this->shapeSet.cols, this->shapeSet.cols + this->b_g.cols);
        Mat bModel=cvarrToMat(&bModelMat);
        bModel.convertTo(bModel, CV_32F);
       // cout<<"b model"<<bModel<<endl;
        Mat deltaGModel=this->textureVariationPCA.backProject(bModel);
       // cout<<"delta g model: "<<deltaGModel<<endl;
        deltaGModel.convertTo(deltaGModel, this->meanTexture.type());
        Mat textureModel=this->meanTexture + deltaGModel;
        // Round-trip through CV_8U clamps the texture to valid pixel range.
        textureModel.convertTo(textureModel, CV_8U);
        textureModel.convertTo(textureModel, CV_32S);
       // cout<<"texture model: "<<textureModel<<endl;
        Mat modelShape=this->getShapeParamsFromC(c);
        modelShape.convertTo(modelShape, this->meanShape.type());
        modelShape=meanShape + modelShape;
        // Keep coordinates inside the image.
        modelShape=this->cutToRange(modelShape, 0, this->ImageHeight);
       // cout<<"model shape: "<<modelShape<<endl;
        Mat realTexture=this->getTetureInShape(image, modelShape);
        realTexture.convertTo(realTexture, textureModel.type());
       // cout<<"real texture"<<realTexture<<endl;
        Mat deltaI=realTexture-textureModel;
       // cout<<"deltaI: "<<deltaI<<endl;
        deltaI.convertTo(deltaI, CV_32F);
        // Predicted appearance-parameter correction from the residual.
        Mat deltaC=deltaI*this->A;
        cout<<"delta c"<<deltaC<<endl;
        c.convertTo(c, deltaC.type());
        normE0=this->euqlidesNorm(deltaI);
        double normE=normE0;
        Mat newC;
        // Line search on the step size k (halved each try).
        // (translated) check whether deltaG is smaller than deltaG0
        while(normE0<=normE && k>0.0001)
        {
           // cout<<"k "<<k<<endl;
            newC=c+k*deltaC;
            cout<<"newc: "<<newC<<endl;
            Mat shapeParams=this->getShapeParamsFromC(newC);
            Mat textureParams=this->getTextureParamsFromC(newC);
            Mat textureDelta=this->textureVariationPCA.backProject(textureParams);
            textureDelta.convertTo(textureDelta, meanTexture.type());
            Mat texture=meanTexture + textureDelta;
            shapeParams.convertTo(shapeParams, meanShape.type());
            points=meanShape + shapeParams;
            points=this->cutToRange(points, 0, this->ImageHeight);
            Mat realTexture=this->getTetureInShape(image, points);
            texture.convertTo(texture, realTexture.type());
            deltaI=realTexture - texture;
            normE=this->euqlidesNorm(deltaI);
            k=k/2;
        }
        i++;
        newC.copyTo(c);
        normE0=normE;
        cout<<"points: "<<points<<endl;
        cout<<"error: "<<normE0<<endl;
    }
    // (translated) check whether the deltaG error is below the maximum error
    while(i<30);
    this->drawPoints(image, points, "final points");
    cout<<"final points: "<<points<<endl;
    cout<<"mean:         "<<this->meanShape<<endl;
    return this->convertMatToPoints(points);
}
Exemple #26
0
// Learns the regression matrix A mapping texture residuals to appearance-
// parameter corrections.  For every training image, 100 random shape
// perturbations are generated; each yields a known parameter displacement
// dc and the texture difference dI it produces.  Stacking all (dI, dc)
// pairs, A is obtained as the linear least-squares fit deltaI -> deltaC.
void AAM::countA()
{
    Mat deltaC;
    Mat deltaI;
    for(int i=0; i<this->shapeSet.rows; i++)
    {

        for(int j=0; j<100; j++)
        {
            // Random shape perturbation (up to 40 units per coordinate).
            Mat ds=randomPointsMat(40, this->shapeSet.cols);
            cout<<"random ds: "<<ds<<endl;
            ds.convertTo(ds, this->shapeSet.type());
            Mat newShape=ds+this->shapeSet.row(i);
            newShape.convertTo(newShape, meanShape.type());
            Mat newShapeParams=newShape-meanShape;
            // w balances shape units against texture units.
            newShapeParams=newShapeParams*this->w;
            Mat image_grey;
            cvtColor(this->images[i], image_grey, COLOR_BGR2GRAY);
            Mat newTexture=this->getTetureInShape(image_grey, newShape);
            newTexture.convertTo(newTexture, meanTexture.type());
            Mat newDeltaTexture=newTexture - meanTexture;
            Mat newTextureParams=this->textureVariationPCA.project(newDeltaTexture);
            // Joint appearance vector for the perturbed sample -> c.
            Mat newAp;
            newShapeParams.convertTo(newShapeParams, newTextureParams.type());
            hconcat(newShapeParams, newTextureParams, newAp);
            cout<<"new appearance: " <<newAp<<endl;
            Mat c=this->appearancePCA.project(newAp);
            cout<<"new c: "<<c<<endl;
            c.convertTo(c, cSet.type());
            // Known parameter displacement caused by the perturbation.
            Mat dc=this->cSet.row(i)-c;
            cout<<"dc: "<<dc<<endl;

            // Reconstruct the model texture/shape implied by c, using the
            // legacy CvMat column-range API to slice the appearance vector.
            Mat ap = this->appearancePCA.backProject(c);
            CvMat apMat=ap;
            cout<<"appearance: "<<ap<<endl;
            CvMat shapeParams;
            cvGetCols(&apMat, &shapeParams, 0, shapeSet.cols);
            cout<<"shape: "<<endl;
            Mat shape=cvarrToMat(&shapeParams);
            cout<<"shapeParams: "<<shape<<endl;
            // Undo the shape weighting applied before projection.
            shape=shape/this->w;
            cout<<"shapeParams scaled: "<<shape<<endl;
            CvMat textureParams;
            cvGetCols(&apMat, &textureParams, shapeSet.cols, shapeSet.cols+this->b_g.cols);
            Mat texture=cvarrToMat(&textureParams);
            cout<<"texture Params: "<<texture<<endl;
            shape.convertTo(shape, meanShape.type());
            Mat modelShape=this->meanShape + shape;
            modelShape=this->cutToRange(modelShape, 0, this->ImageHeight);
            cout<<"modelShape: "<<modelShape<<endl;
            Mat backProjectShape=this->textureVariationPCA.backProject(texture);
            backProjectShape.convertTo(backProjectShape, meanTexture.type());
            Mat modelTexture=this->meanTexture + backProjectShape;
            // Round-trip through CV_8U clamps to the valid pixel range.
            modelTexture.convertTo(modelTexture, CV_8U);
            modelTexture.convertTo(modelTexture, CV_32S);
            //cout<<"modelTexture: "<<modelTexture<<endl;
            // Texture actually observed under the model shape.
            Mat warped=this->warpImage(this->images[i], modelShape);
            cvtColor(warped, image_grey, COLOR_BGR2GRAY);
            Mat convHull = this->createConvexHull(this->meanPoints);
            Mat realTexture=this->getTextureInsideHull(image_grey, convHull);
            Mat realTextureSampled=this->sampleMat(realTexture, sampling);
            normalize(realTextureSampled, realTextureSampled, 0, 255, NORM_MINMAX);
            cout<<realTexture.cols<<" "<<realTextureSampled.cols<<"  "<<modelTexture.cols<<endl;
            realTextureSampled.convertTo(realTextureSampled, modelTexture.type());
            Mat dI=realTextureSampled-modelTexture;
            // Accumulate one training pair per perturbation.
            deltaC.push_back(dc);
            deltaI.push_back(dI);
        }
    }
    cout<<"deltaI : " <<deltaI<<endl;
    cout<<"deltaC: "<<deltaC<<endl;
    this->A=this->findLinearAproximation(deltaI, deltaC);
    // Quick sanity check: compare A's prediction for the first sample
    // against its known displacement.
    Mat deltaItest;
    deltaI.row(0).convertTo(deltaItest, A.type());
    cout<<"delta I row: "<<deltaItest.row(0)<<endl;
    cout<<"delta c row: "<<deltaC.row(0)<<endl;
    //cout<<this->A.type()<<endl;
    //cout<<deltaI.type()<<endl;
    Mat result=deltaItest.row(0)*A;
    cout<<result<<endl;
}
// Triangulate the surviving matches between camera (add-1) and camera (add)
// and append the resulting 3D points to the global reconstruction.
//
// R, t        : per-camera rotation / translation arrays; entries add-1 and add
//               must already be valid. NOTE: they are convertTo'd to CV_32F in
//               place, so the caller's matrices are modified.
// points_1/2  : matched 2D points per image pair (points_1[add-1] lives in
//               image add-1, points_2[add-1] in image add).
// points3D    : 3xN CV_32F point cloud; new columns are hconcat'ed onto it.
// add         : index of the newest camera.
// mask3D      : per-camera 2D observation lists aligned with points3D columns;
//               cameras that do not observe a new point get sentinel (-1,-1).
// img_matches : visualization image; drawn on and displayed (blocking waitKey).
//
// Relies on file-scope globals: camIntrinsic (3x3 intrinsics), m (number of
// cameras), n (updated to the new total point count).
void add_Points( cv::Mat R[],cv::Mat t[],const std::vector< cv::Point2f > points_1[],
        const std::vector< cv::Point2f > points_2[],cv::Mat& points3D,const int add,std::vector< cv::Point2f > mask3D[],cv::Mat& img_matches){

    // obtain mask on new matches by two-view constraint
    // (only the RANSAC inlier mask is kept; the essential matrix itself is discarded)
    cv::Mat mask;
    findEssentialMat(points_1[add-1], points_2[add-1], camIntrinsic, cv::RANSAC, 0.999, 0.25, mask);
    std::vector< cv::Point2f > pointsx,pointsComparex;
    for(int i=0;i<points_1[add-1].size();i++){
        if (mask.at<uchar>(i,0) != 0){
            pointsx.push_back(points_1[add-1][i]);
            pointsComparex.push_back(points_2[add-1][i]);
        }
    }
    std::cout<<"mask result: "<<points_1[add-1].size()<<"	"<<pointsx.size()<<std::endl;

    
    // draw red circles on new points, blue circles on points masked out
    for (int j=0; j<points_1[add-1].size();j++){
        if (mask.at<uchar>(j,0) != 0)
            circle(img_matches, points_1[add-1][j], 10, cv::Scalar(0,0,255), 3);
        else
            circle(img_matches, points_1[add-1][j], 10, cv::Scalar(255,0,0), 3);
    }
    // blocking: the window stays up until a key is pressed
    cv::namedWindow("Matches", CV_WINDOW_NORMAL);
    cv::imshow("Matches", img_matches);
    cv::waitKey();
       
    // form projection matrix of the 2nd last camera
    cv::Mat projMatr1(3,4,CV_32F);
    cv::Mat Rt1(3,4,CV_32F);  // Rt = [R | t]
    R[add-1].convertTo(R[add-1],CV_32F);  // in-place type change visible to caller
    t[add-1].convertTo(t[add-1],CV_32F);
    hconcat(R[add-1], t[add-1], Rt1);  // Rt concatenate
    projMatr1 = camIntrinsic * Rt1;
    // form projection matrix of the last camera
    cv::Mat projMatr2(3,4,CV_32F);
    cv::Mat Rt(3,4,CV_32F);  // Rt = [R | t]
    R[add].convertTo(R[add],CV_32F);
    t[add].convertTo(t[add],CV_32F);
    hconcat(R[add], t[add], Rt);  // Rt concatenate
    projMatr2 = camIntrinsic * Rt;

    // triangulation on the masked matches
    // (points4Dtemp is pre-sized for ALL matches, but triangulatePoints
    //  reallocates the output to 4 x pointsx.size(), so the pre-size is unused)
    cv::Mat points4Dtemp=cv::Mat_<float>(4,points_1[add-1].size());
    cv::triangulatePoints(projMatr1, projMatr2, pointsx, pointsComparex, points4Dtemp);
    
    // fill in new parts of mask3D with 2D points
    // cameras other than (add-1) and (add) have not observed these points; they
    // get the sentinel (-1,-1)
    cv::Point2f x(-1,-1);
    for(int i = 0;i<points4Dtemp.cols;i++){
        for (int j=0;j<m;j++)  // m: global camera count
        {
            if (j == (add-1))
                mask3D[j].push_back(pointsx[i]);
            else if (j == add)
                mask3D[j].push_back(pointsComparex[i]);
            else
                mask3D[j].push_back(x);
        }
    }

    // dehomogenize: divide by the 4th homogeneous coordinate
    // NOTE(review): no guard against w == 0 (points at infinity) -- confirm
    // the earlier RANSAC filtering makes this safe
    cv::Mat points3Dtemp(3,pointsx.size(),CV_32F);
    for (int i=0; i<pointsx.size(); i++)
    {
        float x = points4Dtemp.at<float>(3,i);
        points3Dtemp.at<float>(0,i) = points4Dtemp.at<float>(0,i) / x;
        points3Dtemp.at<float>(1,i) = points4Dtemp.at<float>(1,i) / x;
        points3Dtemp.at<float>(2,i) = points4Dtemp.at<float>(2,i) / x;
    }
    // append the new columns to the global cloud and refresh the global count
    hconcat(points3D,points3Dtemp,points3D);
    n = points3D.cols;
}
// Estimate the pose (R[add], t[add]) of the newest camera via PnP.
//
// Steps:
//  1. Find feature matches common to the last three images (add-2, add-1, add).
//  2. Triangulate those common points from the two previous camera poses.
//  3. solvePnPRansac against their 2D locations in the newest image.
// Side effects: the 3-view matches are erased from points_1/points_2[add-1]
// (so a later add_Points call only triangulates genuinely new points), and the
// newest image's observations of already-reconstructed points are written into
// mask3D[add].
//
// Relies on file-scope globals: camIntrinsic, dist (distortion coefficients),
// projMatr0 (projection matrix of the very first camera).
// NOTE: R[add-2..add-1] / t[add-2..add-1] are convertTo'd to CV_32F in place.
void PnP(const std::vector< cv::DMatch > good_matches[],const int add,const std::vector<cv::KeyPoint> keypoints[],
        cv::Mat R[], cv::Mat t[],std::vector< cv::Point2f > points_1[], std::vector< cv::Point2f > points_2[],std::vector< cv::Point2f > mask3D[],cv::Mat& img_matches){

    std::vector<int> indicator(5,-1);
    std::vector<cv::Point2f> points1,points2,points3;

    // image 1 & 2
    // write in indicator for last 2 images
    // indicator[trainIdx in image add-1] = index of that match inside
    // good_matches[add-2]; -1 means "keypoint not matched from image add-2"
    for(int i = 0; i < good_matches[add-2].size();i++){
        if(good_matches[add-2][i].trainIdx>=indicator.size()){
            indicator.resize(good_matches[add-2][i].trainIdx+1,-1);
            indicator[good_matches[add-2][i].trainIdx] = i;
        }
        else{
            indicator[good_matches[add-2][i].trainIdx] = i;
        }
    }

    // images 2 & 3
    std::cout<< "number of matches between "<<add+1<<" pic and last pic: "<<points_1[add-1].size()<<std::endl;   // how many matches initially

    // assign points1, points2, points3; find common matches and update mask3d and points_2
    for(int i = 0; i < good_matches[add-1].size();i++){

        if(good_matches[add-1][i].queryIdx<indicator.size()){
            int ind = good_matches[add-1][i].queryIdx;

            // queryIdx of the (add-1 -> add) match is a trainIdx of an
            // (add-2 -> add-1) match, i.e. this feature is seen in all 3 images
            if(indicator[ind]!=-1){

                // 2D location in image add-2
                cv::Point2f temppoint = keypoints[add-2][good_matches[add-2][indicator[ind]].queryIdx].pt ;
                points1.push_back(temppoint);

                // 2D location in image add-1 (also searched for in the pairwise
                // match list and in the already-reconstructed observations)
                temppoint =keypoints[add-1][good_matches[add-2][indicator[ind]].trainIdx].pt;
                std::vector<cv::Point2f>::iterator p = std::find(points_1[add-1].begin(), points_1[add-1].end(), temppoint);
                std::vector<cv::Point2f>::iterator _p = std::find(mask3D[add-1].begin(), mask3D[add-1].end(), temppoint);
                points2.push_back(temppoint);

                // 2D location in the newest image add
                temppoint = keypoints[add][good_matches[add-1][i].trainIdx].pt;
                std::vector<cv::Point2f>::iterator t = std::find(points_2[add-1].begin(), points_2[add-1].end(), temppoint);
                points3.push_back(temppoint);

                // update mask3d info 3rd cam
                // (the existing 3D point is now also observed by camera add)
                if(_p!=mask3D[add-1].end()){
                    int nPosition = distance (mask3D[add-1].begin(), _p);
                    mask3D[add][nPosition] = temppoint;
                }

                // delete common matches across 3 pics from points_1 and points_2
                // NOTE(review): erase-by-value via std::find assumes exact float
                // equality of keypoint coordinates -- confirm keypoints are not
                // refined between the two match passes
                if(p!=points_1[add-1].end()){
                    int nPosition = distance (points_1[add-1].begin(), p);
                    points_1[add-1].erase(p);
                    points_2[add-1].erase(points_2[add-1].begin()+nPosition);
                }
                //if(t!=points_2[add-1].end()){
                //	points_2[add-1].erase(t);
                //}
            }
        }
    }


/*
    for (int j=0; j<points_1[add-1].size();j++){

        circle(img_matches, points_1[add-1][j], 10, cv::Scalar(0,0,255), 3);
    }

    cv::namedWindow("Matches", CV_WINDOW_NORMAL);
    cv::imshow("Matches", img_matches);
    cv::waitKey();
*/



    std::cout<<"number of common matches found across 3 pics: "<<points1.size()<<std::endl;
    std::cout<<"number of matches between "<<add+1<<" pic and last pic after deduction: "<<points_1[add-1].size()<<"	"<<points_2[add-1].size()<<std::endl;

    // reconstruct 3d points of common matches from last pics for pnp

    // projection matrix of camera add-2 (the first camera uses the canonical
    // global projMatr0)
    cv::Mat projMatr1(3,4,CV_32F);
    if(add==2){
        projMatr1=projMatr0;
    }
    else{
        cv::Mat Rt1(3,4,CV_32F);  // Rt = [R | t]
        R[add-2].convertTo(R[add-2],CV_32F);
        t[add-2].convertTo(t[add-2],CV_32F);
        hconcat(R[add-2], t[add-2], Rt1);  // Rt concatenate

        projMatr1 = camIntrinsic * Rt1;
    }
    // projection matrix of camera add-1
    cv::Mat projMatr2(3,4,CV_32F);
    cv::Mat Rt(3,4,CV_32F);  // Rt = [R | t]
    R[add-1].convertTo(R[add-1],CV_32F);
    t[add-1].convertTo(t[add-1],CV_32F);
    hconcat(R[add-1], t[add-1], Rt);  // Rt concatenate
    projMatr2 = camIntrinsic * Rt;
    //	        std::cout<<Rt<<std::endl;

    cv::Mat points4Dtemp=cv::Mat_<float>(4,points1.size());

    /*
       cv::Mat mask1,mask2;
       cv::findEssentialMat(points1, points2, camIntrinsic, cv::RANSAC, 0.999, 1.0, mask1);
       std::vector< cv::Point2f > points1x, points2x, points3x;
       for(int i=0;i<points1.size();i++){
       if (mask1.at<uchar>(i,0) != 0){
       points1x.push_back(points1[i]);
       points2x.push_back(points2[i]);
       points3x.push_back(points3[i]);
       }
       }

       cv::findEssentialMat(points2x, points3x, camIntrinsic, cv::RANSAC, 0.999, 1.0, mask2);
       std::vector< cv::Point2f > points1xx, points2xx, points3xx;
       for(int i=0;i<points1x.size();i++){
       if (mask2.at<uchar>(i,0) != 0){
       points1xx.push_back(points1x[i]);
       points2xx.push_back(points2x[i]);
       points3xx.push_back(points3x[i]);
       }
       }

       std::cout<<"number of common matches after ransac: "<<points1xx.size()<<std::endl;
       */


    // triangulate the 3-view points from the two previous cameras, then
    // dehomogenize (no w == 0 guard -- see add_Points)
    cv::triangulatePoints(projMatr1, projMatr2, points1, points2, points4Dtemp);
    std::vector<cv::Point3f> points3Dtemp;
    for (int i=0; i < points1.size(); i++)
    {
        float x = points4Dtemp.at<float>(3,i);
        cv::Point3f p;
        p.x = points4Dtemp.at<float>(0,i) / x;
        p.y = points4Dtemp.at<float>(1,i) / x;
        p.z = points4Dtemp.at<float>(2,i) / x;
        points3Dtemp.push_back(p);
    }

    //std::cout<<points3Dtemp.rows<<"		"<<std::endl;
    //std::cout<<"6666666666666666666666666666666"<<std::endl;

    // PnP to get R and t of current pic
    // NOTE(review): positional args are iterationsCount=3, reprojectionError=5,
    // confidence=0.99 -- only 3 RANSAC iterations looks far too low; confirm
    // the intended argument order/values
    cv::Mat Rv;
    cv::Mat inlier;
    solvePnPRansac(points3Dtemp,points3,camIntrinsic,dist,Rv,t[add],false,3,5,0.99,inlier,CV_ITERATIVE);
    int n_inl = countNonZero(inlier);
    std::cout<<"number of inliers in PnP: "<<n_inl<<std::endl;

    // convert the Rodrigues rotation vector into a 3x3 rotation matrix
    Rodrigues(Rv,R[add]);

    //std::cout<<"5555555555555555555555555555555"<<std::endl;
    //	        std::cout<<"R after pnp: "<<R[add]<<std::endl;
    //	        std::cout<<"t after pnp: "<<t[add-1]<<std::endl;

}
Exemple #29
0
// Build the block-wise histogram feature vector of the last PCANet stage.
//
// PcaNet : trained network parameters (per-stage filter counts, histogram
//          block size, block overlap ratio).
// ImgIdx : for each group of filtered maps in Imgs, the index of the input
//          image it belongs to (used to regroup maps per input image).
// Imgs   : NumFilters output maps per input image, all sharing the size and
//          depth of Imgs[0].
//
// Returns a heap-allocated Hashing_Result whose Features row vector is the
// column-major flattening of the concatenated per-image block histograms.
// The caller owns (and must delete) the returned object.
Hashing_Result* HashingHist(PCANet* PcaNet, vector<int>& ImgIdx, vector<cv::Mat>& Imgs){
	Hashing_Result* ha_result = new Hashing_Result;

	int length = Imgs.size();
	int NumFilters =  PcaNet->NumFilters[PcaNet->NumStages - 1];
	int NumImgin0 = length / NumFilters;

	cv::Mat T;
	int row = Imgs[0].rows;
	int col = Imgs[0].cols;
	int depth = Imgs[0].depth();

	// Binary-code weights 2^(NumFilters-1) .. 2^0: each filter response
	// contributes one bit of the per-pixel hash value.
	vector<double> map_weights;
	cv::Mat temp;
	for(int i=NumFilters - 1; i>=0; i--)
		map_weights.push_back(pow(2.0, (double)i));

	// Stride between histogram blocks, derived from the overlap ratio.
	vector<int> Ro_BlockSize;
	double rate = 1 - PcaNet->BlkOverLapRatio;
	for(int i=0 ;i<PcaNet->HistBlockSize.size(); i++)
		Ro_BlockSize.push_back(round(PcaNet->HistBlockSize[i] * rate));
	
	
	cv::Mat BHist;

	// Invert ImgIdx: new_idx[image id] = position of that image's map group.
	// std::vector replaces the original raw new[] buffer, which was never
	// delete[]d (memory leak on every call).
	int ImgIdx_length = ImgIdx.size();
	vector<int> new_idx(ImgIdx_length);
	for(int i=0; i<ImgIdx_length; i++)
		new_idx[ImgIdx[i]] = i;   // assumes every ImgIdx value < ImgIdx_length -- TODO confirm

	for(int i=0; i<NumImgin0; i++){
		// Combine the NumFilters binarized maps of image i into one hash map T.
		T = cv::Mat::zeros(row, col, depth);	


		for(int j=0; j<NumFilters; j++){
			temp = Heaviside(Imgs[NumFilters * new_idx[i] + j]);
			temp = temp * map_weights[j];
			T = T + temp;
		}
		
		// Histogram each (possibly overlapping) block over 2^NumFilters bins.
		temp = im2col_general(T, PcaNet->HistBlockSize, Ro_BlockSize); 
		temp = Hist(temp, (int)(pow(2.0, NumFilters)) - 1);

		temp = bsxfun_times(temp, NumFilters);

		if(i == 0) BHist = temp;
		else hconcat(BHist, temp, BHist);
	}
	
	// Flatten BHist column-major into a single row feature vector.
	int rows = BHist.rows;
	int cols = BHist.cols;

	ha_result->Features.create(1, rows * cols, BHist.type()); 

	double *p_Fe = ha_result->Features.ptr<double>(0);
	double *p_Hi;
	for(int i=0; i<rows; i++){
		p_Hi = BHist.ptr<double>(i);
		for(int j=0; j<cols; j++){
			p_Fe[j * rows + i] = p_Hi[j];
		}
	}
	return ha_result;
}
Exemple #30
0
// Blind Richardson-Lucy deconvolution of an image patch.
//
// _patch   : blurred input patch (converted internally to doubles).
// one, two : endpoints describing the motion-blur streak; passed to
//            evaluateKernel() to build the initial kernel estimate.
//
// Alternates a kernel-refinement step (RL with the roles of image and kernel
// swapped) with the classical Richardson-Lucy image update for maxIter
// iterations, shows the blurred/deblurred pair side by side in a
// "Deblurring" window (non-blocking), and returns the result as CV_8U.
cv::Mat deblurPatch(const cv::Mat &_patch, cv::Point2f one, cv::Point2f two) {
    cv::Mat blurredWindow = _patch.clone();   // kept untouched for the display
    cv::Mat patch = _patch.clone();
    patch.convertTo(patch, cv::DataType<double>::type);
    cv::Mat kernel = evaluateKernel(one,two);

    cv::Mat kernel_hat = kernel.clone(), result = patch.clone();
    cv::Mat est_conv = result.clone(), relative_blur = patch.clone(), error_est = patch.clone();
    cv::Mat patch_hat = patch.clone(), k_est_conv = kernel.clone(), k_relative_blur = kernel.clone(), k_error_est = kernel.clone();

    const int rows = kernel.rows, cols = kernel.cols, maxIter = 3;
    const int pRows = patch.rows, pCols = patch.cols;
    const cv::Point anchor(-1, -1);  // was Point2f; filter2D takes cv::Point
    const double delta = 0;
    const int ddepth = -1;

    // patch never changes, so its 180-degree rotation is loop-invariant
    // (flipCode -1 flips both axes, identical to the former index-reversal loop).
    cv::flip(patch, patch_hat, -1);

    for (int l = 0; l < maxIter; l++) {

        // --- Kernel refinement: RL step applied to the kernel estimate.
        filter2D(kernel, k_est_conv, ddepth, patch, anchor, delta, cv::BORDER_DEFAULT);
        for (int i = 0; i < rows; i++)
            for (int j = 0; j < cols; j++) {
                // NOTE(review): no guard against k_est_conv == 0 (the original
                // eps clamp was commented out) -- division can yield inf/nan.
                // Kept as a scalar loop on purpose: cv::divide maps x/0 to 0,
                // which would silently change behavior.
                k_relative_blur.at<double>(i, j) = kernel.at<double>(i, j) / k_est_conv.at<double>(i, j);
            }
        filter2D(k_relative_blur, k_error_est, ddepth, patch_hat);
        kernel = kernel.mul(k_error_est);   // element-wise, same as the former loop

        // --- Classical Richardson-Lucy image update.
        cv::flip(kernel, kernel_hat, -1);   // kernel changes each iteration
        filter2D(result, est_conv, ddepth, kernel);
        for (int i = 0; i < pRows; i++)
            for (int j = 0; j < pCols; j++) {
                // Same zero-divisor caveat as above.
                relative_blur.at<double>(i, j) = patch.at<double>(i, j) / est_conv.at<double>(i, j);
            }
        filter2D(relative_blur, error_est, ddepth, kernel_hat, anchor, delta, cv::BORDER_DEFAULT);
        result = result.mul(error_est);
    }

    result.convertTo(result, CV_8U);

    // Side-by-side visualization; no waitKey here, caller drives the event loop.
    hconcat(blurredWindow, result, blurredWindow);
    imshow("Deblurring", blurredWindow);

    return result;
}