int HrPwWindow::fit(QVector<double> &Xi, QVector<double> &Yi, int n)
{
    QVector<double> r(5), lnXi(100), lnYi(100), logXi(100), logYi(100), invXi(100);

    // Linear correlation
    r[0] = val_abs(corr(Xi, Yi, n));

    // Exponential correlation
    lnarray(Yi, lnYi, n);
    r[1] = val_abs(corr(Xi, lnYi, n));

    // Power correlation
    logarray(Xi, logXi, n);
    logarray(Yi, logYi, n);
    r[2] = val_abs(corr(logXi, logYi, n));

    // Inverse correlation
    invarray(Xi, invXi, n);
    r[3] = val_abs(corr(invXi, Yi, n));

    // Logarithmic correlation
    lnarray(Xi, lnXi, n);
    r[4] = val_abs(corr(lnXi, Yi, n));

    // Best fit test
    return rmax(r);
}
int Statistic::ajustement(QVector<double> &Xi, QVector<double> &Yi, int n)
{
    QVector<double> r(5), lnXi(100), lnYi(100), logXi(100), logYi(100), invXi(100);

    // Correlation for linear fit
    r[0] = val_abs(corr(Xi, Yi, n));

    // Correlation for exponential fit
    lntab(Yi, lnYi, n);
    r[1] = val_abs(corr(Xi, lnYi, n));

    // Correlation for power fit
    logtab(Xi, logXi, n);
    logtab(Yi, logYi, n);
    r[2] = val_abs(corr(logXi, logYi, n));

    // Correlation for inverse fit
    invtab(Xi, invXi, n);
    r[3] = val_abs(corr(invXi, Yi, n));

    // Correlation for logarithmic fit
    lntab(Xi, lnXi, n);
    r[4] = val_abs(corr(lnXi, Yi, n));

    // Best-fit test
    return rmax(r);
}
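// Both fit routines above rank candidate models by the absolute Pearson
// correlation coefficient of (possibly transformed) samples. The projects'
// own corr() helpers are not shown here; the sketch below is only a plausible
// stand-alone implementation of that (X, Y, n) signature for reference, not
// the original code (assumes <cmath> and the QVector type used above).
static double corr_sketch(const QVector<double> &X, const QVector<double> &Y, int n)
{
    double sx = 0, sy = 0, sxx = 0, syy = 0, sxy = 0;
    for (int i = 0; i < n; ++i) {
        sx  += X[i];
        sy  += Y[i];
        sxx += X[i] * X[i];
        syy += Y[i] * Y[i];
        sxy += X[i] * Y[i];
    }
    double cov = n * sxy - sx * sy;   // n^2 times the covariance of X and Y
    double vx  = n * sxx - sx * sx;   // n^2 times the variance of X
    double vy  = n * syy - sy * sy;   // n^2 times the variance of Y
    if (vx <= 0 || vy <= 0)
        return 0.0;                   // degenerate data: no correlation defined
    return cov / std::sqrt(vx * vy);  // Pearson r in [-1, 1]
}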
double NCC(const Image<float>& I1, const Image<float>& meanI1, Point m1,
           const Image<float>& I2, const Image<float>& meanI2, Point m2, int n)
{
    if (m1.x < n || m1.x >= I1.width() - n || m1.y < n || m1.y >= I1.height() - n)
        return -1;
    if (m2.x < n || m2.x >= I2.width() - n || m2.y < n || m2.y >= I2.height() - n)
        return -1;
    double c1 = corr(I1, meanI1, m1, I1, meanI1, m1, n);
    if (c1 == 0)
        return -1;
    double c2 = corr(I2, meanI2, m2, I2, meanI2, m2, n);
    if (c2 == 0)
        return -1;
    return corr(I1, meanI1, m1, I2, meanI2, m2, n) / sqrt(c1 * c2);
}
double NCC(const Image<float>& I1, const Image<float>& meanI1, const Image<float>& corrI1, Point m1,
           const Image<float>& I2, const Image<float>& meanI2, const Image<float>& corrI2, Point m2, int n)
{
    if (m1.x < n || m1.x >= I1.width() - n || m1.y < n || m1.y >= I1.height() - n)
        return -1;
    if (m2.x < n || m2.x >= I2.width() - n || m2.y < n || m2.y >= I2.height() - n)
        return -1;
    if (corrI1(m1) == 0)
        return -1;
    if (corrI2(m2) == 0)
        return -1;
    return corr(I1, m1, I2, m2, n) / sqrt(corrI1(m1) * corrI2(m2));
}
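// The two NCC() overloads above normalise a windowed cross-correlation by the
// geometric mean of the two auto-correlations. The corr() they call is not
// shown; the sketch below is one plausible definition, assumed here only for
// illustration: a sum of products of mean-subtracted pixels over a
// (2n+1)x(2n+1) window, assuming Image<float> exposes pixel access as I(x, y)
// and that the mean images are evaluated at the window centres.
double corr_window_sketch(const Image<float>& I1, const Image<float>& meanI1, Point m1,
                          const Image<float>& I2, const Image<float>& meanI2, Point m2, int n)
{
    double s = 0.0;
    for (int dy = -n; dy <= n; ++dy) {
        for (int dx = -n; dx <= n; ++dx) {
            // Products of mean-subtracted intensities at corresponding offsets
            double a = I1(m1.x + dx, m1.y + dy) - meanI1(m1.x, m1.y);
            double b = I2(m2.x + dx, m2.y + dy) - meanI2(m2.x, m2.y);
            s += a * b;
        }
    }
    return s;
}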
int HrPwWindow::findDelay(QVector<double> &wattsArray, QVector<double> &hrArray, int rideTimeSecs)
{
    int delay = 0;
    double maxr = 0;

    if (rideTimeSecs >= 60) {
        for (int a = 10; a <= 60; ++a) {
            QVector<double> delayHr(rideTimeSecs);
            for (int j = a; j < rideTimeSecs; ++j) {
                delayHr[j - a] = hrArray[j];
            }
            for (int j = rideTimeSecs - a; j < rideTimeSecs; ++j) {
                delayHr[j] = 0.0;
            }
            double r = corr(wattsArray, delayHr, rideTimeSecs - a);
            //fprintf(stderr, "findDelay %d: %.2f \n", a, r);
            if (r > maxr) {
                maxr = r;
                delay = a;
            }
        }
    }

    delayEdit->setText(QString("%1").arg(delay));
    rDelayEdit->setText(QString("%1").arg(delay));
    delaySlider->setValue(delay);
    rDelaySlider->setValue(delay);

    return delay;
}
void bmfm_fancy(float *disp,          // output disparities image (dx, dy)
                float *errc,          // output error image
                float *a,             // input image A
                float *b,             // input image B
                int w,                // width
                int h,                // height
                int pd,               // pixel dimension
                double fm[9],         // fundamental matrix
                float *disp_init,     // initialization (optional)
                float *search_radius  // optional, w.r.t. initialization
                )
{
    int maxpoints = 2 * (w + h), (*p)[2] = xmalloc(maxpoints * sizeof*p);
    for (int j = 0; j < h; j++)
    for (int i = 0; i < w; i++) {
        float rad = NAN, ini[2] = {0, 0};
        if (search_radius) rad = search_radius[j*w+i];
        if (disp_init) ini[0] = disp_init[2*(j*w+i)+0];
        if (disp_init) ini[1] = disp_init[2*(j*w+i)+1];
        int np = plot_epipolar_fancy(p, fm, w, h, i, j, ini, rad);
        float mincorr = INFINITY;
        int minidx = 0;
        for (int k = 0; k < np; k++) {
            float c = corr(a, b, w, h, pd, i, j, p[k][0], p[k][1]);
            if (c < mincorr) {
                mincorr = c;
                minidx = k;
            }
        }
        if (disp) disp[2*(j*w+i) + 0] = p[minidx][0] - i;
        if (disp) disp[2*(j*w+i) + 1] = p[minidx][1] - j;
        if (errc) errc[j*w+i] = mincorr;
    }
    free(p);
}
void shot_detector::compare(pcl::PointCloud<DescriptorType>::Ptr model_descriptions,
                            pcl::PointCloud<DescriptorType>::Ptr scene_descriptions)
{
    model_scene_corrs->clear();
    pcl::KdTreeFLANN<DescriptorType> match_search;
    match_search.setInputCloud (model_descriptions);
    std::cerr << scene_descriptions->size() << " and " << model_descriptions->size() << std::endl;
    model_good_keypoints_indices.clear();
    scene_good_keypoints_indices.clear();

    // For each scene keypoint descriptor, find the nearest neighbour in the model
    // keypoint descriptor cloud and add it to the correspondences vector.
    for (size_t i = 0; i < scene_descriptions->size (); ++i) {
        std::vector<int> neigh_indices (1);
        std::vector<float> neigh_sqr_dists (1);
        if (!pcl_isfinite (scene_descriptions->at (i).descriptor[0])) { // skipping NaNs
            continue;
        }
        int found_neighs = match_search.nearestKSearch (scene_descriptions->at (i), 1,
                                                        neigh_indices, neigh_sqr_dists);
        // Add a match only if the squared descriptor distance is below the threshold
        // (SHOT descriptor distances are between 0 and 1 by design).
        if (found_neighs == 1 && neigh_sqr_dists[0] < corr_dist_) {
            pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]);
            model_scene_corrs->push_back (corr);
        }
    }
    pcl::copyPointCloud (*model_keypoints, model_good_keypoints_indices, *model_good_kp);
    pcl::copyPointCloud (*scene_keypoints, scene_good_keypoints_indices, *scene_good_kp);
    std::cerr << "Correspondences found: " << model_scene_corrs->size () << std::endl;
}
double operator()(configuration const& c) const
{
    auto delta = (rhs - kern(c)) / error_bars;
    int M = first_dim(delta);
    double kappa = 0;
    for (int i = 1; i < M; ++i) kappa += corr(delta(i), delta(i - 1));
    kappa /= M - 1;
    return kappa;
}
// Returns the matrix with all the auto-correlations
Image<float> corrImage(const Image<float>& I, const Image<float>& meanI, int n)
{
    Image<float> corrI(I.width(), I.height(), CV_32F);
    for (int i = n + 1; i < I.height() - n - 1; i++) {
        for (int j = n + 1; j < I.width() - n - 1; j++) {
            //cout << i << " " << j << endl;
            corrI.at<float>(i, j) = (float)(corr(I, meanI, Point(j, i), I, meanI, Point(j, i), n));
        }
    }
    return corrI;
}
int InitialGuess (double *s, double *p, int nphase, int nchn, int *chn)
{
    int index;
    double frac_off = 0.05;  // set to be 0.05
    double ptemp[nphase];
    double temp[nchn];

    int i, h;
    for (i = 0; i < nchn; i++) {
        for (h = 0; h < nphase; h++) {
            ptemp[h] = p[i*nphase + h];
        }
        int x;
        x = def_off_pulse (nphase, ptemp, frac_off);
        double ptemp_out[nphase];
        pre_diff (ptemp, nphase, x, frac_off, ptemp_out);
        temp[i] = find_peak_value (nphase, ptemp_out);
    }

    int peak;
    find_peak (0, nchn, temp, &peak);
    //printf ("%d\n",peak);
    (*chn) = peak;

    double p_use[nphase];
    double s_use[nphase];
    for (h = 0; h < nphase; h++) {
        p_use[h] = p[peak*nphase + h];
        s_use[h] = s[peak*nphase + h];
    }

    // remove the baseline of template
    index = def_off_pulse (nphase, s_use, frac_off);
    double s_out[nphase];
    pre_diff (s_use, nphase, index, frac_off, s_out);

    // remove the baseline of profile
    index = def_off_pulse (nphase, p_use, frac_off);
    double p_out[nphase];
    pre_diff (p_use, nphase, index, frac_off, p_out);

    // Guess the phase shift
    int d;
    d = corr (s_out, p_out, nphase);

    return d;
}
int main()
{
    // Assumed input buffer: the original global 'string' declaration is not part
    // of this snippet, and gets() is replaced by the safer fgets() equivalent.
    char string[10000];
    int a, i, j;

    if (fgets(string, sizeof string, stdin) == NULL)
        return 0;
    string[strcspn(string, "\n")] = '\0';

    a = 1;
    i = 0;
    for (j = 0; string[j] != '\0'; j++);
    j--;
    while (j - i >= 1) {
        while (corr(string[i])) i++;
        while (corr(string[j]) && (j >= 1)) j--;
        if ((reg(string[i]) != reg(string[j])) && (j - i >= 1)) a = 0;
        i++;
        j--;
    }
    if (a) printf("Yes");
    else printf("No");
    return 0;
}
static void roguejoin(struct level *lev, int x1, int y1, int x2, int y2, int horiz)
{
    int x, y, middle;
#ifndef MAX
#define MAX(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef MIN
#define MIN(a,b) (((a) < (b)) ? (a) : (b))
#endif
    if (horiz) {
        middle = x1 + rn2(x2 - x1 + 1);
        for (x = MIN(x1, middle); x <= MAX(x1, middle); x++)
            corr(lev, x, y1);
        for (y = MIN(y1, y2); y <= MAX(y1, y2); y++)
            corr(lev, middle, y);
        for (x = MIN(middle, x2); x <= MAX(middle, x2); x++)
            corr(lev, x, y2);
    } else {
        middle = y1 + rn2(y2 - y1 + 1);
        for (y = MIN(y1, middle); y <= MAX(y1, middle); y++)
            corr(lev, x1, y);
        for (x = MIN(x1, x2); x <= MAX(x1, x2); x++)
            corr(lev, x, middle);
        for (y = MIN(middle, y2); y <= MAX(middle, y2); y++)
            corr(lev, x2, y);
    }
}
int fit()
{
    cout << "start of fit procedure" << endl;
    readsource("run369/CRS/CRS_QDC_01!qdc01.dat", 7214);
    readbackgr("run371/CRS/CRS_QDC_01!qdc01.dat", 56948);
    corr();

    TCanvas* c1 = new TCanvas("c1", "c1", 800, 600);
    int maxbin = -1;
    Double_t maxvalue = -1;
    Double_t tempbin = 0;
    TString text = "";

    hcorr->GetXaxis()->SetRange(200, 4000);
    maxbin = hcorr->GetMaximumBin();
    maxvalue = hcorr->GetBinContent(maxbin);
    hcorr->GetXaxis()->SetRange(0, 4095);

    TF1* f1 = new TF1("f1", "[0]*exp(-((x-[1])^2)/[2])");
    f1->SetParameter(0, maxvalue);
    f1->SetParameter(1, maxbin);
    f1->SetParameter(2, 1);
    f1->SetLineColor(2);

    hcorr->SetMinimum(0);
    hcorr->SetMaximum(1.2 * maxvalue);
    hcorr->Fit("f1", "", "", maxbin - maxbin/6, 4095);

    tempbin = sqrt(-log(maxvalue*2/3/(f1->GetParameter(0))) * (f1->GetParameter(2))) + f1->GetParameter(1);
    cout << "2/3 of maximum height is in bin " << tempbin << "->" << int(tempbin + 0.5) << endl;
    cout << endl << "max bin = " << maxbin << " with content = " << maxvalue << endl;

    return 0;
}
static void spatial_mat(double *par, double *dist, longint *n, longint *nug,
                        double (*corr)(double ), double *mat)
{
    longint i, j, np1 = *n + 1;
    double aux, *sdist, ratio = 1.0;

    sdist = dist;
    if (*nug) ratio = par[1];
    for (i = 0; i < *n; i++) {
        mat[i * np1] = 1.0;
        for (j = i + 1; j < *n; j++, sdist++) {
            aux = *sdist / *par;
            *(mat + i + j * (*n)) = *(mat + j + i * (*n)) = ratio * corr(aux);
        }
    }
}
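// spatial_mat() above fills a symmetric n x n correlation matrix from a packed
// vector of pairwise distances scaled by the range parameter par[0], with an
// optional nugget ratio, using whatever correlation function is passed in.
// The exponential model below is one standard choice that matches the
// double(*)(double) callback signature; it is shown only as an illustrative
// example, not as the function the original library actually passes.
static double corr_exponential(double u)
{
    // u is the distance already divided by the range parameter, so the
    // exponential spatial correlation is simply exp(-u); a Gaussian model
    // would use exp(-u*u) instead.
    return exp(-u);
}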
std::vector<Joystick::CalibrationData> Joystick::get_calibration()
{
    std::vector<struct js_corr> corr(get_axis_count());

    if (ioctl(fd, JSIOCGCORR, &*corr.begin()) < 0) {
        std::ostringstream str;
        str << filename << ": " << strerror(errno);
        throw std::runtime_error(str.str());
    } else {
        std::vector<CalibrationData> data;
        std::transform(corr.begin(), corr.end(), std::back_inserter(data), corr2cal);
        return data;
    }
}
void AddEntryDlg::OnButtonEditInsertPressed()
{
    //QString insert_cmd("UPDATE 'musicdb'.'main' SET 'titel' = '");
    QString insert_cmd("UPDATE 'main' SET 'titel' = '");
    insert_cmd.append(corr(le_title.text()));
    insert_cmd.append("', 'kuenstler' = '");
    insert_cmd.append(corr(le_artist.text()));
    insert_cmd.append("', 'album' = '");
    insert_cmd.append(corr(le_album.text()));
    insert_cmd.append("', 'tag' = '");
    insert_cmd.append(corr(le_tag.text()));
    insert_cmd.append("', 'genre' = '");
    insert_cmd.append(corr(le_genre.text()));
    insert_cmd.append("', 'jahr' = '");
    insert_cmd.append(le_year.text());
    insert_cmd.append("', 'others' = '");
    if (cb_interest_others.isChecked())
        insert_cmd.append("1");
    else
        insert_cmd.append("0");
    insert_cmd.append("', 'yours' = '");
    if (cb_interest_yours.isChecked())
        insert_cmd.append("1");
    else
        insert_cmd.append("0");
    insert_cmd.append("', 'dateityp' = '");
    insert_cmd.append(le_filetype.text());
    insert_cmd.append("', 'qualitaet' = '");
    insert_cmd.append(le_quality.text());
    insert_cmd.append("', 'bew_yours' = '");
    insert_cmd.append(QString::number(sb_vote_yours.value()));
    insert_cmd.append("', 'bew_others' = '");
    insert_cmd.append(QString::number(sb_vote_others.value()));
    insert_cmd.append("', 'pfad' = '");
    insert_cmd.append(le_path.text());
    insert_cmd.append("', 'url' = '");
    insert_cmd.append(corr(le_source.text()));
    insert_cmd.append("' WHERE 'main'.'id' =");
    insert_cmd.append(QString::number(editnum));

    printf("Query command: %s\n", insert_cmd.toAscii().data());
    sqlhelper.exec(insert_cmd);
    accept();
}
int main()
{
    int i, j, st;
    int sp = n();
    double data[500000];
    double theta[5] = {0.3, 0.2, 0.3, 0, 0};
    double phi[5] = {1., 0.3, 0.7, 0, 0};
    double nofphot[5] = {1000., 1000., 1000, 0, 0};

    st = corr(data, 3, theta, phi, 0.25, 50, 5, nofphot, 0., 50);

    FILE* f = fopen("T.txt", "w+");
    for (i = 0; i < 256; i++) {
        for (j = 0; j < 256; j++) {
            fprintf(f, "%f ", data[sp*256*256 + j*256 + i]);
        }
        fprintf(f, "\n");
    }
    fclose(f);
    return 0;
}
void DSPEngine::dcOffset(SampleVector::iterator begin, SampleVector::iterator end)
{
    double count;
    int io = 0;
    int qo = 0;
    Sample corr((qint16)m_iOffset, (qint16)m_qOffset);

    // sum and correct in one pass
    for (SampleVector::iterator it = begin; it < end; it++) {
        io += it->real();
        qo += it->imag();
        *it -= corr;
    }

    // moving average
    count = end - begin;
    m_iOffset = (15.0 * m_iOffset + (double)io / count) / 16.0;
    m_qOffset = (15.0 * m_qOffset + (double)qo / count) / 16.0;
}
doublereal WaterPropsIAPWS::psat(doublereal temperature, int waterState)
{
    doublereal densLiq = -1.0, densGas = -1.0, delGRT = 0.0;
    doublereal dp, pcorr;
    if (temperature >= T_c) {
        densGas = density(temperature, P_c, WATER_SUPERCRIT);
        setState_TR(temperature, densGas);
        return P_c;
    }
    doublereal p = psat_est(temperature);
    bool conv = false;
    for (int i = 0; i < 30; i++) {
        if (method == 1) {
            corr(temperature, p, densLiq, densGas, delGRT);
            doublereal delV = M_water * (1.0/densLiq - 1.0/densGas);
            dp = - delGRT * Rgas * temperature / delV;
        } else {
            corr1(temperature, p, densLiq, densGas, pcorr);
            dp = pcorr - p;
        }
        p += dp;

        if ((method == 1) && delGRT < 1.0E-8) {
            conv = true;
            break;
        } else {
            if (fabs(dp/p) < 1.0E-9) {
                conv = true;
                break;
            }
        }
    }
    // Put the fluid in the desired end condition
    if (waterState == WATER_LIQUID) {
        setState_TR(temperature, densLiq);
    } else if (waterState == WATER_GAS) {
        setState_TR(temperature, densGas);
    } else {
        throw Cantera::CanteraError("WaterPropsIAPWS::psat",
                                    "unknown water state input: " + Cantera::int2str(waterState));
    }
    return p;
}
int go_appendix_backgr()
{
    readsource("run370/CRS/CRS_QDC_00!qdc00.dat", 7215);
    readbackgr("run371/CRS/CRS_QDC_00!qdc00.dat", 56948);
    corr();

    TPaveText *pt = new TPaveText(0.65, 0.7, 0.85, 0.85, "NDC");
    pt->AddText("run371");
    pt->AddText("background");
    pt->AddText("box I (QDC ch00)");

    hbackgr->GetXaxis()->SetTitle("QDC energy [bin]");
    hbackgr->SetTitle("Background - box I");

    TCanvas* c1 = new TCanvas("c1", "c1", 800, 300);
    hbackgr->Draw();
    pt->Draw("same");
    c1->SaveAs("appendix/backgr_boxI.jpg");

    return 0;
}
/* MEX FUNCTION */
void mexFunction(int nargout, mxArray* argout[], int nargin, const mxArray* argin[])
{
    if (nargin != 4)
        mexErrMsgTxt("Four input arguments must be provided to this function. See documentation for syntax details.");

    double* x = mxGetPr(argin[0]);
    double* y = mxGetPr(argin[1]);
    int window = (int)mxGetScalar(argin[2]);
    int noverlap = (int)mxGetScalar(argin[3]);
    int increment = window - noverlap;

    int ncx, ncy, nrx, nry;
    nrx = mxGetM(argin[0]);
    ncx = mxGetN(argin[0]);
    nry = mxGetM(argin[1]);
    ncy = mxGetN(argin[1]);

    if (nrx == 0 || nry == 0) { mexErrMsgTxt("Inputs cannot be empty arrays."); }
    if (nrx != nry) { mexErrMsgTxt("X and Y must contain equivalent length signals."); }

    // We need a higher precision calculation for the number of SWC points per signal.
    // Otherwise, if this ends up being fractional, it could get rounded up and result
    // in out-of-bounds indexing later on. We need to always force it downward.
    float temp = (float)(nrx - window) / (float)increment;
    int nswc = (int)floor(temp);

    argout[0] = mxCreateDoubleMatrix(nswc, ncx * ncy, mxREAL);
    double* swc = mxGetPr(argout[0]);
    int nrxToUse = nswc * increment;

    if (ncy == 1) {
        cilk_for (int a = 0; a < ncx; a++) {
            int idxSWC = a * nswc;
            int idxColX = a * nrx;
            for (int b = 0; b < nrxToUse; b += increment)
                swc[idxSWC++] = corr(x + idxColX + b, y + b, window);
        }
    }
int go_appendix()
{
    readsource("run370/CRS/CRS_QDC_00!qdc00.dat", 7215);
    readbackgr("run371/CRS/CRS_QDC_00!qdc00.dat", 56948);
    corr();

    TPaveText *pt = new TPaveText(0.65, 0.7, 0.85, 0.85, "NDC");
    pt->AddText("run371");
    pt->AddText("Cs-137");
    pt->AddText("box I (QDC ch00)");
    pt->AddText("compton edge in bin 860 +- 20");

    hcorr->GetXaxis()->SetTitle("QDC energy [bin]");
    hcorr->SetTitle("Cs-137 - box I");

    TCanvas* c1 = new TCanvas("c1", "c1", 800, 300);
    hcorr->Draw();
    pt->Draw("same");
    c1->SaveAs("appendix/Cs-137_boxI.jpg");

    return 0;
}
tree
texmacs_invarianted (tree t, tree oldt, string src) {
  tree orgbody= extract (t, "body");
  tree oldbody= extract (oldt, "body");
  hashmap<tree,tree> corr (UNINIT);
  hashmap<tree,tree> pred (UNINIT);
  hashmap<tree,tree> succ (UNINIT);
  tree uoldbody= texmacs_correspondence (oldbody, corr);
  texmacs_neighbours (oldbody, pred, succ);
  tree body= orgbody;
  body= texmacs_invarianted (body, UNINIT, -1, src, corr, pred, succ);
  hashmap<tree,path> h (path (-1));
  //cout << "body" << LF << HRULE << body << LF << HRULE;
  //cout << "orgbody" << LF << HRULE << orgbody << LF << HRULE;
  //cout << "uoldbody" << LF << HRULE << uoldbody << LF << HRULE;
  get_subtree_paths (uoldbody, path (), h);
  body= texmacs_invarianted_merge (body, src, orgbody, uoldbody, h);
  //cout << "merged" << LF << HRULE << body << LF << HRULE;
  body= texmacs_invarianted_replace (body, src);
  return change_doc_attr (t, "body", body);
}
TEST (CorrespondenceEstimation, CorrespondenceEstimationNormalShooting)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1 (new pcl::PointCloud<pcl::PointXYZ> ());
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2 (new pcl::PointCloud<pcl::PointXYZ> ());

  // Defining two parallel planes differing only by the y co-ordinate
  for (float i = 0; i < 10; i += 0.2)
  {
    for (float z = 0; z < 5; z += 0.2)
    {
      cloud1->points.push_back (pcl::PointXYZ (i, 0, z));
      // Ideally this should be the corresponding point to the point defined in the previous line
      cloud2->points.push_back (pcl::PointXYZ (i, 2, z));
    }
  }

  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  ne.setInputCloud (cloud1);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ> ());
  ne.setSearchMethod (tree);
  pcl::PointCloud<pcl::Normal>::Ptr cloud1_normals (new pcl::PointCloud<pcl::Normal>);
  ne.setKSearch (5);
  ne.compute (*cloud1_normals);  // All normals are perpendicular to the plane defined

  pcl::CorrespondencesPtr corr (new pcl::Correspondences);
  pcl::registration::CorrespondenceEstimationNormalShooting<pcl::PointXYZ, pcl::PointXYZ, pcl::Normal> ce;
  ce.setInputCloud (cloud1);
  ce.setKSearch (10);
  ce.setSourceNormals (cloud1_normals);
  ce.setInputTarget (cloud2);
  ce.determineCorrespondences (*corr);

  // Based on the data defined, the correspondence indices should be 1 <-> 1, 2 <-> 2, 3 <-> 3 etc.
  for (unsigned int i = 0; i < corr->size (); i++)
  {
    EXPECT_EQ ((*corr)[i].index_query, (*corr)[i].index_match);
  }
}
pcl::CorrespondencesPtr Recognizer::flann_matcher(pcl::PointCloud<DescriptorType>::Ptr input_descriptors,
                                                  pcl::PointCloud<DescriptorType>::Ptr target_descriptors,
                                                  float match_thresh)
{
    pcl::KdTreeFLANN<DescriptorType> match_search;
    match_search.setInputCloud(input_descriptors);

    for (size_t i = 0; i < target_descriptors->size (); ++i) {
        std::vector<int> neigh_indices (1);
        std::vector<float> neigh_sqr_dists (1);

        // Skip descriptors containing NaN or infinite bins.
        bool valid = true;
        for (int j = 0; j < 33; j++) { // for each bin
            if (pcl_isnan(target_descriptors->at(i).histogram[j]) ||
                !pcl_isfinite(target_descriptors->at(i).histogram[j])) {
                valid = false;
                break;
            }
        }
        if (!valid)
            continue;

        int found_neighs = match_search.nearestKSearch(target_descriptors->at(i), 1,
                                                       neigh_indices, neigh_sqr_dists);
        // Add a match only if the squared descriptor distance is below the threshold
        // (SHOT descriptor distances are between 0 and 1 by design).
        if (found_neighs == 1 && neigh_sqr_dists[0] < match_thresh) {
            pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]);
            (this->corrs)->push_back(corr);
        }
    }
    return this->corrs;
}
// Return the log-density for the prior on the correlations
double CorrPrior(std::vector<double> arctanh_corr)
{
    std::vector<double> corr(pchi);
    for (int j = 0; j < pchi; ++j) {
        corr[j] = tanh(arctanh_corr[j]);
        //std::cout << "corr[" << j << "]: " << corr[j] << ", ";
    }
    //std::cout << std::endl;

    // make sure correlation matrix is positive definite:
    // determinant of the 3x3 correlation matrix whose off-diagonal
    // elements are corr[0], corr[1], corr[2]
    double determ = (1.0 - corr[2] * corr[2]) - corr[0] * (corr[0] - corr[2] * corr[1])
        + corr[1] * (corr[0] * corr[2] - corr[1]);

    double logprior;
    if (determ > 0) {
        // correlation matrix is positive definite, so calculate the log prior density
        logprior = (0.5 * pchi * (pchi - 1.0) - 1.0) * log(determ);
        logprior -= 0.5 * (pchi + 1.0) * log(1.0 - corr[2] * corr[2]);
        logprior -= 0.5 * (pchi + 1.0) * log(1.0 - corr[1] * corr[1]);
        logprior -= 0.5 * (pchi + 1.0) * log(1.0 - corr[0] * corr[0]);
    } else {
        // correlation matrix is not positive definite
        logprior = -std::numeric_limits<double>::infinity();
    }
    return logprior;
}
// Generate a WWW-Authenticate header. This has the format:
// WWW-Authenticate: Digest realm="<home domain>",
//                          qop="auth",
//                          nonce="<nonce>",
//                          opaque="<opaque>",
//                          [stale=TRUE]
void HTTPDigestAuthenticate::generate_www_auth_header(std::string& www_auth_header,
                                                      bool include_stale,
                                                      AuthStore::Digest* digest)
{
  www_auth_header = "Digest";
  www_auth_header.append(" realm=\"").append(_home_domain).append("\"");
  www_auth_header.append(",qop=\"").append("auth").append("\"");
  www_auth_header.append(",nonce=\"").append(digest->_nonce).append("\"");
  www_auth_header.append(",opaque=\"").append(digest->_opaque).append("\"");

  if (include_stale)
  {
    www_auth_header.append(",stale=TRUE");
  }

  TRC_DEBUG("WWW-Authenticate header generated: %s", www_auth_header.c_str());
  TRC_DEBUG("Raising correlating marker with opaque value = %s", digest->_opaque.c_str());

  SAS::Marker corr(_trail, MARKED_ID_GENERIC_CORRELATOR, 0);
  corr.add_var_param(digest->_opaque);

  // The marker should be trace-scoped, and should not reactivate any trail groups
  SAS::report_marker(corr, SAS::Marker::Scope::Trace, false);
}
shared_ptr<MarketModel> FlatVolFactory::create(const EvolutionDescription& evolution,
                                               Size numberOfFactors) const
{
    const vector<Time>& rateTimes = evolution.rateTimes();
    Size numberOfRates = rateTimes.size() - 1;

    vector<Rate> initialRates(numberOfRates);
    for (Size i = 0; i < numberOfRates; ++i)
        initialRates[i] = yieldCurve_->forwardRate(rateTimes[i], rateTimes[i+1], Simple);

    vector<Volatility> displacedVolatilities(numberOfRates);
    for (Size i = 0; i < numberOfRates; ++i) {
        Volatility vol = volatility_(rateTimes[i]);  // to be changed
        displacedVolatilities[i] = initialRates[i]*vol / (initialRates[i] + displacement_);
    }

    vector<Spread> displacements(numberOfRates, displacement_);

    Matrix correlations = exponentialCorrelations(evolution.rateTimes(),
                                                  longTermCorrelation_, beta_);
    shared_ptr<PiecewiseConstantCorrelation> corr(
        new TimeHomogeneousForwardCorrelation(correlations, rateTimes));
    return shared_ptr<MarketModel>(new FlatVol(displacedVolatilities,
                                               corr,
                                               evolution,
                                               numberOfFactors,
                                               initialRates,
                                               displacements));
}
pcl::CorrespondencesPtr ORPointCloud::correspondences(const ORPointCloud* model,
                                                      const ORPointCloud* scene)
{
    pcl::CorrespondencesPtr model_scene_corrs (new pcl::Correspondences ());
    pcl::KdTreeFLANN<DescriptorType> match_search;
    match_search.setInputCloud (model->descriptors);

    // For each scene keypoint descriptor, find the nearest neighbour in the model
    // keypoint descriptor cloud and add it to the correspondences vector.
    for (size_t i = 0; i < scene->descriptors->size (); ++i)
    {
        std::vector<int> neigh_indices (1);
        std::vector<float> neigh_sqr_dists (1);
        if (!pcl_isfinite (scene->descriptors->at (i).descriptor[0]))  // skipping NaNs
        {
            continue;
        }
        int found_neighs = match_search.nearestKSearch (scene->descriptors->at (i), 1,
                                                        neigh_indices, neigh_sqr_dists);
        // Add a match only if the squared descriptor distance is less than 0.25
        // (SHOT descriptor distances are between 0 and 1 by design).
        if (found_neighs == 1 && neigh_sqr_dists[0] < 0.25f)
        {
            pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]);
            model_scene_corrs->push_back(corr);
        }
    }
    return model_scene_corrs;
}
int KinshipHolder::loadDecomposed() {
  LineReader lr(this->eigenFileName);
  int lineNo = 0;
  int fieldLen = 0;
  std::vector<std::string> fd;
  std::vector<int> columnToExtract;
  std::vector<std::string> header;  // header line of the kinship eigen file
  Eigen::MatrixXf& matK = this->matK->mat;
  Eigen::MatrixXf& matS = this->matS->mat;
  Eigen::MatrixXf& matU = this->matU->mat;
  const std::vector<std::string>& names = *this->pSample;
  const int NumSample = (int)names.size();
  std::map<std::string, int> nameMap;
  makeMap(names, &nameMap);
  std::map<std::string, int> headerMap;

  while (lr.readLineBySep(&fd, "\t ")) {
    ++lineNo;
    if (lineNo == 1) {  // check header
      header = fd;
      fieldLen = fd.size();
      if (fieldLen < 3) {  // at least three columns: IID, Lambda, U1
        logger->error(
            "Insufficient column number (<3) in the first line of kinship "
            "file!");
        return -1;
      };
      for (size_t i = 0; i != fd.size(); ++i) {
        fd[i] = tolower(fd[i]);
      }
      makeMap(fd, &headerMap);
      if (fd.size() != headerMap.size()) {
        logger->error("Kinship file has duplicated headers!");
        return -1;
      }

      // check IID, Lambda, U1, U2, ... U(N) where (N) is the sample size
      if (headerMap.count("iid") == 0) {
        logger->error("Missing 'IID' column!");
        return -1;
      }
      columnToExtract.push_back(headerMap["iid"]);
      if (headerMap.count("lambda") == 0) {
        logger->error("Missing 'Lambda' column!");
        return -1;
      }
      columnToExtract.push_back(headerMap["lambda"]);

      std::string s;
      for (int i = 0; i < NumSample; ++i) {
        s = "u";
        s += toString(i + 1);
        if (headerMap.count(s) == 0) {
          logger->error("Missing '%s' column!", s.c_str());
          return -1;
        }
        columnToExtract.push_back(headerMap[s]);
      }
      s = "u";
      s += toString(NumSample + 1);
      if (headerMap.count(s) != 0) {
        logger->error("Unexpected column '%s'!", s.c_str());
        return -1;
      }

      matS.resize(NumSample, 1);
      matU.resize(NumSample, NumSample);
      continue;
    }

    // body lines
    if ((int)fd.size() != fieldLen) {
      logger->error(
          "Inconsistent column number [ %zu ] (used to be [ %d ]) in kinship "
          "file line [ %d ] - skip this file!",
          fd.size(), fieldLen, lineNo);
      return -1;
    }
    const int iidColumn = columnToExtract[0];
    const std::string& iid = fd[iidColumn];
    if (nameMap.count(iid) == 0) {
      logger->error("Unexpected sample [ %s ]!", iid.c_str());
      return -1;
    }
    const int row = nameMap[iid];

    const int lambdaColumn = columnToExtract[1];
    double temp = 0.0;
    if (!str2double(fd[lambdaColumn], &temp)) {
      logger->warn("Invalid numeric value [ %s ] treated as zero!",
                   fd[lambdaColumn].c_str());
    }
    matS(lineNo - 2, 0) = temp;

    for (int i = 0; i < NumSample; ++i) {
      int uColumn = columnToExtract[i + 2];
      if (!str2double(fd[uColumn], &temp)) {
        logger->warn("Invalid numeric value [ %s ] treated as zero!",
                     fd[uColumn].c_str());
      }
      matU(row, i) = temp;
    }
  }

  // verify eigen decomposition results make sense:
  // check the largest eigen vector and eigen value
  Eigen::MatrixXf v1 = matK * matU.col(0);
  Eigen::MatrixXf v2 = matS(0, 0) * matU.col(0);
  if (matS(0, 0) > 0.5 && v1.col(0).norm() > 0.5 && v2.col(0).norm() > 0.5 &&
      corr(v1, v2) < 0.8) {
    logger->warn("Cannot verify spectral decompose results!");
    return -1;
  }
  // check the min(10, NumSample) random eigen vector and eigen value
  int randomCol = 10;
  if (randomCol > NumSample - 1) {
    randomCol = NumSample - 1;
  }
  v1 = matK * matU.col(randomCol);
  v2 = matS(randomCol, 0) * matU.col(randomCol);
  if (matS(randomCol, 0) > 0.5 && v1.col(0).norm() > 0.5 &&
      v2.col(0).norm() > 0.5 && corr(v1, v2) < 0.8) {
    logger->warn("Cannot verify spectral decompose results!");
    return -1;
  }

#ifdef DEBUG
  std::string tmp = fn;
  tmp += ".tmp";
  std::ofstream ofs(tmp.c_str(), std::ofstream::out);
  ofs << mat;
  ofs.close();
#endif
  // fprintf(stderr, "Kinship matrix [ %d x %d ] loaded", (int)mat.rows(),
  //         (int)mat.cols());

  if (this->matK) {
    delete this->matK;
    this->matK = NULL;
  }
  return 0;
}