int main(void) { Eigen::MatrixXd A(3,3); A << 4,-1,2, -1,6,0, 2,0,5; std::cout << "The matrix A is" << std::endl << A << std::endl; Eigen::LLT<Eigen::MatrixXd> lltOfA(A); // compute the Cholesky decomposition of A Eigen::MatrixXd L = lltOfA.matrixL(); // retrieve factor L in the decomposition std::cout << "The Cholesky factor L is" << std::endl << L << std::endl; Eigen::VectorXd v=Eigen::VectorXd::Random(3); std::cout<<"v= "<<v<<std::endl; std::cout<<"L*v= "<<L*v<<std::endl; std::cout<<"\n--------------------\n"<<std::endl; Eigen::MatrixXd B(2,2); B << 4,0, 0,0; std::cout << "The matrix A is" << std::endl << A << std::endl; Eigen::LDLT<Eigen::MatrixXd> ldltOfB(B); // compute the Cholesky decomposition of A std::cout<<"ldltOfB.info() ="<<ldltOfB.info()<<std::endl; assert(ldltOfB.info()==Eigen::Success); Eigen::MatrixXd Lb = ldltOfB.matrixL(); // retrieve factor L in the decomposition std::cout << "The Cholesky factor L is" << std::endl << Lb << std::endl; std::cout<< "\nThe diagonal matrix D is "<<ldltOfB.vectorD ()<<std::endl; return 0; }
// Builds a rectifying homography H from two pairs of user-picked lines
// (presumably pairs that are orthogonal in the world plane — confirm against
// the pin UI), then re-renders the image through H's inverse.
void ImageViewer_ex2::adjustimageAffineSimilarity() {
    Vector3f l(3,1);   // first line of a pair, homogeneous coordinates (a, b, c)
    Vector3f m(3,1);   // second line of a pair
    Vector3f r1(3,1);  // constraint row from line pair (2, 3)
    Vector3f r2(3,1);  // constraint row from line pair (0, 1)
    MatrixXf A(2,3);   // 2x3 homogeneous system A * s = 0

    // Each line pair (l, m) contributes one linear constraint on the
    // unknown symmetric 2x2 parameters s = (s0, s1, s2):
    //   l0*m0*s0 + (l0*m1 + l1*m0)*s1 + l1*m1*s2 = 0
    l = pinmanager->getLine(0);
    m = pinmanager->getLine(1);
    r2 << l(0) * m(0), l(0) * m(1) + l(1) * m(0), l(1) * m(1);
    l = pinmanager->getLine(2);
    m = pinmanager->getLine(3);
    r1 << l(0) * m(0), l(0) * m(1) + l(1) * m(0), l(1) * m(1);
    A << r1.transpose(), r2.transpose();

    // The solution is the null space of A: the last column of V from the SVD
    // (singular vector for the smallest singular value).
    JacobiSVD<MatrixXf> SVD(A, ComputeFullV);
    VectorXf S = SVD.matrixV().col(SVD.matrixV().cols() - 1);
    //S /= S(2);
    // NOTE(review): the scale is fixed by overwriting S(2) = 1 rather than
    // dividing through by S(2) (commented out above) — these are NOT
    // equivalent for S(0), S(1); confirm this is intentional.
    S(2) = 1;

    // Reassemble the symmetric 2x2 matrix and take its Cholesky factor.
    MatrixXf kkt(2,2);
    kkt << S(0), S(1), S(1), S(2);
    LLT<MatrixXf> lltOfA(kkt);
    // NOTE(review): variable is named L but holds the UPPER factor U
    // (kkt = U^T * U); verify this is the intended factor.
    MatrixXf L = lltOfA.matrixU();

    // H is filled row-major by operator<<, while single-index access L(i)
    // on the 2x2 matrix is column-major (L(1) = L(1,0), L(2) = L(0,1)), so
    // the upper-left block of H receives the transpose of L.
    H << L(0), L(1), 0, L(2), L(3), 0, 0, 0, 1;
    //std::cout << H << std::endl;
    // Apply the inverse transform to undo the estimated distortion.
    H = H.inverse();

    // Re-render the whole widget area using the updated homography H.
    QSize imgSize(this->width(), this->height());
    QVector<QPoint> areaRender;
    areaRender << QPoint(0,0) << QPoint(0, imgSize.height()) << QPoint(imgSize.width(), imgSize.height()) << QPoint(imgSize.width(), 0);
    showResult(imgSize, areaRender);
}
bool MultipleTraitLinearRegressionScoreTest::FitNullModel( Matrix& cov, Matrix& pheno, const FormulaVector& tests) { MultipleTraitLinearRegressionScoreTestInternal& w = *this->work; // set some values w.N = pheno.rows; w.T = pheno.cols; w.C = cov.cols; w.M = -1; w.Y.resize(tests.size()); w.Z.resize(tests.size()); w.ZZinv.resize(tests.size()); w.hasCovariate.resize(tests.size()); w.missingIndex.resize(tests.size()); w.Uyz.resize(tests.size()); w.Ugz.resize(tests.size()); w.Uyg.resize(tests.size()); w.sigma2.resize(tests.size()); w.nTest = tests.size(); ustat.Dimension(blockSize, tests.size()); vstat.Dimension(blockSize, tests.size()); pvalue.Dimension(blockSize, tests.size()); // create dict (key: phenotype/cov name, val: index) std::map<std::string, int> phenoDict; std::map<std::string, int> covDict; makeColNameToDict(pheno, &phenoDict); makeColNameToDict(cov, &covDict); // create Y, Z std::vector<std::string> phenoName; std::vector<std::string> covName; std::vector<int> phenoCol; std::vector<int> covCol; std::vector<std::vector<std::string> > allCovName; // arrange Y, Z according to missing pattern for each trait for (int i = 0; i < w.nTest; ++i) { phenoName = tests.getPhenotype(i); phenoCol.clear(); phenoCol.push_back(phenoDict[phenoName[0]]); covName = tests.getCovariate(i); allCovName.push_back(covName); covCol.clear(); for (size_t j = 0; j != covName.size(); ++j) { if (covName[j] == "1") { continue; } assert(covDict.count(covName[j])); covCol.push_back(covDict[covName[j]]); } w.hasCovariate[i] = covCol.size() > 0; makeMatrix(pheno, phenoCol, &w.Y[i]); if (w.hasCovariate[i]) { makeMatrix(cov, covCol, &w.Z[i]); } // create index to indicate missingness w.missingIndex[i].resize(w.N); for (int j = 0; j < w.N; ++j) { if (hasMissingInRow(w.Y[i], j)) { w.missingIndex[i][j] = true; continue; } else { if (w.hasCovariate[i] && hasMissingInRow(w.Z[i], j)) { w.missingIndex[i][j] = true; continue; } } w.missingIndex[i][j] = false; } removeRow(w.missingIndex[i], &w.Y[i]); 
removeRow(w.missingIndex[i], &w.Z[i]); if (w.Y[i].rows() == 0) { fprintf(stderr, "Due to missingness, there is no sample to test!\n"); return -1; } // center and scale Y, Z scale(&w.Y[i]); scale(&w.Z[i]); // calcualte Uzy, inv(Z'Z) if (w.hasCovariate[i]) { w.ZZinv[i].noalias() = (w.Z[i].transpose() * w.Z[i]) .ldlt() .solve(EMat::Identity(w.Z[i].cols(), w.Z[i].cols())); w.Uyz[i].noalias() = w.Z[i].transpose() * w.Y[i]; w.sigma2[i] = (w.Y[i].transpose() * w.Y[i] - w.Uyz[i].transpose() * w.ZZinv[i] * w.Uyz[i])(0, 0) / w.Y[i].rows(); } else { w.sigma2[i] = w.Y[i].col(0).squaredNorm() / w.Y[i].rows(); } } // end for i // Make groups based on model covariats and missing patterns of (Y, Z) // Detail: // For test: 1, 2, 3, ..., nTest, a possible grouping is: // (1, 3), (2), (4, 5) ... // => // test_1 => group 0, offset 0 // test_2 => group 1, offset 0 // test_3 => group 0, offset 1 // // For each test, we will use its specific // [covar_name_1, covar_name_2, ...., missing_pattern], as the value to // distingish groups std::map<std::vector<std::string>, int> groupDict; groupSize = 0; for (int i = 0; i < w.nTest; ++i) { std::vector<std::string> key = allCovName[i]; key.push_back(toString(w.missingIndex[i])); if (0 == groupDict.count(key)) { groupDict[key] = groupSize; group.resize(groupSize + 1); group[groupSize].push_back(i); groupSize++; } else { group[groupDict[key]].push_back(i); } } // fprintf(stderr, "total %d missingness group\n", groupSize); w.G.resize(groupSize); w.groupedY.resize(groupSize); w.groupedZ.resize(groupSize); w.groupedUyz.resize(groupSize); w.groupedZZinv.resize(groupSize); w.groupedL.resize(groupSize); w.ustat.resize(groupSize); w.vstat.resize(groupSize); w.groupedHasCovariate.resize(groupSize); for (int i = 0; i < groupSize; ++i) { const int nc = group[i].size(); const int nr = w.Y[group[i][0]].rows(); w.groupedY[i].resize(nr, nc); for (int j = 0; j < nc; ++j) { w.groupedY[i].col(j) = w.Y[group[i][j]]; } // initialize G w.G[i].resize(nr, blockSize); 
w.ustat[i].resize(blockSize, nc); w.vstat[i].resize(blockSize, 1); w.groupedZ[i] = w.Z[group[i][0]]; w.groupedHasCovariate[i] = w.hasCovariate[group[i][0]]; if (w.groupedHasCovariate[i]) { w.groupedUyz[i] = w.groupedZ[i].transpose() * w.groupedY[i]; } w.groupedZZinv[i] = w.ZZinv[group[i][0]]; Eigen::LLT<Eigen::MatrixXf> lltOfA(w.groupedZZinv[i]); // L * L' = A w.groupedL[i] = lltOfA.matrixL(); // fprintf(stderr, "i = %d, group has covar = %s\n", i, // w.groupedHasCovariate[i] ? "true" : "false"); } // clean up memory w.Y.clear(); w.Z.clear(); w.Uyz.clear(); w.hasCovariate.clear(); w.ZZinv.clear(); return true; }