void calcMeanAndCovarWeighedVectorized(const Eigen::MatrixXf &input, const Eigen::VectorXd &inputWeights, Eigen::MatrixXf &out_covMat, Eigen::VectorXf &out_mean, Eigen::MatrixXf &temp)
	{
		out_mean=input.col(0); //copy a column just to resize out_mean to the right number of rows
		out_mean.setZero();
		double wSumInv=1.0/inputWeights.sum();
		//weighted mean: accumulate each column scaled by its normalized weight
		for (int k=0;k<inputWeights.size();k++){
			double w=inputWeights[k];
			out_mean+=input.col(k)*(float)(w*wSumInv);
		}
		//center the data around the weighted mean
		temp = (input.colwise() - out_mean);
		//scale each centered column by the square root of its normalized weight,
		//so each weight is included exactly once per element of the product temp*temp' below
		for (int k=0;k<inputWeights.size();k++){
			temp.col(k) *= (float)(sqrt(inputWeights(k)*wSumInv));
		}
		out_covMat = temp*temp.transpose();
	}
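A minimal usage sketch (not part of the original source; the data and variable names below are made up for illustration). With uniform weights this reduces to the ordinary mean and the 1/N-normalized covariance:

#include <Eigen/Dense>
#include <iostream>

int main() {
	// 3-dimensional samples stored as columns, one weight per sample (hypothetical data)
	Eigen::MatrixXf samples = Eigen::MatrixXf::Random(3, 100);
	Eigen::VectorXd weights = Eigen::VectorXd::Constant(100, 1.0);

	Eigen::MatrixXf covMat, temp;
	Eigen::VectorXf mean;
	calcMeanAndCovarWeighedVectorized(samples, weights, covMat, mean, temp);

	std::cout << "weighted mean:\n" << mean << "\ncovariance:\n" << covMat << std::endl;
	return 0;
}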
Example #2
  int TestCovariate(Matrix& Xnull, Matrix& Y, Matrix& Xcol,
                    const EigenMatrix& kinshipU, const EigenMatrix& kinshipS){
    Eigen::MatrixXf g;
    G_to_Eigen(Xcol, &g);

    // store U'*G for computing AF later.
    const Eigen::MatrixXf& U = kinshipU.mat;
    this->ug = U.transpose() * g;

    // center the genotype column(s) by subtracting the column means
    Eigen::RowVectorXf g_mean = g.colwise().mean();
    g = g.rowwise() - g_mean;

    // score statistic: (g' * transformedY)^2 / (g'g * gamma)
    double gTg = g.array().square().sum();
    double t_new = (g.array() * this->transformedY.array()).sum();
    t_new = t_new * t_new / gTg;
    double t_score = t_new / this->gamma;

    // effect size estimate and its variance
    this->betaG = (g.transpose() * this->transformedY).sum() / gTg / this->gamma;
    this->betaGVar = this->ySigmaY / gTg / this->gamma;

    // p-value from the upper tail of a chi-square distribution with 1 df
    this->pvalue = gsl_cdf_chisq_Q(t_score, 1.0);
    return 0;
  }
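Example #2 computes a score statistic of the form (g'y)^2 / (g'g * gamma) and converts it to a p-value using the upper tail of a chi-square distribution with 1 degree of freedom. Below is a standalone sketch of that arithmetic only; the inputs are hypothetical, and transformedY, gamma, and ySigmaY are class members precomputed elsewhere in the original code, replaced here by placeholders:

#include <Eigen/Dense>
#include <gsl/gsl_cdf.h>
#include <iostream>

int main() {
    // hypothetical centered genotype vector and transformed phenotype
    Eigen::VectorXf g = Eigen::VectorXf::Random(500);
    g.array() -= g.mean();                        // center g, as in TestCovariate
    Eigen::VectorXf y = Eigen::VectorXf::Random(500);
    double gamma = 1.0;                           // placeholder for the model's scaling factor

    double gTg = g.array().square().sum();
    double t = (g.array() * y.array()).sum();
    double t_score = t * t / gTg / gamma;         // (g'y)^2 / (g'g * gamma)

    double pvalue = gsl_cdf_chisq_Q(t_score, 1.0);  // upper-tail chi-square, 1 df
    std::cout << "score = " << t_score << ", p = " << pvalue << std::endl;
    return 0;
}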
Example #3
// subtract each column's mean from that column (column-wise centering)
Eigen::MatrixXf centerMatrix(const Eigen::MatrixXf& x) {
  return x.rowwise() - x.colwise().mean();
}
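A quick sanity check (hypothetical data, not from the original source): after centering, every column mean of the result should be numerically zero.

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::MatrixXf x = Eigen::MatrixXf::Random(4, 3);
  Eigen::MatrixXf centered = centerMatrix(x);
  // each column mean should now be (numerically) zero
  std::cout << centered.colwise().mean() << std::endl;
  return 0;
}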