Example #1
	Eigen::MatrixXf featureGradient( const Eigen::MatrixXf & a, const Eigen::MatrixXf & b ) const {
		if (ntype_ == NO_NORMALIZATION )
			return kernelGradient( a, b );
		else if (ntype_ == NORMALIZE_SYMMETRIC ) {
			// Filter the normalized inputs; the boolean flag requests the transposed filter.
			Eigen::MatrixXf fa = lattice_.compute( a*norm_.asDiagonal(), true );
			Eigen::MatrixXf fb = lattice_.compute( b*norm_.asDiagonal() );
			Eigen::MatrixXf ones = Eigen::MatrixXf::Ones( a.rows(), a.cols() );
			Eigen::VectorXf norm3 = norm_.array()*norm_.array()*norm_.array();
			// Product-rule term from the feature dependence of the normalization constants.
			Eigen::MatrixXf r = kernelGradient( 0.5*( a.array()*fb.array() + fa.array()*b.array() ).matrix()*norm3.asDiagonal(), ones );
			return -r + kernelGradient( a*norm_.asDiagonal(), b*norm_.asDiagonal() );
		}
		else if (ntype_ == NORMALIZE_AFTER ) {
			Eigen::MatrixXf fb = lattice_.compute( b );
			
			Eigen::MatrixXf ones = Eigen::MatrixXf::Ones( a.rows(), a.cols() );
			Eigen::VectorXf norm2 = norm_.array()*norm_.array();
			Eigen::MatrixXf r = kernelGradient( ( a.array()*fb.array() ).matrix()*norm2.asDiagonal(), ones );
			return -r + kernelGradient( a*norm_.asDiagonal(), b );
		}
		else /*if (ntype_ == NORMALIZE_BEFORE )*/ {
			Eigen::MatrixXf fa = lattice_.compute( a, true );
			
			Eigen::MatrixXf ones = Eigen::MatrixXf::Ones( a.rows(), a.cols() );
			Eigen::VectorXf norm2 = norm_.array()*norm_.array();
			Eigen::MatrixXf r = kernelGradient( ( fa.array()*b.array() ).matrix()*norm2.asDiagonal(), ones );
			return -r + kernelGradient( a, b*norm_.asDiagonal() );
		}
	}
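All three normalized branches rest on the same Eigen idiom: right-multiplying a matrix by norm_.asDiagonal() scales column j by norm_(j) without ever materializing a dense diagonal matrix. A minimal, self-contained sketch of that idiom (the toy values are invented for illustration):

#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::MatrixXf a(2, 3);
    a << 1, 2, 3,
         4, 5, 6;
    Eigen::VectorXf norm(3);
    norm << 10, 100, 1000;
    // Column j of a is scaled by norm(j), exactly like the
    // a*norm_.asDiagonal() calls in featureGradient above.
    std::cout << a * norm.asDiagonal() << "\n";
    return 0;
}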
Example #2
File: inference.cpp  Project: rlebret/hpca
int run() {
    // store cooccurrence in Eigen sparse matrix object
    REDSVD::SMatrixXf A;
    const int ncontext = read_cooccurrence(c_cooc_file_name, A, verbose);

    // read V matrix from file
    Eigen::MatrixXf V;
    read_eigen_truncated_matrix(c_input_file_V_name, V, dim);
    // read S vector of singular values from file
    Eigen::VectorXf S;
    read_eigen_vector(c_input_file_S_name, S, dim, 1.0-eig);

    // checking the dimensions
    if (V.rows() != ncontext){
        throw std::runtime_error("size mismatch between projection V matrix and the number of context words!!");
    }

    // starting projection
    if (verbose) fprintf(stderr, "Running the projection...");
    const double start = REDSVD::Util::getSec();
    Eigen::MatrixXf embeddings = A * V * S.asDiagonal().inverse();
    if (norm) embeddings.rowwise().normalize();
    if (verbose) fprintf(stderr, "done in %.2f.\n",REDSVD::Util::getSec() - start);

    // write out embeddings
    const char *c_output_name = get_full_path(c_cooc_dir_name, c_output_file_name);
    if (verbose) fprintf(stderr, "writing infered word embeddings in %s\n", c_output_name);
    write_eigen_matrix(c_output_name, embeddings);
    free((char*)c_output_name);

    return 0;
}
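The projection line embeddings = A * V * S.asDiagonal().inverse() divides each column of A * V by the corresponding singular value. A sketch with made-up matrices (not hpca's real data) showing that the diagonal inverse is the same as scaling by element-wise reciprocals:

#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::MatrixXf AV(2, 2);
    AV << 2, 9,
          4, 3;
    Eigen::VectorXf S(2);
    S << 2, 3;
    Eigen::MatrixXf e1 = AV * S.asDiagonal().inverse();
    Eigen::MatrixXf e2 = AV * S.cwiseInverse().asDiagonal();  // equivalent
    std::cout << (e1 - e2).norm() << "\n";  // prints 0
    return 0;
}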
Example #3
	void filter( Eigen::MatrixXf & out, const Eigen::MatrixXf & in, bool transpose ) const {
		// Copy the input, applying the pre-filter normalization where the mode requires it
		if( ntype_ == NORMALIZE_SYMMETRIC || (ntype_ == NORMALIZE_BEFORE && !transpose) || (ntype_ == NORMALIZE_AFTER && transpose))
			out = in*norm_.asDiagonal();
		else
			out = in;
	
		// Filter
		if( transpose )
			lattice_.compute( out, out, true );
		else
			lattice_.compute( out, out );
	
		// Normalize again
		if( ntype_ == NORMALIZE_SYMMETRIC || (ntype_ == NORMALIZE_BEFORE && transpose) || (ntype_ == NORMALIZE_AFTER && !transpose))
			out = out*norm_.asDiagonal();
	}
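Depending on ntype_ and transpose, filter() scales by norm_ before the lattice pass, after it, or on both sides. A minimal sketch of the symmetric path, with a plain symmetric matrix standing in for lattice_.compute (a hypothetical stand-in, not the actual lattice filter):

#include <Eigen/Dense>
#include <iostream>

int main() {
    // Stand-in for the lattice filter: a symmetric point-to-point kernel.
    Eigen::MatrixXf K(3, 3);
    K << 1.0f, 0.5f, 0.2f,
         0.5f, 1.0f, 0.5f,
         0.2f, 0.5f, 1.0f;
    Eigen::VectorXf norm(3);
    norm << 0.5f, 1.0f, 2.0f;
    Eigen::MatrixXf in = Eigen::MatrixXf::Random(2, 3);  // one column per point

    // NORMALIZE_SYMMETRIC: scale, filter, scale again.
    Eigen::MatrixXf out = in * norm.asDiagonal();  // pre-normalize
    out = out * K;                                 // "filter" across points
    out = out * norm.asDiagonal();                 // post-normalize

    // Same thing as a single operator: in * diag(norm) * K * diag(norm).
    std::cout << (out - in * (norm.asDiagonal() * K * norm.asDiagonal())).norm()
              << "\n";  // ~0 up to rounding
    return 0;
}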
Example #4
	virtual void setParameters( const Eigen::VectorXf & p ) {
		if (ktype_ == DIAG_KERNEL) {
			parameters_ = p;
			initLattice( p.asDiagonal() * f_ );
		}
		else if (ktype_ == FULL_KERNEL) {
			// Reinterpret the flat parameter vector as a full kernel matrix;
			// resize() reuses the column-major buffer since the total size is unchanged.
			Eigen::MatrixXf tmp = p;
			tmp.resize( parameters_.rows(), parameters_.cols() );
			parameters_ = tmp;
			initLattice( tmp * f_ );
		}
	}
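In the FULL_KERNEL branch, the flat parameter vector is copied into a MatrixXf and resized in place; since the total number of coefficients is unchanged, the column-major buffer is reused and the call behaves as a reshape. A sketch of the same reinterpretation made explicit with Eigen::Map (the 2x3 shape is hypothetical):

#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::VectorXf p(6);
    p << 1, 2, 3, 4, 5, 6;
    const int rows = 2, cols = 3;
    // Reads the flat buffer column-major, just as the resize() above does:
    // columns come out as (1,2), (3,4), (5,6).
    Eigen::Map<const Eigen::MatrixXf> M(p.data(), rows, cols);
    std::cout << M << "\n";
    return 0;
}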
Example #5
File: Skat.cpp  Project: gpcr/rvtests
  int Fit(Vector& res_G,  // residual under NULL -- may change when permuting
          Vector& v_G,    // variance under NULL -- may change when permuting
          Matrix& X_G,    // covariates
          Matrix& G_G,    // genotype
          Vector& w_G)    // weight
  {
    this->nPeople = X_G.rows;
    this->nMarker = G_G.cols;
    this->nCovariate = X_G.cols;

    // calculate w_sqrt (element-wise square root of the weights)
    G_to_Eigen(w_G, &this->w_sqrt);
    w_sqrt = w_sqrt.cwiseSqrt();

    // calculate K_sqrt = W^{1/2} * G', the square-root factor of K = G * W * G'
    Eigen::MatrixXf G;
    G_to_Eigen(G_G, &G);
    this->K_sqrt.noalias() = w_sqrt.asDiagonal() * G.transpose();

    // calculate Q = ||K_sqrt * res||^2 = res' * G * W * G' * res
    Eigen::VectorXf res;
    G_to_Eigen(res_G, &res);
    this->Q = (this->K_sqrt * res).squaredNorm();

    // calculate P0 = V - V X (X' V X)^(-1) X' V
    Eigen::VectorXf v;
    G_to_Eigen(v_G, &v);
    if (this->nCovariate == 1) {
      // Single covariate: P0 reduces to diag(v) - v * v' / sum(v).
      P0 = -v * v.transpose() / v.sum();
      P0.diagonal() += v;
    } else {
      Eigen::MatrixXf X;
      G_to_Eigen(X_G, &X);
      Eigen::MatrixXf XtV;  // X^t V
      XtV.noalias() = X.transpose() * v.asDiagonal();
      P0 = -XtV.transpose() * ((XtV * X).inverse()) * XtV;
      P0.diagonal() += v;
    }
    // eigen decomposition
    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXf> es;
    es.compute(K_sqrt * P0 * K_sqrt.transpose());

#ifdef DEBUG
    std::ofstream k("K");
    k << K_sqrt;
    k.close();
#endif

    this->mixChiSq.reset();
    int r_ub = std::min(nPeople, nMarker);  // rank upper bound
    int r = 0;                              // number of eigenvalues kept
    int eigen_len = es.eigenvalues().size();
    // Eigenvalues are sorted in increasing order; walk from the largest down.
    for (int i = eigen_len - 1; i >= 0; i--) {
      if (es.eigenvalues()[i] > ZBOUND && r < r_ub) {
        this->mixChiSq.addLambda(es.eigenvalues()[i]);
        r++;
      } else
        break;
    }
    // calculate p-value
    this->pValue = this->mixChiSq.getPvalue(this->Q);
    // Fall back to Liu's moment-matching approximation when the primary
    // method returns a degenerate p-value.
    if (this->pValue == 0.0 || this->pValue == 1.0) {
      this->pValue = this->mixChiSq.getLiuPvalue(this->Q);
    }
    return 0;
  };
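The nCovariate == 1 branch is a shortcut: when the single covariate is an intercept (a column of ones), the general projection P0 = V - V X (X' V X)^(-1) X' V collapses to diag(v) - v v' / sum(v). A small numerical check of that equivalence (toy weights, intercept-only case assumed):

#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::VectorXf v(4);
    v << 1, 2, 3, 4;

    // Shortcut form used when nCovariate == 1.
    Eigen::MatrixXf P0 = -v * v.transpose() / v.sum();
    P0.diagonal() += v;

    // General form with X = a column of ones.
    Eigen::MatrixXf X = Eigen::MatrixXf::Ones(4, 1);
    Eigen::MatrixXf XtV = X.transpose() * v.asDiagonal();  // X' V
    Eigen::MatrixXf P0g = -XtV.transpose() * (XtV * X).inverse() * XtV;
    P0g.diagonal() += v;

    std::cout << (P0 - P0g).norm() << "\n";  // ~0 up to rounding
    return 0;
}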