Example #1
int MAPGetDir(VecCl &Par,VecCl &MaxStep,MatrCl &DirMat,double ErrorMatr,
              double &CurHi) 
  {
   int Dim=Par.Dim(),DimGr=1;
   if (MAPVar->ExperPoint.Ptr!=NULL) DimGr=MAPVar->ExperPoint.Dim();
   if ((Dim>0) && (DimGr>0))
     {
      int k;//,k1;
      double d;
      MatrCl DatM;
      VecCl  MisV(Dim),Norm(Dim),EigV(Dim);
//      SetGrFunc(DatM,MisV,Par,MaxStep);
//      SetCorelFunc(DatM,MisV,CurHi); // now DatGr(Dim,Dim) correlations only
// MisV - derivative of the function
      MAPVar->SetCorrelation(DatM,MisV,CurHi,Par,MaxStep);
      // Scale rows/columns by 1/sqrt(diagonal) so DatM becomes a correlation-like matrix:
      for (k=1;k<=Dim;k++)
        if ((d=DatM(k,k))>MathZer) Norm[k]=1/sqrt(d); else Norm[k]=0;
      DatM=VectorNormalizeMatr(Norm,DatM,Norm);
      // Decompose the normalized matrix; the eigenvalues are returned in EigV:
      if (!ReduceLQ(DatM,DatM,EigV.Ptr,ErrorMatr))
        {cout<<" Cannot calculate ReduceLQ.\n";return 0;}
      DatM=Transpon(DatM);
      // Invert only the eigenvalues that are numerically nonzero:
      for (k=1;k<=Dim;k++)
        if ((fabs(d=EigV[k]))>MathZer) EigV[k]=1/d; else EigV[k]=0;
      // Step direction: minus the rescaled pseudo-inverse applied to the derivative vector MisV:
      MaxStep=MaxStep*0-VectorNormalizeMatr(EigV,DatM,Norm)*MisV;
//cout<<" MaxStep "<<MaxStep<<"\n";
      DirMat=VectorNormalizeMatr(Norm,Transpon(DatM),Norm*0+1);
     }
   return 1;
  }
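The direction step above is, in effect, a scaled pseudo-inverse applied to the derivative vector: the matrix is normalized by 1/sqrt of its diagonal, decomposed, and only the numerically nonzero eigenvalues are inverted. Below is a minimal self-contained sketch of that pattern in Eigen, for illustration only; it assumes ReduceLQ amounts to an eigendecomposition and that VectorNormalizeMatr(a,M,b) scales M as diag(a)*M*diag(b), since VecCl, MatrCl, and those routines are not shown here.

#include <Eigen/Dense>
#include <cmath>

// Sketch only: normalized pseudo-inverse step, mirroring MAPGetDir's core computation.
Eigen::VectorXd mapStepSketch(const Eigen::MatrixXd &DatM,  // curvature/correlation-like matrix
                              const Eigen::VectorXd &MisV,  // derivative of the function
                              double MathZer = 1e-12)
{
  const int Dim = (int)DatM.rows();
  Eigen::VectorXd Norm(Dim);
  for (int k = 0; k < Dim; ++k)                             // 1/sqrt(diagonal), or 0 if degenerate
    Norm[k] = (DatM(k,k) > MathZer) ? 1.0/std::sqrt(DatM(k,k)) : 0.0;
  Eigen::MatrixXd C = Norm.asDiagonal() * DatM * Norm.asDiagonal();
  Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> es(C);     // stands in for ReduceLQ
  Eigen::VectorXd EigInv = es.eigenvalues();
  for (int k = 0; k < Dim; ++k)                             // invert only the nonzero eigenvalues
    EigInv[k] = (std::fabs(EigInv[k]) > MathZer) ? 1.0/EigInv[k] : 0.0;
  Eigen::MatrixXd Pinv = Norm.asDiagonal() * es.eigenvectors() * EigInv.asDiagonal()
                         * es.eigenvectors().transpose() * Norm.asDiagonal();
  return -(Pinv * MisV);                                    // proposed step (MaxStep analogue)
}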
void omxCallGREMLFitFunction(omxFitFunction *oo, int want, FitContext *fc){
  if (want & (FF_COMPUTE_PREOPTIMIZE)) return;
  
  //Recompute Expectation:
  omxExpectation* expectation = oo->expectation;
  omxExpectationCompute(expectation, NULL);
    
  omxGREMLFitState *gff = (omxGREMLFitState*)oo->argStruct; //<--Cast generic omxFitFunction to omxGREMLFitState
  
  //Ensure that the pointer in the GREML fitfunction is directed at the right FreeVarGroup
  //(not necessary for most compute plans):
  if(fc && gff->varGroup != fc->varGroup){
    gff->buildParamMap(fc->varGroup);
  }
  
  //Declare local variables used in more than one scope in this function:
  const double Scale = fabs(Global->llScale); //<--absolute value of loglikelihood scale
  const double NATLOG_2PI = 1.837877066409345483560659472811;	//<--log(2*pi)
  int i;
  Eigen::Map< Eigen::MatrixXd > Eigy(omxMatrixDataColumnMajor(gff->y), gff->y->cols, 1);
  Eigen::Map< Eigen::MatrixXd > Vinv(omxMatrixDataColumnMajor(gff->invcov), gff->invcov->rows, gff->invcov->cols);
  EigenMatrixAdaptor EigX(gff->X);
  Eigen::MatrixXd P, Py;
  P.setZero(gff->invcov->rows, gff->invcov->cols);
  double logdetV=0, logdetquadX=0;
  
  if(want & (FF_COMPUTE_FIT | FF_COMPUTE_GRADIENT | FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
    if(gff->usingGREMLExpectation){
      omxGREMLExpectation* oge = (omxGREMLExpectation*)(expectation->argStruct);
      
      //Check that factorizations of V and the quadratic form in X succeeded:
      if(oge->cholV_fail_om->data[0]){
        oo->matrix->data[0] = NA_REAL;
        if (fc) fc->recordIterationError("expected covariance matrix is non-positive-definite");
        return;
      }
      if(oge->cholquadX_fail){
        oo->matrix->data[0] = NA_REAL;
        if (fc) fc->recordIterationError("Cholesky factorization failed; possibly, the matrix of covariates is rank-deficient");
        return;
      }
      
      //Log determinant of V:
      logdetV = oge->logdetV_om->data[0];
      
      //Log determinant of quadX:
      for(i=0; i < gff->X->cols; i++){
        logdetquadX += log(oge->cholquadX_vectorD[i]);
      }
      logdetquadX *= 2;
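      //<--logdetquadX now holds log|X'*Vinv*X|; Scale*0.5 times it is the REML correction added to the deviance below.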
      gff->REMLcorrection = Scale*0.5*logdetquadX;
      
      //Finish computing fit (negative loglikelihood):
      P.triangularView<Eigen::Lower>() = Vinv.selfadjointView<Eigen::Lower>() * 
        (Eigen::MatrixXd::Identity(Vinv.rows(), Vinv.cols()) - 
          (EigX * oge->quadXinv.selfadjointView<Eigen::Lower>() * oge->XtVinv)); //Vinv * (I-Hatmat)
      Py = P.selfadjointView<Eigen::Lower>() * Eigy;
      oo->matrix->data[0] = gff->REMLcorrection + Scale*0.5*( (((double)gff->y->cols) * NATLOG_2PI) + logdetV + ( Eigy.transpose() * Py )(0,0));
      gff->nll = oo->matrix->data[0]; 
    }
    else{ //If not using GREML expectation, deal with means and cov in a general way to compute fit...
      //Declare locals:
      EigenMatrixAdaptor yhat(gff->means);
      EigenMatrixAdaptor EigV(gff->cov);
      double logdetV=0, logdetquadX=0;
      Eigen::MatrixXd Vinv, quadX;
      Eigen::LLT< Eigen::MatrixXd > cholV(gff->cov->rows);
      Eigen::LLT< Eigen::MatrixXd > cholquadX(gff->X->cols);
      Eigen::VectorXd cholV_vectorD, cholquadX_vectorD;
      
      //Cholesky factorization of V:
      cholV.compute(EigV);
      if(cholV.info() != Eigen::Success){
        omxRaiseErrorf("expected covariance matrix is non-positive-definite");
        oo->matrix->data[0] = NA_REAL;
        return;
      }
      //Log determinant of V:
      cholV_vectorD = (( Eigen::MatrixXd )(cholV.matrixL())).diagonal();
      for(i=0; i < gff->X->rows; i++){
        logdetV += log(cholV_vectorD[i]);
      }
      logdetV *= 2;
      
      Vinv = cholV.solve(Eigen::MatrixXd::Identity( EigV.rows(), EigV.cols() )); //<-- V inverse
      
      quadX = EigX.transpose() * Vinv * EigX; //<--Quadratic form in X
      
      cholquadX.compute(quadX); //<--Cholesky factorization of quadX
      if(cholquadX.info() != Eigen::Success){
        omxRaiseErrorf("Cholesky factorization failed; possibly, the matrix of covariates is rank-deficient");
        oo->matrix->data[0] = NA_REAL;
        return;
      }
      cholquadX_vectorD = (( Eigen::MatrixXd )(cholquadX.matrixL())).diagonal();
      for(i=0; i < gff->X->cols; i++){
        logdetquadX += log(cholquadX_vectorD[i]);
      }
      logdetquadX *= 2;
      gff->REMLcorrection = Scale*0.5*logdetquadX;
      
      //Finish computing fit:
      oo->matrix->data[0] = gff->REMLcorrection + Scale*0.5*( ((double)gff->y->rows * NATLOG_2PI) + logdetV + 
        ( (Eigy - yhat).transpose() * Vinv * (Eigy - yhat) )(0,0)); //<--(y - yhat)' * Vinv * (y - yhat)
      gff->nll = oo->matrix->data[0]; 
      return;
    }
  }
  
  if(want & (FF_COMPUTE_GRADIENT | FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
    //This part requires GREML expectation:
    omxGREMLExpectation* oge = (omxGREMLExpectation*)(expectation->argStruct);
    
    //Declare local variables for this scope:
    int numChildren = fc->childList.size();
    int __attribute__((unused)) parallelism = (numChildren == 0) ? 1 : numChildren;
    
    fc->grad.resize(gff->dVlength); //<--Resize gradient in FitContext
    
    //Set up new HessianBlock:
    HessianBlock *hb = new HessianBlock;
    if(want & (FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
      hb->vars.resize(gff->dVlength);
      hb->mat.resize(gff->dVlength, gff->dVlength);
    }
    
    //Begin looping thru free parameters:
#pragma omp parallel for num_threads(parallelism) 
    for(int i=0; i < gff->dVlength; i++){
      //Declare locals within parallelized region:
      int j=0, t1=0, t2=0;
      Eigen::MatrixXd PdV_dtheta1;
      Eigen::MatrixXd dV_dtheta1(Eigy.rows(), Eigy.rows()); //<--Derivative of V w/r/t parameter i.
      Eigen::MatrixXd dV_dtheta2(Eigy.rows(), Eigy.rows()); //<--Derivative of V w/r/t parameter j.
      t1 = gff->gradMap[i]; //<--Parameter number for parameter i.
      if(t1 < 0){continue;}
      if(want & (FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){hb->vars[i] = t1;}
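      //Obtain dV/dtheta_i as an Eigen matrix, dropping the same cases that were dropped from the data if necessary: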
      if( oge->numcases2drop ){
        dropCasesAndEigenize(gff->dV[i], dV_dtheta1, oge->numcases2drop, oge->dropcase, 1);
      }
      else{dV_dtheta1 = Eigen::Map< Eigen::MatrixXd >(omxMatrixDataColumnMajor(gff->dV[i]), gff->dV[i]->rows, gff->dV[i]->cols);}
      //PdV_dtheta1 = P.selfadjointView<Eigen::Lower>() * dV_dtheta1.selfadjointView<Eigen::Lower>();
      //^^Done in two steps below, since Eigen does not allow a product of two selfadjointView expressions:
      PdV_dtheta1 = P.selfadjointView<Eigen::Lower>();
      PdV_dtheta1 = PdV_dtheta1 * dV_dtheta1.selfadjointView<Eigen::Lower>();
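      //Inner loop over parameters j >= i: the j==i pass fills the gradient element for parameter i
      //and the diagonal of the average-information matrix; the j>i passes fill the off-diagonal elements:
      //  gradient_i = Scale*0.5*( tr(P*dV_i) - y'*P*dV_i*P*y )
      //  avgInfo_ij = Scale*0.5*( y'*P*dV_i*P*dV_j*P*y )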
      for(j=i; j < gff->dVlength; j++){
        if(j==i){
          gff->gradient(t1) = Scale*0.5*(PdV_dtheta1.trace() - (Eigy.transpose() * PdV_dtheta1 * Py)(0,0));
          fc->grad(t1) += gff->gradient(t1);
          if(want & (FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
            gff->avgInfo(t1,t1) = Scale*0.5*(Eigy.transpose() * PdV_dtheta1 * PdV_dtheta1 * Py)(0,0);
          }
        }
        else{if(want & (FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
          t2 = gff->gradMap[j]; //<--Parameter number for parameter j.
          if(t2 < 0){continue;}
          if( oge->numcases2drop ){
            dropCasesAndEigenize(gff->dV[j], dV_dtheta2, oge->numcases2drop, oge->dropcase, 1);
          }
          else{dV_dtheta2 = Eigen::Map< Eigen::MatrixXd >(omxMatrixDataColumnMajor(gff->dV[j]), gff->dV[j]->rows, gff->dV[j]->cols);}
          gff->avgInfo(t1,t2) = Scale*0.5*(Eigy.transpose() * PdV_dtheta1 * P.selfadjointView<Eigen::Lower>() * dV_dtheta2.selfadjointView<Eigen::Lower>() * Py)(0,0);
          gff->avgInfo(t2,t1) = gff->avgInfo(t1,t2);
          }
        }
      }
    }
    //Assign upper triangle elements of avgInfo to the HessianBlock:
    if(want & (FF_COMPUTE_HESSIAN | FF_COMPUTE_IHESSIAN)){
      for (size_t d1=0, h1=0; h1 < gff->dV.size(); ++h1, ++d1) {
        for (size_t d2=0, h2=0; h2 <= h1; ++h2, ++d2) {
          hb->mat(d2,d1) = gff->avgInfo(h2,h1);
        }
      }
      fc->queue(hb);
    }
    else{delete hb;} //<--hb is only handed off to the FitContext when a Hessian was requested.
  }
}
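For context, the fit value assembled in both branches above is Scale*0.5 times the restricted-likelihood deviance terms n*log(2*pi) + log|V| + log|X'V^-1*X| + y'Py, with P = V^-1 - V^-1*X*(X'V^-1*X)^-1*X'V^-1. Below is a self-contained Eigen sketch of that bracketed quantity; the function name and inputs are illustrative and not taken from OpenMx.

#include <Eigen/Dense>

// Sketch: the REML deviance terms mirrored from omxCallGREMLFitFunction, for y ~ N(X*beta, V).
double remlDevianceSketch(const Eigen::VectorXd &y, const Eigen::MatrixXd &X, const Eigen::MatrixXd &V)
{
  const double NATLOG_2PI = 1.837877066409345483560659472811;       // log(2*pi)
  Eigen::LLT<Eigen::MatrixXd> cholV(V);                             // Cholesky factorization of V
  double logdetV = 2.0 * Eigen::MatrixXd(cholV.matrixL()).diagonal().array().log().sum();
  Eigen::MatrixXd Vinv = cholV.solve(Eigen::MatrixXd::Identity(V.rows(), V.cols()));
  Eigen::MatrixXd quadX = X.transpose() * Vinv * X;                 // quadratic form in X
  Eigen::LLT<Eigen::MatrixXd> cholquadX(quadX);
  double logdetquadX = 2.0 * Eigen::MatrixXd(cholquadX.matrixL()).diagonal().array().log().sum();
  Eigen::MatrixXd P = Vinv - Vinv * X * cholquadX.solve(X.transpose() * Vinv);  // Vinv*(I - Hat)
  double n = (double)y.size();
  return n*NATLOG_2PI + logdetV + logdetquadX + y.dot(P * y);       // deviance terms; fit = Scale*0.5*this
}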