Example #1
Analysis::RetType Analysis_Spline::Setup(ArgList& analyzeArgs, DataSetList* datasetlist, DataFileList* DFLin, int debugIn)
{
  std::string setname = analyzeArgs.GetStringKey("name");
  outfile_ = DFLin->AddDataFile(analyzeArgs.GetStringKey("out"), analyzeArgs);
  meshsize_ = analyzeArgs.getKeyInt("meshsize", 0);
  meshfactor_ = -1.0;
  if (meshsize_ < 3) {
    meshfactor_ = analyzeArgs.getKeyDouble("meshfactor", -1.0);
    if (meshfactor_ < Constants::SMALL) {
      mprinterr("Error: Either meshsize must be specified and > 2, or meshfactor must be\n"
                "Error:   specified and > 0.0\n");
      return Analysis::ERR;
    }
  }
  if (analyzeArgs.Contains("meshmin")) {
    meshmin_ = analyzeArgs.getKeyDouble("meshmin", 0.0);
    useDefaultMin_ = true;
  } else
    useDefaultMin_ = false;
  if (analyzeArgs.Contains("meshmax")) {
    meshmax_ = analyzeArgs.getKeyDouble("meshmax", -1.0);
    useDefaultMax_ = true;
  } else
    useDefaultMax_ = false;
  if (useDefaultMin_ && useDefaultMax_ && meshmax_ < meshmin_) {
    mprinterr("Error: meshmax must be > meshmin\n");
    return Analysis::ERR;
  }
  // Select datasets from remaining args
  if (input_dsets_.AddSetsFromArgs( analyzeArgs.RemainingArgs(), *datasetlist )) {
    mprinterr("Error: Could not add data sets.\n");
    return Analysis::ERR;
  }
  if (input_dsets_.empty()) {
    mprinterr("Error: No input data sets.\n");
    return Analysis::ERR;
  }

  // Set up output datasets
  Dimension Xdim( meshmin_, (meshmax_ - meshmin_) / (double)meshsize_ );
  for (Array1D::const_iterator dsIn = input_dsets_.begin();
                               dsIn != input_dsets_.end(); ++dsIn)
  {
    DataSet* ds = datasetlist->AddSet(DataSet::XYMESH, setname, "Spline");
    if (ds == 0) return Analysis::ERR;
    ds->SetLegend( "Spline(" + (*dsIn)->Meta().Legend() + ")" );
    // TODO: Set individually based on input_dsets_
    ds->SetDim(Dimension::X, Xdim);
    if (outfile_ != 0) outfile_->AddDataSet( ds );
    output_dsets_.push_back( (DataSet_Mesh*)ds );
  }

  mprintf("    SPLINE: Applying cubic splining to %zu data sets\n", input_dsets_.size());
  if (meshfactor_ < 0)
    mprintf("\tMesh size= %i\n", meshsize_);
  else
    mprintf("\tMesh size will be input set size multiplied by %f\n", meshfactor_);
  if (useDefaultMin_)
    mprintf("\tMesh min= %f,", meshmin_);
  else
    mprintf("\tMesh min will be input set min,");
  if (useDefaultMax_)
    mprintf(" Mesh max= %f\n", meshmax_);
  else
    mprintf(" Mesh max will be input set max.\n");
  if (outfile_ != 0) {
    if (!setname.empty())
      mprintf("\tOutput set name: %s\n", setname.c_str());
    mprintf("\tOutfile name: %s\n", outfile_->DataFilename().base());
  }
  //for (Array1D::const_iterator set = input_dsets_.begin(); set != input_dsets_.end(); ++set)
  //  mprintf("\t%s\n", (*set)->legend());
  return Analysis::OK;
}
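
A minimal standalone sketch of the mesh arithmetic behind Xdim above; the meshmin, meshmax, and meshsize values are illustrative assumptions, not taken from the source:

// Sketch only, not cpptraj code: the X dimension built in Setup() has origin
// meshmin and step (meshmax - meshmin) / meshsize.
#include <cstdio>
int main() {
  double meshmin = 0.0, meshmax = 10.0; // assumed example values
  int meshsize = 100;                   // assumed example value
  double step = (meshmax - meshmin) / (double)meshsize;
  std::printf("origin= %g  step= %g\n", meshmin, step); // origin= 0  step= 0.1
  return 0;
}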
Example #2
// Analysis_Timecorr::Analyze()
Analysis::RetType Analysis_Timecorr::Analyze() {
  // If 2 vectors, ensure they have the same # of frames
  if (vinfo2_!=0) {
    if (vinfo1_->Size() != vinfo2_->Size()) {
      mprinterr("Error: # Frames in vec %s (%zu) != # Frames in vec %s (%zu)\n",
                vinfo1_->legend(), vinfo1_->Size(),
                vinfo2_->legend(), vinfo2_->Size());
      return Analysis::ERR;
    }
  }
  // Determine sizes
  int frame = vinfo1_->Size();
  int time = (int)(tcorr_ / tstep_) + 1;
  // Number of correlation steps (nsteps) is the lesser of the requested time points and the number of frames.
  int nsteps = 0;
  if (time > frame)
    nsteps = frame;
  else
    nsteps = time;
  // Allocate memory to hold complex numbers for direct or FFT
  if (drct_) {
    data1_.Allocate( frame );
    if (mode_ == CROSSCORR)
      data2_.Allocate( frame ); 
    corfdir_.Allocate( nsteps ); 
  } else {
    // Initialize FFT
    pubfft_.Allocate( frame );
    data1_ = pubfft_.Array();
    if (mode_ == CROSSCORR)
      data2_ = data1_;
  }
  // ----- Calculate spherical harmonics ---------
  // Real + Img. for each -order <= m <= order
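  // There are 2*order+1 m values (m = -order..order); each harmonic is stored as interleaved (real, imaginary) pairs.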
  if (vinfo1_->CalcSphericalHarmonics(order_)) return Analysis::ERR;
  if (vinfo2_ != 0) {
    if (vinfo2_->CalcSphericalHarmonics(order_)) return Analysis::ERR;
  }
  // ----- Initialize PN output array memory -----
  DataSet_double& pncf_ = static_cast<DataSet_double&>( *tc_p_ );
  pncf_.Resize( nsteps );
  Dimension Xdim(0.0, tstep_, nsteps, "Time");
  pncf_.SetDim(Dimension::X, Xdim);
  // ----- Calculate PN --------------------------
  for (int midx = -order_; midx <= order_; ++midx) {
    data1_.Assign( vinfo1_->SphericalHarmonics( midx ) );
    if (vinfo2_ != 0)
      data2_.Assign( vinfo2_->SphericalHarmonics( midx ) );
    CalcCorr( frame );
    for (int k = 0; k < nsteps; ++k)
      pncf_[k] += data1_[2 * k];
  }
  // ----- Dipolar Calc. -------------------------
  AvgResults Avg1, Avg2;
  if (dplr_) {
    DataSet_double& cf_ = static_cast<DataSet_double&>( *tc_c_ );
    cf_.Resize( nsteps );
    cf_.SetDim(Dimension::X, Xdim);
    DataSet_double& rcf_ = static_cast<DataSet_double&>( *tc_r3r3_ );
    rcf_.Resize( nsteps );
    rcf_.SetDim(Dimension::X, Xdim);
    // Calculate averages
    std::vector<double> R3i_1 = CalculateAverages(*vinfo1_, Avg1);
    std::vector<double> R3i_2;
    if (vinfo2_ != 0)
      R3i_2 = CalculateAverages(*vinfo2_, Avg2);
    // C
    for (int midx = -order_; midx <= order_; ++midx) {
      data1_.Assign( vinfo1_->SphericalHarmonics( midx ) );
      if (vinfo2_ != 0)
        data2_.Assign( vinfo2_->SphericalHarmonics( midx ) );
      for (int i = 0, i2 = 0; i < frame; ++i, i2 += 2) {
        double r3i = R3i_1[ i ]; 
        data1_[i2  ] *= r3i;
        data1_[i2+1] *= r3i;
        if ( vinfo2_ != 0 ) {
          r3i = R3i_2[ i ];
          data2_[i2  ] *= r3i;
          data2_[i2+1] *= r3i;
        }
      }
      CalcCorr( frame );
      for (int k = 0; k < nsteps; ++k) 
        cf_[k] += data1_[2 * k];
    }
    // 1 / R^6
    for (int i = 0, i2 = 0; i < frame; ++i, i2 += 2) {
      data1_[i2  ] = R3i_1[ i ];
      data1_[i2+1] = 0.0;
      if ( vinfo2_ != 0 ) {
        data2_[i2  ] = R3i_2[ i ];
        data2_[i2+1] = 0.0;
      }
    }
    CalcCorr( frame );
    for (int k = 0; k < nsteps; ++k)
      rcf_[k] = data1_[2 * k];
  }
  // ----- NORMALIZATION -------------------------
  // 4*PI / ((2*order)+1) due to spherical harmonics addition theorem
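  // e.g. for order = 2 this factor is 4*PI/5 ~= 2.5133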
  double KN = DataSet_Vector::SphericalHarmonicsNorm( order_ );
  Normalize( tc_p_,    frame, KN );
  if (dplr_) {
    Normalize( tc_c_,    frame, KN );
    Normalize( tc_r3r3_, frame, 1.0 );
  }
  // ----- PRINT PTRAJ FORMAT --------------------
  if (outfile_ != 0) { 
    outfile_->Printf("%ss, normal type\n",ModeString[mode_]);
    if (dplr_) {
      outfile_->Printf("***** Vector length *****\n");
      outfile_->Printf("%10s %10s %10s %10s\n", "<r>", "<rrig>", "<1/r^3>", "<1/r^6>");
      outfile_->Printf("%10.4f %10.4f %10.4f %10.4f\n",
                     Avg1.rave_, Avg1.avgr_, Avg1.r3iave_, Avg1.r6iave_);
      if (mode_ == CROSSCORR)
        outfile_->Printf("%10.4f %10.4f %10.4f %10.4f\n",
                       Avg2.rave_, Avg2.avgr_, Avg2.r3iave_, Avg2.r6iave_);
    }
    if (ptrajformat_) {
      outfile_->Printf("\n***** Correlation functions *****\n");
      if (dplr_) {
        DataSet_double& cf_ = static_cast<DataSet_double&>( *tc_c_ );
        DataSet_double& rcf_ = static_cast<DataSet_double&>( *tc_r3r3_ );
        outfile_->Printf("%10s %10s %10s %10s\n", "Time", "<C>", Plegend_[order_], "<1/(r^3*r^3)>");
        for (int i = 0; i < nsteps; ++i)
          outfile_->Printf("%10.3f %10.4f %10.4f %10.4f\n", (double)i * tstep_,
                         cf_[i], pncf_[i], rcf_[i]);
      } else {
        outfile_->Printf("%10s %10s\n", "Time", Plegend_[order_]);
        for (int i = 0; i < nsteps; ++i)
          outfile_->Printf("%10.3f %10.4f\n", (double)i * tstep_, pncf_[i]);
      }
    }
  }
  return Analysis::OK;
}
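
A minimal standalone sketch of how the number of correlation steps and the output time axis follow from tcorr and tstep; the tcorr, tstep, and frame values are illustrative assumptions:

// Sketch only, not cpptraj code.
#include <algorithm>
#include <cstdio>
int main() {
  double tcorr = 100.0, tstep = 1.0; // assumed example values (ps)
  int frame = 1000;                  // assumed number of frames
  int time = (int)(tcorr / tstep) + 1;
  int nsteps = std::min(time, frame); // same result as the if/else in Analyze()
  // Output point k corresponds to time k * tstep.
  std::printf("nsteps= %i  last time= %g ps\n", nsteps, (nsteps - 1) * tstep);
  return 0;
}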
Example #3
/** Do histogram normalization. */
void Action_Density::PrintHist()
{
  // Determine if area scaling should occur
  const double SMALL = 1.0;

  double avgArea = area_.mean();
  double sdArea  = sqrt(area_.variance());
  bool scale_area = (property_ == ELECTRON && avgArea > SMALL);

  mprintf("    DENSITY: The average box area in %c/%c is %.2f Angstrom (sd = %.2f).\n",
          area_coord_[0] + 88, area_coord_[1] + 88, avgArea, sdArea);

  if (scale_area)
    mprintf("The electron density will be scaled by this area.\n");

  // Loop over all histograms. Find lowest and highest bin idx out of all
  // histograms. All output histograms will have the same dimensions.
  long int lowest_idx = 0;
  long int highest_idx = 0;
  for (unsigned int idx = 0; idx != histograms_.size(); idx++) 
  {
    HistType const& hist = histograms_[idx];
    if (hist.empty()) {
      mprintf("Warning: Histogram for '%s' is empty; skipping.\n", masks_[idx].MaskString());
      continue;
    }
    // Find lowest bin
    if (idx == 0) {
      lowest_idx  = hist.lowestKey();
      highest_idx = hist.highestKey();
    } else {
      lowest_idx  = std::min(lowest_idx,  hist.lowestKey());
      highest_idx = std::max(highest_idx, hist.highestKey());
    }
  }
  // If using center of bins, put blank bins at either end.
  double xshift = 0.0;
  if (binType_ == CENTER) {
    lowest_idx--;
    highest_idx++;
    xshift = delta_ / 2;
  }
  // Set up common output dimensions
  long int Nbins = (highest_idx - lowest_idx + 1);
  long int offset = -lowest_idx;
  double Xmin = (delta_ * lowest_idx) + xshift;
  //mprintf("DEBUG: Lowest idx= %li  Xmin= %g  Highest idx= %li  Bins= %li\n",
  //        lowest_idx, Xmin, highest_idx, Nbins);
  Dimension Xdim(Xmin, delta_, AxisStr_[axis_]);
  // Loop over all histograms. Normalize and populate output sets.
  for (unsigned int idx = 0; idx != histograms_.size(); idx++) 
  {
    HistType const& hist = histograms_[idx];
    if (hist.empty()) {
      continue;
    }
    // Calculate normalization
    double fac   = delta_;
    double sdfac = 1.0;
    if (scale_area) {
      fac *= avgArea;
      sdfac = 1.0 / avgArea;
    }
    fac = 1.0 / fac;
    // Populate output data sets
    DataSet_1D& out_av = static_cast<DataSet_1D&>( *(AvSets_[idx]) );
    DataSet_1D& out_sd = static_cast<DataSet_1D&>( *(SdSets_[idx]) );
    out_av.Allocate(DataSet::SizeArray(1, Nbins));
    out_sd.Allocate(DataSet::SizeArray(1, Nbins));
    out_av.SetDim(Dimension::X, Xdim);
    out_sd.SetDim(Dimension::X, Xdim);
    // Blank bin for bin center
    if (binType_ == CENTER) {
      double dzero = 0.0;
      out_av.Add(0, &dzero);
      out_sd.Add(0, &dzero);
    }
    // Loop over populated bins
    HistType::const_iterator var = hist.variance_begin();
    for (HistType::const_iterator mean = hist.mean_begin();
                                  mean != hist.mean_end(); ++mean, ++var)
    {
      long int frm = mean->first + offset;
      double density  = mean->second * fac;
      out_av.Add(frm, &density);
      double variance;
      if (hist.nData() < 2)
        variance = 0;
      else
        variance = var->second / (hist.nData() - 1);
      if (variance > 0) {
        variance = sqrt(variance);
        variance *= sdfac;
      }
      out_sd.Add(frm, &variance);
    }
    // Blank bin for bin center
    if (binType_ == CENTER) {
      double dzero = 0.0;
      out_av.Add(Nbins-1, &dzero);
      out_sd.Add(Nbins-1, &dzero);
    }
  } // END loop over all histograms
}
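
A minimal standalone sketch of the normalization factors computed above; the bin width and average area are illustrative assumptions:

// Sketch only, not cpptraj code.
#include <cstdio>
int main() {
  double delta = 0.25;     // assumed bin width (Angstrom)
  double avgArea = 2500.0; // assumed average box area
  bool scale_area = true;  // electron density with a valid area
  double fac = delta, sdfac = 1.0;
  if (scale_area) { fac *= avgArea; sdfac = 1.0 / avgArea; }
  fac = 1.0 / fac; // bin means are divided by (bin width * area)
  std::printf("fac= %g  sdfac= %g\n", fac, sdfac); // fac= 0.0016  sdfac= 0.0004
  return 0;
}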
Example #4
// Action_DSSP::Print()
void Action_DSSP::Print() {
  if (dsetname_.empty()) return;
  // Try not to print empty residues. Find the minimum and maximum residue
  // for which there is data. Output res nums start from 1.
  int min_res = -1;
  int max_res = -1;
  for (int resi = 0; resi != (int)SecStruct_.size(); resi++) {
    if (SecStruct_[resi].resDataSet != 0) {
      if (min_res < 0) min_res = resi;
      if (resi > max_res) max_res = resi;
    }
  }
  if (min_res < 0 || max_res < min_res) {
    mprinterr("Error: No residues have SS data.\n");
    return;
  }
  // Calculate average of each SS type across all residues.
  if (dsspFile_ != 0) {
    std::vector<DataSet*> dsspData_(NSSTYPE);
    Dimension Xdim( min_res + 1, 1, "Residue" );
    MetaData md(dsetname_, "avgss", MetaData::NOT_TS);
    // Set up a dataset for each SS type. TODO: NONE type?
    for (int ss = 1; ss < NSSTYPE; ss++) {
      md.SetIdx(ss);
      md.SetLegend( SSname[ss] );
      dsspData_[ss] = Init_.DSL().AddSet(DataSet::DOUBLE, md);
      dsspData_[ss]->SetDim(Dimension::X, Xdim);
      dsspFile_->AddDataSet( dsspData_[ss] ); 
    }
    
    // Calc the avg SS type for each residue that has data.
    int idx = 0; 
    for (int resi = min_res; resi < max_res+1; resi++) {
      if (SecStruct_[resi].resDataSet != 0) {
        for (int ss = 1; ss < NSSTYPE; ss++) {
          double avg = (double)SecStruct_[resi].SSprob[ss];
          avg /= (double)Nframe_;
          dsspData_[ss]->Add(idx, &avg);
        }
        ++idx;
      }
    }
  }
  // Print out SS assignment like PDB
  if (assignout_ != 0) {
      int total = 0;
      int startRes = -1;
      std::string resLine, ssLine;
      for (int resi = min_res; resi < max_res+1; resi++) {
        if (startRes == -1) startRes = resi;
        // If the residue has SS data, convert its name and determine the
        // dominant SS type; otherwise mark it with '-'.
        if (SecStruct_[resi].resDataSet != 0) {
          resLine += Residue::ConvertResName( SecStruct_[resi].resDataSet->Meta().Legend() );
          int dominantType = 0;
          int ssmax = 0;
          for (int ss = 0; ss < NSSTYPE; ss++) {
            if ( SecStruct_[resi].SSprob[ss] > ssmax ) {
              ssmax = SecStruct_[resi].SSprob[ss];
              dominantType = ss;
            }
          }
          ssLine += dssp_char[dominantType];
        } else {
          resLine += '-';
          ssLine += '-';
        }
        total++;
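        // Flush a block every 50 residues (or at the final residue); add a space every 10 residues for readability.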
        if ((total % 50) == 0 || resi == max_res) {
          assignout_->Printf("%-8i %s\n", startRes+1, resLine.c_str());
          assignout_->Printf("%8s %s\n\n", " ", ssLine.c_str());
          startRes = -1;
          resLine.clear();
          ssLine.clear();
        } else if ((total % 10) == 0) {
          resLine += ' '; 
          ssLine += ' ';
        }
      }
  }
}
Example #5
//TODO: Deal with vectors
Analysis::RetType Analysis_FFT::Analyze() {
  // Ensure input data sets have the same number of points.
  size_t maxsize_ = 0;
  std::vector<bool> skipSet(input_dsets_.size(), true);
  std::vector<bool>::iterator skip = skipSet.begin();
  for (Array1D::const_iterator DS = input_dsets_.begin();
                               DS != input_dsets_.end(); 
                             ++DS, ++skip)
  {
    // Check for empty set
    if ( (*DS)->Empty() ) {
      mprintf("Warning: Set %s is empty, skipping.\n", (*DS)->legend() );
      continue;
    }
    if ( maxsize_ == 0 )
      maxsize_ = (*DS)->Size();
    else if ( (*DS)->Size() != maxsize_ ) {
      mprintf("Warning: Set %s does not have same size (%zu) as initial set (%zu). Skipping.\n",
              (*DS)->legend(), (*DS)->Size(), maxsize_ );
      continue;
    }
    *skip = false;
  }
  // Setup FFT
  PubFFT pubfft;
  pubfft.SetupFFTforN( maxsize_ );
  //mprintf("DEBUG: FFT size is %i\n",pubfft.size());
  // Set up complex number array
  ComplexArray data1( pubfft.size() );

  double sr = 1.0 / dt_;              // 1 / sampling interval, sampling rate (freq)
  double fnyquist = sr / 2.0;         // Nyquist frequency
  double total_time = dt_ * (double)maxsize_; // Total time (fundamental period)
  double f0 = 1.0 / total_time;       // Fundamental frequency (first harmonic)
  Dimension Xdim(0.0, f0, "Freq.");
  mprintf("\tReporting FFT magnitude, normalized by N/2.\n"
          "\tOnly data up to the Nyquist frequency will be used.\n");
  mprintf("\tSampling rate= %f ps^-1, Nyquist freq.= %f ps^-1\n", sr, fnyquist);
  mprintf("\tPoints= %zu, Fundamental period= %f ps, fundamental freq.= %f ps^-1\n", 
          maxsize_, total_time, f0);
  double norm = (double)maxsize_ / 2;

  skip = skipSet.begin(); 
  Array1D::const_iterator dsout = output_dsets_.begin();
  for (Array1D::const_iterator DS = input_dsets_.begin(); 
                               DS != input_dsets_.end();
                             ++DS, ++dsout, ++skip)
  {
    if (*skip) continue;
    mprintf("\t\tCalculating FFT for set %s\n", (*DS)->legend());
    // Reset data1 so it is padded with zeros
    data1.PadWithZero(0);
    // Place data from DS in real spots in data1
    int datasize =  (*DS)->Size();
    for (int i = 0; i < datasize; ++i)
      data1[i*2] = ((DataSet_1D*)(*DS))->Dval(i);
    // DEBUG
    //for (int i = 0; i < pubfft.size()*2; i+=2)
    //  mprintf("\t\t\t%i FFTinR=%f  FFTinI=%f\n",i/2,data1[i],data1[i+1]);
    // Perform FFT
    pubfft.Forward( data1 );
    // Place real data from FFT in output Data up to the Nyquist frequency
    int i2 = 0;
    for (int i1 = 0; i1 < datasize; ++i1) {
      double freq = i1 * f0;
      if (freq > fnyquist) break;
      double magnitude = sqrt(data1[i2]*data1[i2] + data1[i2+1]*data1[i2+1]);
      magnitude /= norm;
      //mprintf("\t\t\tReal=%f  Img=%f  Mag=%f\n",data1[i2],data1[i2+1],magnitude);
      (*dsout)->Add( i1, &magnitude );
      i2 += 2;
    }
    (*dsout)->SetDim(Dimension::X, Xdim);
  }
  return Analysis::OK;
}
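
A minimal standalone sketch of the frequency axis described above; the time step and set size are illustrative assumptions:

// Sketch only, not cpptraj code.
#include <cstdio>
int main() {
  double dt = 1.0; // assumed sampling interval (ps)
  int N = 1000;    // assumed number of data points
  double sr = 1.0 / dt;                // sampling rate
  double fnyquist = sr / 2.0;          // Nyquist frequency
  double f0 = 1.0 / (dt * (double)N);  // fundamental frequency (bin spacing)
  // FFT bin k corresponds to frequency k * f0; only bins with
  // k * f0 <= fnyquist (roughly the first N/2 + 1) are reported.
  std::printf("f0= %g ps^-1  fnyquist= %g ps^-1\n", f0, fnyquist);
  return 0;
}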
Example #6
// Action_Density::Print()
void Action_Density::Print()
{
  const double SMALL = 1.0;

  long minus_minidx = 0, minus_maxidx = 0, plus_minidx = 0, plus_maxidx = 0;
  double density, sd, area;

  std::map<long,double>::iterator first_idx, last_idx;
  statmap curr;

  area = area_.mean();
  sd = sqrt(area_.variance());
  bool scale_area = (property_ == ELECTRON && area > SMALL);

  mprintf("    DENSITY: The average box area in %c/%c is %.2f Angstrom (sd = %.2f).\n",
          area_coord_[0] + 88, area_coord_[1] + 88, area, sd);

  if (scale_area)
    mprintf("The electron density will be scaled by this area.\n");

  // the search for minimum and maximum indices relies on ordered map
  for (unsigned long i = 0; i < minus_histograms_.size(); i++) {
    first_idx = minus_histograms_[i].mean_begin(); 
    last_idx = minus_histograms_[i].mean_end();

    if (first_idx->first < minus_minidx)
      minus_minidx = first_idx->first;

    if (last_idx != first_idx) {
      last_idx--;
      if (last_idx->first > minus_maxidx)
        minus_maxidx = last_idx->first;
    }
  }

  for (unsigned long i = 0; i < plus_histograms_.size(); i++) {
    first_idx = plus_histograms_[i].mean_begin(); 
    last_idx = plus_histograms_[i].mean_end();

    if (first_idx->first < plus_minidx)
      plus_minidx = first_idx->first;

    if (last_idx != first_idx) {
      last_idx--;
      if (last_idx->first > plus_maxidx)
        plus_maxidx = last_idx->first;
    }
  }

  // make sure we have zero values at beginning and end as this
  // "correctly" integrates the histogram
  minus_minidx--;
  plus_maxidx++;

  // Set up data set dimensions
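  // The X origin computed below simplifies to (minus_minidx - 0.5) * delta_.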
  double Xmin = -delta_ + ((double) minus_minidx + 0.5) * delta_;
  Dimension Xdim(Xmin, delta_, AxisStr_[axis_]);
  for (unsigned int j = 0; j != AvSets_.size(); j++) {
    AvSets_[j]->SetDim(Dimension::X, Xdim);
    SdSets_[j]->SetDim(Dimension::X, Xdim);
  }
  unsigned int didx = 0;
  for (long i = minus_minidx; i <= minus_maxidx; i++, didx++) {

    for (unsigned long j = 0; j < minus_histograms_.size(); j++) {
      curr = minus_histograms_[j];

      density = curr.mean(i) / delta_;
      sd = sqrt(curr.variance(i) );

      if (scale_area) {
        density /= area;
        sd /= area;
      }

      AvSets_[j]->Add( didx, &density );
      SdSets_[j]->Add( didx, &sd      );
    }

  }

  for (long i = plus_minidx; i <= plus_maxidx; i++, didx++) {

    for (unsigned long j = 0; j < plus_histograms_.size(); j++) {
      curr = plus_histograms_[j];

      density = curr.mean(i) / delta_;
      sd = sqrt(curr.variance(i) );

      if (scale_area) {
        density /= area;
        sd /= area;
      }

      AvSets_[j]->Add( didx, &density );
      SdSets_[j]->Add( didx, &sd      );
    }

  }
}