int Cluster_DPeaks::ChoosePointsAutomatically() {
  // Right now all density values are discrete. Try to choose outliers at each
  // value for which there is density.
/*
  // For each point, calculate average distance (X,Y) to points in next and
  // previous density values.
  const double dens_cut = 3.0 * 3.0;
  const double dist_cut = 1.32 * 1.32;
  for (Carray::const_iterator point0 = Points_.begin();
                              point0 != Points_.end(); ++point0)
  {
    int Npts = 0;
    for (Carray::const_iterator point1 = Points_.begin();
                                point1 != Points_.end(); ++point1)
    {
      if (point0 != point1) {
        // Only do this for close densities
        double dX = (double)(point0->PointsWithinEps() - point1->PointsWithinEps());
        double dX2 = dX * dX;
        double dY = (point0->Dist() - point1->Dist());
        double dY2 = dY * dY;
        if (dX2 < dens_cut && dY2 < dist_cut) {
          Npts++;
        }
      }
    }
    mprintf("%i %i %i\n", point0->PointsWithinEps(), point0->Fnum()+1, Npts);
  }
*/
/*
  CpptrajFile tempOut;
  tempOut.OpenWrite("temp.dat");
  int currentDensity = -1;
  double distAv = 0.0;
  double distSD = 0.0;
  double sumWts = 0.0;
  int nValues = 0;
  Carray::const_iterator lastPoint = Points_.end() + 1;
  for (Carray::const_iterator point = Points_.begin(); point != lastPoint; ++point)
  {
    if (point == Points_.end() || point->PointsWithinEps() != currentDensity) {
      if (nValues > 0) {
        distAv = distAv / sumWts; //(double)nValues;
        distSD = (distSD / sumWts) - (distAv * distAv);
        if (distSD > 0.0)
          distSD = sqrt(distSD);
        else
          distSD = 0.0;
        //mprintf("Density %i: %i values Avg= %g SD= %g SumWts= %g\n", currentDensity,
        //        nValues, distAv, distSD, sumWts);
        tempOut.Printf("%i %g\n", currentDensity, distAv);
      }
      if (point == Points_.end()) break;
      currentDensity = point->PointsWithinEps();
      distAv = 0.0;
      distSD = 0.0;
      sumWts = 0.0;
      nValues = 0;
    }
    double wt = exp(point->Dist());
    double dval = point->Dist() * wt;
    sumWts += wt;
    distAv += dval;
    distSD += (dval * dval);
    nValues++;
  }
  tempOut.CloseFile();
*/
  // BEGIN CALCULATING WEIGHTED DISTANCE AVERAGE
  CpptrajFile tempOut;
  tempOut.OpenWrite("temp.dat");
  DataSet_Mesh weightedAverage;
  Carray::const_iterator cp = Points_.begin();
  // Skip local density of 0.
  //while (cp->PointsWithinEps() == 0 && cp != Points_.end()) ++cp;
  while (cp != Points_.end()) {
    int densityVal = cp->PointsWithinEps();
    Carray densityArray;
    // Add all points of current density. Check cp against end() before dereferencing.
    while (cp != Points_.end() && cp->PointsWithinEps() == densityVal)
      densityArray.push_back( *(cp++) );
    mprintf("Density value %i has %zu points.\n", densityVal, densityArray.size());
    // Sort array by distance
    std::sort(densityArray.begin(), densityArray.end(), Cpoint::dist_sort());
    // Take the average of the points weighted by their position.
    double wtDistAv = 0.0;
    double sumWts = 0.0;
    //std::vector<double> weights;
    //weights.reserve( densityArray.size() );
    int maxPt = (int)densityArray.size() - 1;
    for (int ip = 0; ip != (int)densityArray.size(); ++ip) {
      double wt = exp( (double)(ip - maxPt) );
      //mprintf("\t%10i %10u %10u %10g\n", densityVal, ip, maxPt, wt);
      wtDistAv += (densityArray[ip].Dist() * wt);
      sumWts += wt;
      //weights.push_back( wt );
    }
    wtDistAv /= sumWts;
    // Calculate the weighted sample variance
    //double distSD = 0.0;
    //for (unsigned int ip = 0; ip != densityArray.size(); ++ip) {
    //  double diff = densityArray[ip].Dist() - wtDistAv;
    //  distSD += weights[ip] * (diff * diff);
    //}
    //distSD /= sumWts;
    weightedAverage.AddXY(densityVal, wtDistAv);
    //tempOut.Printf("%i %g %g %g\n", densityVal, wtDistAv, sqrt(distSD), sumWts);
    tempOut.Printf("%i %g %g\n", densityVal, wtDistAv, sumWts);
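    // NOTE: With densityArray sorted by distance, the weights above are
    //       wt(ip) = exp(ip - maxPt): the last (largest-distance) point gets weight 1
    //       and the weights decay exponentially toward smaller distances, so wtDistAv
    //       tracks the upper envelope of the distance-vs-density curve rather than its
    //       plain mean (this assumes Cpoint::dist_sort orders by increasing distance).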
    /* // Find the median.
    double median, Q1, Q3;
    if (densityArray.size() == 1) {
      median = densityArray[0].Dist();
      Q1 = median;
      Q3 = median;
    } else {
      unsigned int q3_beg;
      unsigned int med_idx = densityArray.size() / 2; // Always 0 <= Q1 < med_idx
      if ((densityArray.size() % 2) == 0) {
        median = (densityArray[med_idx].Dist() + densityArray[med_idx-1].Dist()) / 2.0;
        q3_beg = med_idx;
      } else {
        median = densityArray[med_idx].Dist();
        q3_beg = med_idx + 1;
      }
      if (densityArray.size() == 2) {
        Q1 = densityArray[0].Dist();
        Q3 = densityArray[1].Dist();
      } else {
        // Find lower quartile
        unsigned int q1_idx = med_idx / 2;
        if ((med_idx % 2) == 0)
          Q1 = (densityArray[q1_idx].Dist() + densityArray[q1_idx-1].Dist()) / 2.0;
        else
          Q1 = densityArray[q1_idx].Dist();
        // Find upper quartile
        unsigned int q3_size = densityArray.size() - q3_beg;
        unsigned int q3_idx = (q3_size / 2) + q3_beg;
        if ((q3_size % 2) == 0)
          Q3 = (densityArray[q3_idx].Dist() + densityArray[q3_idx-1].Dist()) / 2.0;
        else
          Q3 = densityArray[q3_idx].Dist();
      }
    }
    mprintf("\tMedian dist value is %g. Q1= %g Q3= %g\n", median, Q1, Q3);
    */
  }
  tempOut.CloseFile();
  // END CALCULATING WEIGHTED DISTANCE AVERAGE
/*
  // TEST
  tempOut.OpenWrite("temp2.dat");
  std::vector<double> Hist( Points_.back().PointsWithinEps()+1, 0.0 );
  int gWidth = 3;
  double cval = 3.0;
  double two_c_squared = 2.0 * cval * cval;
  mprintf("DBG: cval= %g, Gaussian denominator is %g\n", cval, two_c_squared);
  for (int wtIdx = 0; wtIdx != (int)weightedAverage.Size(); wtIdx++)
  {
    int bval = weightedAverage.X(wtIdx);
    for (int xval = std::max(bval - gWidth, 0);
             xval != std::min(bval + gWidth + 1, (int)Hist.size()); xval++)
    {
      // a: height (weighted average)
      // b: center (density value)
      // c: width
      // x: density value in histogram
      //int xval = weightedAverage.X(idx);
      //double bval = weightedAverage.X(wtIdx);
      //double bval = (double)wtIdx;
      double diff = (double)(xval - bval);
      //Hist[xval] += (weightedAverage.Y(wtIdx) * exp( -( (diff * diff) / two_c_squared ) ));
      Hist[xval] = std::max(Hist[xval],
                            weightedAverage.Y(wtIdx) * exp( -( (diff * diff) / two_c_squared ) ));
    }
  }
  for (unsigned int idx = 0; idx != Hist.size(); idx++)
    tempOut.Printf("%u %g\n", idx, Hist[idx]);
  tempOut.CloseFile();
  // END TEST
*/
/*
  // TEST
  // Construct best-fit line segments
  tempOut.OpenWrite("temp2.dat");
  double slope, intercept, correl;
  int segment_length = 3;
  DataSet_Mesh Segment;
  Segment.Allocate1D( segment_length );
  for (int wtIdx = 0; wtIdx != (int)weightedAverage.Size(); wtIdx++)
  {
    Segment.Clear();
    for (int idx = std::max(wtIdx - 1, 0); // TODO: use segment_length
             idx != std::min(wtIdx + 2, (int)weightedAverage.Size()); idx++)
      Segment.AddXY(weightedAverage.X(idx), weightedAverage.Y(idx));
    Segment.LinearRegression(slope, intercept, correl, true);
    for (int idx = std::max(wtIdx - 1, 0); // TODO: use segment_length
             idx != std::min(wtIdx + 2, (int)weightedAverage.Size()); idx++)
    {
      double x = weightedAverage.X(idx);
      double y = slope * x + intercept;
      tempOut.Printf("%g %g %i\n", x, y, weightedAverage.X(wtIdx));
    }
  }
  tempOut.CloseFile();
  // END TEST
*/
  // BEGIN WEIGHTED RUNNING AVG/SD OF DISTANCES
  // For each point, determine if it is greater than the average of the
  // weighted average distances of the previous, current, and next densities.
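  // The local average below is taken over a window of +/- 'width' entries of the
  // weighted-average curve, with each entry weighted by its own value (wt = y), i.e.
  //   currentAvg = sum( y_i^2 ) / sum( y_i )  over the window,
  // which biases the smoothed curve toward the larger weighted distances. A point
  // contributes a delta if its distance exceeds this local average; candidates whose
  // delta is larger than the standard deviation of all positive deltas are assigned
  // new cluster numbers below.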
  int width = 2;
  int currentDensity = 0;
  int wtIdx = 0;
  double currentAvg = 0.0;
  double deltaSD = 0.0;
  double deltaAv = 0.0;
  int Ndelta = 0;
  CpptrajFile raOut;
  if (!rafile_.empty()) raOut.OpenWrite(rafile_);
  CpptrajFile raDelta;
  if (!radelta_.empty()) raDelta.OpenWrite(radelta_);
  std::vector<unsigned int> candidateIdxs;
  std::vector<double> candidateDeltas;
  cp = Points_.begin();
  // Skip over points with zero density
  while (cp != Points_.end() && cp->PointsWithinEps() == 0) ++cp;
  // Advance wtIdx to the weighted-average entry matching the first non-zero density;
  // check the bound before accessing X().
  while (wtIdx < (int)weightedAverage.Size() &&
         weightedAverage.X(wtIdx) != cp->PointsWithinEps())
    ++wtIdx;
  for (Carray::const_iterator point = cp; point != Points_.end(); ++point)
  {
    if (point->PointsWithinEps() != currentDensity) {
      //currentAvg = weightedAverage.Y(wtIdx);
      // New density value. Determine average.
      currentAvg = 0.0;
      //unsigned int Npt = 0;
      double currentWt = 0.0;
      for (int idx = std::max(wtIdx - width, 0);
               idx != std::min(wtIdx + width + 1, (int)weightedAverage.Size()); idx++)
      {
        //currentAvg += weightedAverage.Y(idx);
        //Npt++;
        double wt = weightedAverage.Y(idx);
        currentAvg += (weightedAverage.Y(idx) * wt);
        currentWt += wt;
      }
      //currentAvg /= (double)Npt;
      currentAvg /= currentWt;
      //smoothAv += currentAvg;
      //smoothSD += (currentAvg * currentAvg);
      //Nsmooth++;
      currentDensity = point->PointsWithinEps();
      if (raOut.IsOpen())
        raOut.Printf("%i %g %g\n", currentDensity, currentAvg, weightedAverage.Y(wtIdx));
      wtIdx++;
    }
    double delta = (point->Dist() - currentAvg);
    if (delta > 0.0) {
      //delta *= log((double)currentDensity);
      if (raDelta.IsOpen())
        raDelta.Printf("%8i %8.3f %8i %8.3f %8.3f\n",
                       currentDensity, delta, point->Fnum()+1, point->Dist(), currentAvg);
      candidateIdxs.push_back( point - Points_.begin() );
      candidateDeltas.push_back( delta );
      deltaAv += delta;
      deltaSD += (delta * delta);
      Ndelta++;
    }
  }
  raOut.CloseFile();
  deltaAv /= (double)Ndelta;
  deltaSD = (deltaSD / (double)Ndelta) - (deltaAv * deltaAv);
  if (deltaSD > 0.0)
    deltaSD = sqrt(deltaSD);
  else
    deltaSD = 0.0;
  if (raDelta.IsOpen())
    raDelta.Printf("#DeltaAvg= %g DeltaSD= %g\n", deltaAv, deltaSD);
  raDelta.CloseFile();
  int cnum = 0;
  for (unsigned int i = 0; i != candidateIdxs.size(); i++) {
    if (candidateDeltas[i] > (deltaSD)) {
      Points_[candidateIdxs[i]].SetCluster( cnum++ );
      mprintf("\tPoint %u (frame %i, density %i) selected as candidate for cluster %i\n",
              candidateIdxs[i], Points_[candidateIdxs[i]].Fnum()+1,
              Points_[candidateIdxs[i]].PointsWithinEps(), cnum-1);
    }
  }
  // END WEIGHTED AVG/SD OF DISTANCES
/*
  // Currently doing this by calculating the running average of density vs
  // distance, then choosing points with distance > twice the SD of the
  // running average.
  // NOTE: Store in a mesh data set for now in case we want to spline etc later.
  if (avg_factor_ < 1) avg_factor_ = 10;
  unsigned int window_size = Points_.size() / (unsigned int)avg_factor_;
  mprintf("\tRunning avg window size is %u\n", window_size);
  // FIXME: Handle case where window_size < frames
  DataSet_Mesh runavg;
  unsigned int ra_size = Points_.size() - window_size + 1;
  runavg.Allocate1D( ra_size );
  double dwindow = (double)window_size;
  double sumx = 0.0;
  double sumy = 0.0;
  for (unsigned int i = 0; i < window_size; i++) {
    sumx += (double)Points_[i].PointsWithinEps();
    sumy += Points_[i].Dist();
  }
  runavg.AddXY( sumx / dwindow, sumy / dwindow );
  for (unsigned int i = 1; i < ra_size; i++) {
    unsigned int nextwin = i + window_size - 1;
    unsigned int prevwin = i - 1;
    sumx = (double)Points_[nextwin].PointsWithinEps() -
           (double)Points_[prevwin].PointsWithinEps() + sumx;
    sumy = Points_[nextwin].Dist() - Points_[prevwin].Dist() + sumy;
    runavg.AddXY( sumx / dwindow, sumy / dwindow );
  }
  // Write running average
  if (!rafile_.empty()) {
    CpptrajFile raOut;
    if (raOut.OpenWrite(rafile_))
      mprinterr("Error: Could not open running avg file '%s' for write.\n", rafile_.c_str());
    else {
      for (unsigned int i = 0; i != runavg.Size(); i++)
        raOut.Printf("%g %g\n", runavg.X(i), runavg.Y(i));
      raOut.CloseFile();
    }
  }
  double ra_sd;
  double ra_avg = runavg.Avg( ra_sd );
  // Double stdev to use as cutoff for finding anomalously high peaks.
  ra_sd *= 2.0;
  mprintf("\tAvg of running avg set is %g, SD*2.0 (delta cutoff) is %g\n", ra_avg, ra_sd);
  // For each point in the density vs distance plot, determine which running
  // average point is closest. If the difference between the point and the
  // running average point is > 2.0 times the SD of the running average data,
  // consider it a 'peak'.
  CpptrajFile raDelta;
  if (!radelta_.empty())
    raDelta.OpenWrite("radelta.dat");
  if (raDelta.IsOpen())
    raDelta.Printf("%-10s %10s %10s\n", "#Frame", "RnAvgPos", "Delta");
  unsigned int ra_position = 0; // Position in the runavg DataSet
  unsigned int ra_end = runavg.Size() - 1;
  int cnum = 0;
  for (Carray::iterator point = Points_.begin(); point != Points_.end(); ++point)
  {
    if (ra_position != ra_end) {
      // Is the next running avgd point closer to this point?
      while (ra_position != ra_end) {
        double dens = (double)point->PointsWithinEps();
        double diff0 = fabs( dens - runavg.X(ra_position  ) );
        double diff1 = fabs( dens - runavg.X(ra_position+1) );
        if (diff1 < diff0)
          ++ra_position; // Next running avg position is closer for this point.
        else
          break; // This position is closer.
      }
    }
    double delta = point->Dist() - runavg.Y(ra_position);
    if (raDelta.IsOpen())
      raDelta.Printf("%-10i %10u %10g", point->Fnum()+1, ra_position, delta);
    if (delta > ra_sd) {
      if (raDelta.IsOpen()) raDelta.Printf(" POTENTIAL CLUSTER %i", cnum);
      point->SetCluster(cnum++);
    }
    if (raDelta.IsOpen()) raDelta.Printf("\n");
  }
  raDelta.CloseFile();
*/
  return cnum;
}
/** Given the structure of a molecule and its normal mode vibrational
  * frequencies, this routine uses standard statistical mechanical
  * formulas for an ideal gas (in the canonical ensemble; see,
  * for example, D. A. McQuarrie, "Statistical Thermodynamics",
  * Harper & Row, New York, 1973, chapters 5, 6, and 8) to compute
  * the entropy, heat capacity, and internal energy.
  * The SI system of units is used internally. Conversion to units
  * more familiar to most chemists is made for output.
  *
  * \param outfile output file, should already be open.
  * \param natoms Number of atoms
  * \param nvecs Number of eigenvectors (already converted to frequencies)
  * \param ilevel If not zero, include all frequencies in the vibrational analysis;
  *               otherwise skip the first 5 (linear) or 6 (non-linear) modes.
  * \param crd coordinates in Angstroms
  * \param amass atomic weights, in amu.
  * \param freq vibrational frequencies, in cm**-1 and in ascending order
  * \param temp temperature, in Kelvin
  * \param patm pressure, in atmospheres
  */
void thermo( CpptrajFile& outfile, int natoms, int nvecs, int ilevel,
             const double* crd, const double* amass, const double* freq,
             double temp, double patm)
{
  // pmom: principal moments of inertia, in amu-bohr**2 and in ascending order.
  double pmom[3], rtemp, rtemp1, rtemp2, rtemp3;
  // ----- Constants -------------------
  const double thresh = 900.0;        // vibrational frequency threshold
  const double tokg   = 1.660531e-27; // kilograms per amu.
  const double boltz  = 1.380622e-23; // Boltzmann constant, in joules per kelvin.
  const double planck = 6.626196e-34; // Planck constant, in joule-seconds.
  const double avog   = 6.022169e+23; // Avogadro constant, in mol**(-1).
  const double jpcal  = 4.18674e+00;  // joules per calorie.
  const double tomet  = 1.0e-10;      // metres per Angstrom.
  const double hartre = 4.35981e-18;  // joules per hartree.
  const double pstd   = 1.01325e+05;  // Standard pressure in pascals
  // -----------------------------------
  // compute the gas constant, pi**2, and e.
  // compute the conversion factors cal per joule and kcal per joule.
  const double gas = avog * boltz;
  // pi = four * datan(one)
  const double pipi = PI * PI;
  const double e = exp(1.0);
  const double tocal  = 1.0 / jpcal;
  const double tokcal = tocal / 1000.0;
  if (!outfile.IsOpen()) {
    mprinterr("Internal Error: thermo: output file is not open.\n");
    return;
  }
  // print the temperature and pressure.
  outfile.Printf("\n *******************\n");
  outfile.Printf(  " - Thermochemistry -\n");
  outfile.Printf(  " *******************\n\n");
  outfile.Printf("\n temperature %9.3f kelvin\n pressure %9.5f atm\n", temp, patm);
  double pressure = pstd * patm;
  double rt = gas * temp;
  // compute and print the molecular mass in amu, then convert to kilograms.
  double weight = 0.0;
  for (int iat = 0; iat < natoms; ++iat)
    weight += amass[iat];
  outfile.Printf(" molecular mass (principal isotopes) %11.5f amu\n", weight);
  weight *= tokg;
  // trap non-unit multiplicities.
  //if (multip != 1) {
  //  outfile.Printf("\n Warning-- assumptions made about the electronic partition function\n");
  //  outfile.Printf(  " are not valid for multiplets!\n\n");
  //}
  // compute contributions due to translation:
  //   etran-- internal energy
  //   ctran-- constant v heat capacity
  //   stran-- entropy
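  // The translational contributions below are the ideal-gas (Sackur-Tetrode) results:
  //   S_trans = R * ln[ (2*pi*m*k*T/h^2)^(3/2) * (k*T/p) * e^(5/2) ]
  //   E_trans = (3/2)*R*T,   Cv_trans = (3/2)*R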
  double dum1 = boltz * temp;
  double dum2 = pow(TWOPI, 1.5);
  double arg = pow(dum1, 1.5) / planck;
  arg = (arg / pressure) * (dum1 / planck);
  arg = arg * dum2 * (weight / planck);
  arg = arg * sqrt(weight) * exp(2.5);
  double stran = gas * log(arg);
  double etran = 1.5 * rt;
  double ctran = 1.5 * gas;
  // Compute contributions due to electronic motion:
  //   It is assumed that the first electronic excitation energy
  //   is much greater than kt and that the ground state has a
  //   degeneracy of one. Under these conditions the electronic
  //   partition function can be considered to be unity. The
  //   ground electronic state is taken to be the zero of
  //   electronic energy.
  // for monatomics print and return.
  if (natoms <= 1) {
    outfile.Printf("\n internal energy: %10.3f joule/mol %10.3f kcal/mol\n",
                   etran, etran * tokcal);
    outfile.Printf(  " entropy: %10.3f joule/k-mol %10.3f cal/k-mol\n",
                   stran, stran * tocal);
    outfile.Printf(  " heat capacity cv: %10.3f joule/k-mol %10.3f cal/k-mol\n",
                   ctran, ctran * tocal);
    return;
  }
  // Allocate workspace memory
  //   vtemp: vibrational temperatures, in kelvin.
  //   evibn: contribution to e from the vibration n.
  //   cvibn: contribution to cv from the vibration n.
  //   svibn: contribution to s from the vibration n.
  double* WorkSpace = new double[ 4 * nvecs ];
  double* vtemp = WorkSpace;
  double* evibn = WorkSpace + nvecs;
  double* cvibn = WorkSpace + nvecs*2;
  double* svibn = WorkSpace + nvecs*3;
  // compute contributions due to rotation.
  // Compute the principal moments of inertia, get the rotational
  // symmetry number, see if the molecule is linear, and compute
  // the rotational temperatures. Note the embedded conversion
  // of the moments to SI units.
  MomentOfInertia( natoms, crd, amass, pmom );
  outfile.Printf("\n principal moments of inertia (nuclei only) in amu-A**2:\n");
  outfile.Printf(  " %12.2f%12.2f%12.2f\n", pmom[0], pmom[1], pmom[2]);
  bool linear = false;
  // Symmetry number: only computed for linear molecules; for others the symmetry
  // number is taken to be unity.
  double sn = 1.0;
  if (natoms <= 2) {
    linear = true;
    if (amass[0] == amass[1]) sn = 2.0;
  }
  outfile.Printf("\n rotational symmetry number %3.0f\n", sn);
  double con = planck / (boltz*8.0*pipi);
  con = (con / tokg) * (planck / (tomet*tomet));
  if (linear) {
    rtemp = con / pmom[2];
    if (rtemp < 0.2) {
      outfile.Printf("\n Warning-- assumption of classical behavior for rotation\n");
      outfile.Printf(  " may cause significant error\n");
    }
    outfile.Printf("\n rotational temperature (kelvin) %12.5f\n", rtemp);
  } else {
    rtemp1 = con / pmom[0];
    rtemp2 = con / pmom[1];
    rtemp3 = con / pmom[2];
    if (rtemp1 < 0.2) {
      outfile.Printf("\n Warning-- assumption of classical behavior for rotation\n");
      outfile.Printf(  " may cause significant error\n");
    }
    outfile.Printf("\n rotational temperatures (kelvin) %12.5f%12.5f%12.5f\n",
                   rtemp1, rtemp2, rtemp3);
  }
  // erot-- rotational contribution to internal energy.
  // crot-- rotational contribution to cv.
  // srot-- rotational contribution to entropy.
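  // Classical rigid-rotor expressions used below (theta = rotational temperature,
  // sigma = symmetry number):
  //   linear:     E_rot = R*T,       Cv_rot = R,       S_rot = R * ln( e*T / (sigma*theta) )
  //   non-linear: E_rot = (3/2)*R*T, Cv_rot = (3/2)*R,
  //               S_rot = R * ln( sqrt(pi * e^3 * T^3 / (theta1*theta2*theta3)) / sigma )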
  double erot, crot, srot;
  if (linear) {
    erot = rt;
    crot = gas;
    arg = (temp/rtemp) * (e/sn);
    srot = gas * log(arg);
  } else {
    erot = 1.5 * rt;
    crot = 1.5 * gas;
    arg = sqrt(PI*e*e*e) / sn;
    double dum = (temp/rtemp1) * (temp/rtemp2) * (temp/rtemp3);
    arg = arg * sqrt(dum);
    srot = gas * log(arg);
  }
  // compute contributions due to vibration.
  // compute vibrational temperatures and zero point vibrational
  // energy. only real frequencies are included in the analysis.
  // ndof = 3*natoms - 6 - nimag
  // if (nimag .ne. 0) write(iout,1210) nimag
  // if (linear) ndof = ndof + 1
  int ndof = nvecs;
  // (---iff is the first frequency to include in thermo:)
  int iff;
  if (ilevel != 0)
    iff = 0;
  else if (linear)
    iff = 5;
  else
    iff = 6;
  con = planck / boltz;
  double ezpe = 0.0;
  for (int i = 0; i < ndof; ++i) {
    vtemp[i] = freq[i+iff] * con * 3.0e10;
    ezpe    += freq[i+iff] * 3.0e10;
  }
  ezpe = 0.5 * planck * ezpe;
  outfile.Printf("\n zero point vibrational energy %12.1f (joules/mol) \n", ezpe * avog);
  outfile.Printf(  " %12.5f (kcal/mol)\n", ezpe * tokcal * avog);
  outfile.Printf(  " %12.7f (hartree/particle)\n", ezpe / hartre);
  // compute the number of vibrations for which more than 5% of an
  // assembly of molecules would exist in vibrational excited states.
  // special printing for these modes is done to allow the user to
  // easily take internal rotations into account. the criterion
  // corresponds roughly to a low frequency of 1.9(10**13) hz, or
  // 625 cm**(-1), or a vibrational temperature of 900 k.
  int lofreq = 0;
  for (int i = 0; i < ndof; ++i)
    if (vtemp[i] < thresh) ++lofreq;
  if (lofreq != 0) {
    outfile.Printf("\n Warning-- %3i vibrations have low frequencies and may represent hindered \n",
                   lofreq);
    outfile.Printf(  " internal rotations. The contributions printed below assume that these \n");
    outfile.Printf(  " really are vibrations.\n");
  }
  // compute:
  //   evib-- the vibrational component of the internal energy.
  //   cvib-- the vibrational component of the heat capacity.
  //   svib-- the vibrational component of the entropy.
  // Per-mode harmonic oscillator contributions, with x = vtemp[i]/temp:
  //   E  = R*T * x * (1/2 + 1/(e^x - 1))
  //   Cv = R * x^2 * e^x / (e^x - 1)^2
  //   S  = R * ( x/(e^x - 1) - ln(1 - e^-x) )
  double evib = 0.0;
  double cvib = 0.0;
  double svib = 0.0;
  double scont;
  for (int i = 0; i < ndof; ++i) {
    // compute some common factors.
    double tovt  = vtemp[i] / temp;
    double etovt = exp(tovt);
    double em1   = etovt - 1.0;
    // compute contributions due to the i'th vibration.
    double econt = tovt * (0.5 + 1.0/em1);
    double ccont = etovt * pow(tovt/em1, 2.0);
    double argd  = 1.0 - 1.0/etovt;
    if (argd > 1.0e-7)
      scont = tovt/em1 - log(argd);
    else {
      scont = 0.0;
      outfile.Printf(" warning: setting vibrational entropy to zero for mode %i with vtemp = %f\n",
                     i+1, vtemp[i]);
    }
    // if (lofreq .ge. i) then
    evibn[i] = econt * rt;
    cvibn[i] = ccont * gas;
    svibn[i] = scont * gas;
    // end if
    evib += econt;
    cvib += ccont;
    svib += scont;
  }
  evib *= rt;
  cvib *= gas;
  svib *= gas;
  // the units are now:
  //   e-- joules/mol
  //   c-- joules/mol-kelvin
  //   s-- joules/mol-kelvin
  double etot = etran + erot + evib;
  double ctot = ctran + crot + cvib;
  double stot = stran + srot + svib;
  // print the sum of the hartree-fock energy and the thermal energy.
  // call tread(501,gen,47,1,47,1,0)
  // esum = gen(32) + etot/avog/hartre
  // write(iout,1230) esum
  // convert to the following and print
  //   e-- kcal/mol
  //   c-- cal/mol-kelvin
  //   s-- cal/mol-kelvin
  etran = etran * tokcal;
  ctran = ctran * tocal;
  stran = stran * tocal;
  erot  = erot * tokcal;
  crot  = crot * tocal;
  srot  = srot * tocal;
  evib  = evib * tokcal;
  cvib  = cvib * tocal;
  svib  = svib * tocal;
  etot = etran + erot + evib;
  ctot = ctran + crot + cvib;
  stot = stran + srot + svib;
  for (int i = 0; i < ndof; ++i) {
    evibn[i] *= tokcal;
    cvibn[i] *= tocal;
    svibn[i] *= tocal;
  }
  outfile.Printf("\n\n freq. E Cv S\n");
  outfile.Printf(  " cm**-1 kcal/mol cal/mol-kelvin cal/mol-kelvin\n");
  outfile.Printf(  "--------------------------------------------------------------------------------\n");
  outfile.Printf(  " Total %11.3f %11.3f %11.3f\n", etot, ctot, stot);
  outfile.Printf(  " translational %11.3f %11.3f %11.3f\n", etran, ctran, stran);
  outfile.Printf(  " rotational %11.3f %11.3f %11.3f\n", erot, crot, srot);
  outfile.Printf(  " vibrational %11.3f %11.3f %11.3f\n", evib, cvib, svib);
  for (int i = 0; i < iff; ++i)
    outfile.Printf(" %5i%10.3f\n", i+1, freq[i]);
  for (int i = 0; i < ndof; ++i) {
    outfile.Printf(" %5i%10.3f %11.3f %11.3f %11.3f\n", i+iff+1,
                   freq[i+iff], evibn[i], cvibn[i], svibn[i]);
  }
  delete[] WorkSpace;
}
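/* Illustrative call only (not part of the original file; the input arrays 'crd',
   'mass', and 'freqs' are hypothetical and must be supplied by the caller).
   For a 3-atom molecule with all 3*natoms = 9 frequencies passed in ascending
   order and ilevel != 0 so that every frequency is included:

     CpptrajFile out;
     out.OpenWrite("thermo_example.dat");
     // crd:   9 coordinates in Angstroms
     // mass:  3 atomic weights in amu
     // freqs: 9 frequencies in cm**-1, ascending
     thermo(out, 3, 9, 1, crd, mass, freqs, 298.15, 1.0);
     out.CloseFile();
*/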