// Filters a DIMACS clause-set: reads a list of clause positions (positive
// integers) from the file named in argv[1], then copies only the clauses at
// those positions from the DIMACS input on stdin to stdout.
// NOTE(review): the error message reads "Exactly one inputs is" -- grammar
// bug in a user-facing string, left untouched here.
int main(const int argc, const char* const argv[]) {
  if (argc != 2) {
    std::cerr << err << "Exactly one inputs is " "required, the name of the file containing the list of " "clause positions. The Dimacs file should be given on standard input.\n";
    return error_parameters;
  }
  std::ifstream f_in(argv[1]);
  if (not f_in) {
    std::cerr << err << "Failure opening file " << argv[1] << ".\n";
    return error_openfile;
  }
  typedef OKlib::InputOutput::CLSAdaptorFilter<> CLSAdaptorFilter;
  CLSAdaptorFilter::clause_index_container_type clause_index;
  // Scan the whole file for integers: skip any run of characters that cannot
  // start a number (outside '0'..'9' and '-'), then extract one value.
  while (not f_in.eof()) {
    while ( (((char)f_in.peek() > '9') or ((char)f_in.peek() < '0')) and ((char)f_in.peek() != '-') and not f_in.eof()) {
      f_in.seekg(1,f_in.cur); // step one character forward past the non-digit
    }
    CLSAdaptorFilter::int_type i;
    f_in >> i;
    // Keep strictly positive values only -- presumably 1-based clause
    // positions; negative/zero entries are ignored.
    if (not f_in.fail() and (i > 0)) clause_index.insert(i);
  }
  f_in.close();
  CLSAdaptorFilter::cls_adaptor_type output(std::cout);
  CLSAdaptorFilter filter(clause_index, output);
  // Parse stdin as DIMACS; the filter adaptor forwards only selected clauses.
  OKlib::InputOutput::StandardDIMACSInput<CLSAdaptorFilter>(std::cin, filter);
}
// Decrypts the file `in` byte-wise with the repeating `key`, writing to `out`
// (or to DEC_PREF + in when `out` is empty).
// Fixes over the original:
//  * `keyPos = (++keyPos) % ...` modified keyPos twice in one statement
//    (undefined behaviour before C++11, needlessly confusing after) --
//    replaced by a plain `(keyPos + 1) % ...`;
//  * an empty key caused a modulo-by-zero -- now an early return;
//  * the derived-name branch opened the ofstream with std::ifstream::out --
//    now the matching std::ofstream::out.
void caesar_cipher::decryption( const std::string &key, const std::string &in, const std::string &out )
{
    if (key.empty()) return; // avoid % 0 below; nothing sensible to decrypt with

    std::ifstream f_in(in, std::ifstream::in);
    std::ofstream f_out;
    if (out.empty()) {
        // No explicit output name: derive one by prefixing the input name.
        const std::string out_file = std::string(DEC_PREF) + in;
        f_out.open(out_file, std::ofstream::out);
    } else {
        f_out.open(out, std::ofstream::out);
    }

    std::string::size_type keyPos = 0;
    char dataByte = 0;
    while (f_in.get(dataByte)) {
        // Decrypt one byte and cycle through the key.
        f_out << DecryptByte(dataByte, key[keyPos]);
        keyPos = (keyPos + 1) % key.length();
    }
    // Both streams close themselves on destruction (RAII).
}
int main(int argc,char* argv[]) { vector<Diary> my_diary; string str; Diary diary; Diary *p; string date; ifstream f_in(fpath); while (!f_in.eof()) { getline(f_in,date); diary.SetDate(date); diary.Reset(); string str; getline(f_in,str); while (str!=".") { diary.AddLine(str); getline(f_in,str); } my_diary.push_back(diary); } f_in.close(); date=argv[1]; vector<Diary>::size_type i,ii,Size; Size=my_diary.size(); for (i=0;i<Size;i++) if (my_diary[i].GetDate()==date) break; if (i==Size) cout<<"There is not such a diary which date is "<<date<<"."<<endl; else { for (ii=i;ii<Size-1;ii++) my_diary[ii] = my_diary[ii+1]; my_diary.pop_back(); ofstream f_out(fpath); for (i=0;i<my_diary.size();i++) { f_out<<my_diary[i].GetDate()<<endl; vector<string>::size_type j; for (j=0;j<my_diary[i].Size();j++) f_out<<my_diary[i].GetText(j)<<endl; f_out<<"."<<endl; } f_out.close(); return 1; } return 0; }
int main(const int argc, const char* const argv[]) { typedef CLSAdaptorAppend<> AdaptorAppend; typedef AdaptorAppend::output_cls_adaptor_type OutputAdaptor; OutputAdaptor output(std::cout); AdaptorAppend append_cls(output); for (int i = 1; i < argc; ++i) { std::ifstream f_in(argv[i]); if (not f_in) { std::cerr << err << "Failure opening file " << argv[i] << ".\n"; return error_openfile; } OKlib::InputOutput::StandardDIMACSInput<AdaptorAppend>(f_in, append_cls); } append_cls.flush(); }
std::vector<SVMData> read_data(std::string file_name){ std::vector<SVMData> output; std::ifstream f_in(file_name, std::ios_base::in); if(!f_in.good()) return output; char line[256]; while(!f_in.eof() && f_in.getline(line, 256)){ SVMData svmData; if (analyze_features(line, svmData)) output.push_back(svmData); } return output; }
// 参数的合法性检验 void ContentParse(const string &scene_file, ostream &out) { string line; ifstream f_in(scene_file); if(!f_in) { perror("scene_file open error: "); exit(EXIT_FAILURE); } istringstream buffer; string command; // 控制变量 bool independent_mirror = false; // 持久变量(跨循环使用) Material material; vector<Vector> vertices; // 辅助变量(免除循环内定义的变量) Vector vec_general; int v1, v2, v3, verts_num; //Triangle triangle; //Sphere sphere; Light light; double degrees; // 计数变量 int count = 0; cout << '\n'; while(std::getline(f_in, line)) { ++count; cout << "\r\tline " << count << "..."; //out << line << '\n'; if(0 == line.size() || '#' == line.at(0)) { continue; } buffer.clear(); // 不知道为什么,一定要加这个,否则camera那行就会读不进去 buffer.str(line); buffer >> command; if("size" == command) { buffer >> G_WIDTH >> G_HEIGHT; } else if("maxdepth" == command)
// Reduces `f` modulo *this, multiplies together its first `n` successive
// Frobenius images (using the precomputed basis `b`), and raises the product
// to the power (modulo_ - 1) / 2 modulo *this.
GaloisFieldDict
GaloisFieldDict::_gf_pow_pnm1d2(const GaloisFieldDict &f,
                                const integer_class &n,
                                const std::vector<GaloisFieldDict> &b) const
{
    GaloisFieldDict base(f);
    base %= *this; // work with f reduced modulo *this

    GaloisFieldDict frob = base; // current Frobenius image
    GaloisFieldDict acc = base;  // running product, kept reduced

    for (unsigned step = 1; step < n; ++step) {
        frob = frob.gf_frobenius_map(*this, b);
        acc *= frob;
        acc %= *this;
    }
    return gf_pow_mod(acc, (modulo_ - 1_z) / 2_z);
}
void main( int argc, char *argv[] ) { if (argc!=4) { cout<<" argc "<<argc<<" \nUsage \n"<<argv[0] <<" File_X_col File_To_Build_Spline File_To_Write\n" <<" File_To_Build_Spline: NumX_Grid 50 Misfit 0.1+ data\n"; exit(1); }; char tmp[256]; double Misf,Mis; int NumX; fstream f_out(argv[3],ios::out); TData<real> *in=NULL,*in1=NULL,*out=NULL; cout<<" coreleft "<<coreleft()<<"\n"; InputTDataF(argv[1],in); cout<<" NC "<<in->N<<" NR "<<in->I[0]<<"\n"; InputTDataF(argv[2],in1); cout<<" NC "<<in1->N<<" NR "<<in1->I[0]<<"\n"; fstream f_in(argv[2],ios::in);f_in>>tmp>>NumX>>tmp>>Misf;f_in.close(); DataRegister("TDataF",out); int NC=in1->N,NR=in->I[0]; int *I=new int[in1->N];for (int k=0;k<NC;k++) I[k]=NR; out->SetDim(NC,I);movmem(in->D[0],out->D[0],sizeof(out->D[0][0])*NR); cout<<" NC "<<NC<<" NR "<<NR<<"\n"; CurveSpline *S=new CurveSpline[NC-1]; int n=NumX; Mis=Misf; // TData<double> **S=new TData<double>*[NC-1]; for (k=1;k<NC;k++) {S[k-1].Generate(NumX,Misf,0,*in1,0,k);NumX=n;Misf=Mis;} cout<<" Constructed\n"; cout<<" coreleft "<<coreleft()<<"\n"; // for (k=1;k<NR;k++) {cout<<out->D[0][k]<<"\n";} for (k=1;k<NC;k++) {S[k-1].Evaluate(*out,0,k);} OutputTDataF(f_out,*out); delete in;delete in1;delete out;delete I; // for (k=0;k<NC-1;k++) delete S[k]; delete S; f_out.close(); cout<<" coreleft "<<coreleft()<<"\n"; };
/*////////////////////////////////////////////////////////////////
Parse genomic coordinates file, return a map of vectors of intervals
grouped by type of data stream.

Each input line is: "<suffix> <num_alignments> <interval description>".

Fix: std::string::substr takes (pos, LENGTH); the original passed the
end position `second_space` as the length, which only worked by
accident because stoul() stops at the first non-digit character.
////////////////////////////////////////////////////////////////*/
unordered_map<string,shared_ptr<vector<TrueGenomicInterval>>> parseGenomicIntervals(string const & fname) {
    ifstream f_in(fname);
    check_file_open(f_in, fname);
    string line;
    unordered_map<string, shared_ptr<vector<TrueGenomicInterval>> > map;
    while ( getline(f_in, line) ) {
        // First token: the stream suffix this interval belongs to.
        auto space = line.find(' ');
        string suffix = line.substr(0, space);
        if (map.find(suffix) == map.end()) {
            shared_ptr<vector<TrueGenomicInterval>> intervals(new vector<TrueGenomicInterval>());
            map[suffix] = intervals;
        }
        // Second token: the alignment count (substr length, not end pos).
        auto second_space = line.find(' ', space + 1);
        unsigned long num_alignments = stoul(line.substr(space + 1, second_space - space - 1));
        // Remainder of the line describes the interval itself.
        map[suffix]->emplace_back( line.substr(second_space + 1), num_alignments );
    }
    f_in.close();
    return map;
}
// Scans the RooDataSet objects stored in `filename`; for each one it draws the
// deltaNLL scan in the (alpha, constTerm) plane with the best-fit point and a
// 68% contour, then generates toys from the likelihood and plots the resulting
// smearing-vs-energy graphs.  Output images/ROOT files go under `outDir`.
// NOTE(review): `energy` and `lumi` appear unused in this body -- confirm.
void MakePlots(TString filename, TString energy="8TeV", TString lumi=""){
  TString outDir=filename; outDir.ReplaceAll("fitres","img");
  outDir="tmp/"; // overrides the derived directory: everything goes to tmp/
  //std::map<TString, TH2F *> deltaNLL_map;

  /*------------------------------ Plotting */
  TCanvas *c = new TCanvas("c","c");

  TFile f_in(filename, "read");
  if(f_in.IsZombie()){
    std::cerr << "File opening error: " << filename << std::endl;
    return;
  }

  TList *KeyList = f_in.GetListOfKeys();
  std::cout << KeyList->GetEntries() << std::endl;
  for(int i =0; i < KeyList->GetEntries(); i++){
    c->Clear();
    TKey *key = (TKey *)KeyList->At(i);
    // Only the stored datasets are of interest.
    if(TString(key->GetClassName())!="RooDataSet") continue;
    RooDataSet *dataset = (RooDataSet *) key->ReadObj();

    // Dataset names encode the constTerm variable; derive the alpha name.
    TString constTermName = dataset->GetName();
    TString alphaName=constTermName; alphaName.ReplaceAll("constTerm","alpha");

    TTree *tree = dataset2tree(dataset);
    TGraphErrors bestFit_ = bestFit(tree, alphaName, constTermName);
    TH2F *hist = prof2d(tree, alphaName, constTermName, "nll", "(12,-0.0005,0.0115,29,-0.0025,0.1425)",true);

    //     //    deltaNLL_map.insert(std::pair <TString, TH2F *>(keyName,hist));
    hist->SaveAs(outDir+"/deltaNLL-"+constTermName+".root");
    hist->Draw("colz");
    bestFit_.Draw("P same");
    bestFit_.SetMarkerSize(2);

    // Locate the bin with content 0 -> minimum of the deltaNLL scan.
    Int_t iBinX, iBinY;
    Double_t x,y;
    hist->GetBinWithContent2(0,iBinX,iBinY);
    x= hist->GetXaxis()->GetBinCenter(iBinX);
    y= hist->GetYaxis()->GetBinCenter(iBinY);
    TGraph nllBestFit(1,&x,&y);

    nllBestFit.SetMarkerStyle(3);
    nllBestFit.SetMarkerColor(kRed);
    TList* contour68 = contourFromTH2(hist, 0.68);

    hist->Draw("colz");
    hist->GetZaxis()->SetRangeUser(0,50);
    bestFit_.Draw("P same");
    nllBestFit.Draw("P same");
    //contour68->Draw("same");
    c->SaveAs(outDir+"/deltaNLL-"+constTermName+".png");
    hist->SaveAs("tmp/hist-"+constTermName+".root");
    nllBestFit.SaveAs("tmp/nllBestFit.root");
    contour68->SaveAs("tmp/contour68.root");
    delete hist;
    // Re-make the profile (without the flag) to turn it into a pdf.
    hist = prof2d(tree, alphaName, constTermName, "nll", "(12,-0.0005,0.0115,29,-0.0025,0.1425)");
    RooHistPdf *histPdf = nllToL(hist);
    delete hist;
    // Generate toys from the likelihood to propagate the scan uncertainty.
    RooDataSet *gen_dataset=histPdf->generate(*histPdf->getVariables(),1000000,kTRUE,kFALSE);
    TTree *genTree = dataset2tree(gen_dataset);
    genTree->SaveAs("tmp/genTree-"+constTermName+".root");
    delete gen_dataset;
    delete histPdf;

    TGraphErrors toyGraph = g(genTree, constTermName);
    TGraphErrors bestFitGraph = g(tree,alphaName, constTermName);
    TGraphErrors bestFitScanGraph = g(y, x);
    delete genTree;
    delete tree;
    toyGraph.SetFillColor(kGreen);
    toyGraph.SetLineColor(kBlue);
    toyGraph.SetLineStyle(2);
    bestFitGraph.SetLineColor(kBlack);
    bestFitScanGraph.SetLineColor(kRed);
    bestFitScanGraph.SetLineWidth(2);

    TMultiGraph g_multi("multigraph","");
    g_multi.Add(&toyGraph,"L3");
    g_multi.Add(&toyGraph,"L");
    g_multi.Add(&bestFitGraph, "L");
    g_multi.Add(&bestFitScanGraph, "L");
    g_multi.Draw("A");
    c->Clear();
    g_multi.Draw("A");
    c->SaveAs(outDir+"/smearing_vs_energy-"+constTermName+".png");

    //  TPaveText *pv = new TPaveText(0.7,0.7,1, 0.8);
    //   TLegend *legend = new TLegend(0.7,0.8,0.95,0.92);
    //   legend->SetFillStyle(3001);
    //   legend->SetFillColor(1);
    //   legend->SetTextFont(22); // 132
    //   legend->SetTextSize(0.04); // obtained by placing points with the editor and reading the value back with GetTextSize()
    //   //  legend->SetFillColor(0); // white fill colour
    //   legend->SetMargin(0.4);  // fraction of the symbol width
    //  SetLegendStyle(legend);
    //Plot(c, data,mc,mcSmeared,legend, region, filename, energy, lumi);
  }
  f_in.Close();
  return;
}
// Sets up a fixed-step explicit (forward) Euler integrator: builds an MX
// expression that steps the ODE right-hand side `nk` times from t0 to tf and
// wraps it in a function mapping (x0, p) -> xf, stored in yf_fun_.
void RKIntegratorInternal::init(){
  // Call the base class init
  IntegratorInternal::init();
  casadi_assert_message(nq_==0, "Quadratures not supported.");

  // Number of finite elements
  int nk = getOption("number_of_finite_elements");

  // Interpolation order
  int deg = getOption("interpolation_order");
  casadi_assert_message(deg==1, "Not implemented");

  // Expand f?
  bool expand_f = getOption("expand_f");

  // Size of the finite elements
  double h = (tf_-t0_)/nk;

  // MX version of the same
  MX h_mx = h;

  // Initial state
  MX Y0("Y0",nx_);

  // Free parameters
  MX P("P",np_);

  // Current state
  MX Y = Y0;

  // Dummy time (the RHS is built time-independent here)
  MX T = 0;

  // Integrate until the end
  for(int k=0; k<nk; ++k){
    // Call the ode right hand side function
    vector<MX> f_in(DAE_NUM_IN);
    f_in[DAE_T] = T;
    f_in[DAE_X] = Y;
    f_in[DAE_P] = P;
    vector<MX> f_out = f_.call(f_in);
    MX ode_rhs = f_out[DAE_ODE];

    // Explicit Euler step
    Y += h_mx*ode_rhs;
  }

  // Create a function which returns the state at the end of the time horizon
  vector<MX> yf_in(2);
  yf_in[0] = Y0;
  yf_in[1] = P;
  MXFunction yf_fun(yf_in,Y);

  // Should the function be expanded in elementary operations?
  if(expand_f){
    yf_fun.init();
    yf_fun_ = SXFunction(yf_fun);
  } else {
    yf_fun_ = yf_fun;
  }

  // Set number of derivative directions
  yf_fun_.setOption("number_of_fwd_dir",getOption("number_of_fwd_dir"));
  yf_fun_.setOption("number_of_adj_dir",getOption("number_of_adj_dir"));

  // Initialize function
  yf_fun_.init();
}
// Transcribes the forward (and optional backward) DAE into a single large
// system of collocation equations over `nk` finite elements of degree `deg`,
// builds the corresponding residual function, and hands it to an implicit
// solver.  Optionally configures a separate startup integrator to produce an
// initial guess.
void CollocationIntegratorInternal::init(){
  // Call the base class init
  IntegratorInternal::init();

  // Legendre collocation points
  double legendre_points[][6] = {
    {0},
    {0,0.500000},
    {0,0.211325,0.788675},
    {0,0.112702,0.500000,0.887298},
    {0,0.069432,0.330009,0.669991,0.930568},
    {0,0.046910,0.230765,0.500000,0.769235,0.953090}};

  // Radau collocation points
  double radau_points[][6] = {
    {0},
    {0,1.000000},
    {0,0.333333,1.000000},
    {0,0.155051,0.644949,1.000000},
    {0,0.088588,0.409467,0.787659,1.000000},
    {0,0.057104,0.276843,0.583590,0.860240,1.000000}};

  // Read options
  bool use_radau;
  if(getOption("collocation_scheme")=="radau"){
    use_radau = true;
  } else if(getOption("collocation_scheme")=="legendre"){
    use_radau = false;
  }

  // Hotstart?
  hotstart_ = getOption("hotstart");

  // Number of finite elements
  int nk = getOption("number_of_finite_elements");

  // Interpolation order
  int deg = getOption("interpolation_order");

  // Assume explicit ODE if the xdot input is empty
  bool explicit_ode = f_.input(DAE_XDOT).size()==0;

  // All collocation time points
  double* tau_root = use_radau ? radau_points[deg] : legendre_points[deg];

  // Size of the finite elements
  double h = (tf_-t0_)/nk;

  // MX version of the same
  MX h_mx = h;

  // Coefficients of the collocation equation
  vector<vector<MX> > C(deg+1,vector<MX>(deg+1));

  // Coefficients of the continuity equation
  vector<MX> D(deg+1);

  // Collocation point
  SXMatrix tau = ssym("tau");

  // For all collocation points
  for(int j=0; j<deg+1; ++j){
    // Construct Lagrange polynomials to get the polynomial basis at the collocation point
    SXMatrix L = 1;
    for(int j2=0; j2<deg+1; ++j2){
      if(j2 != j){
        L *= (tau-tau_root[j2])/(tau_root[j]-tau_root[j2]);
      }
    }
    SXFunction lfcn(tau,L);
    lfcn.init();

    // Evaluate the polynomial at the final time to get the coefficients of the continuity equation
    lfcn.setInput(1.0);
    lfcn.evaluate();
    D[j] = lfcn.output();

    // Evaluate the time derivative of the polynomial at all collocation points to get the coefficients of the continuity equation
    for(int j2=0; j2<deg+1; ++j2){
      lfcn.setInput(tau_root[j2]);
      lfcn.setFwdSeed(1.0);
      lfcn.evaluate(1,0);
      C[j][j2] = lfcn.fwdSens();
    }
  }

  // Initial state
  MX X0("X0",nx_);

  // Parameters
  MX P("P",np_);

  // Backward state
  MX RX0("RX0",nrx_);

  // Backward parameters
  MX RP("RP",nrp_);

  // Collocated differential states and algebraic variables
  int nX = (nk*(deg+1)+1)*(nx_+nrx_);
  int nZ = nk*deg*(nz_+nrz_);

  // Unknowns: one flat vector V sliced into states/algebraic variables below
  MX V("V",nX+nZ);
  int offset = 0;

  // Get collocated states, algebraic variables and times
  vector<vector<MX> > X(nk+1);
  vector<vector<MX> > RX(nk+1);
  vector<vector<MX> > Z(nk);
  vector<vector<MX> > RZ(nk);
  coll_time_.resize(nk+1);
  for(int k=0; k<nk+1; ++k){
    // Number of time points (only the element start at the final element)
    int nj = k==nk ? 1 : deg+1;

    // Allocate differential states expressions at the time points
    X[k].resize(nj);
    RX[k].resize(nj);
    coll_time_[k].resize(nj);

    // Allocate algebraic variable expressions at the collocation points
    if(k!=nk){
      Z[k].resize(nj-1);
      RZ[k].resize(nj-1);
    }

    // For all time points
    for(int j=0; j<nj; ++j){
      // Get expressions for the differential state
      X[k][j] = V[range(offset,offset+nx_)];
      offset += nx_;
      RX[k][j] = V[range(offset,offset+nrx_)];
      offset += nrx_;

      // Get the local time
      coll_time_[k][j] = h*(k + tau_root[j]);

      // Get expressions for the algebraic variables
      if(j>0){
        Z[k][j-1] = V[range(offset,offset+nz_)];
        offset += nz_;
        RZ[k][j-1] = V[range(offset,offset+nrz_)];
        offset += nrz_;
      }
    }
  }

  // Check offset for consistency: every entry of V must be used exactly once
  casadi_assert(offset==V.size());

  // Constraints
  vector<MX> g;
  g.reserve(2*(nk+1));

  // Quadrature expressions
  MX QF = MX::zeros(nq_);
  MX RQF = MX::zeros(nrq_);

  // Counter
  int jk = 0;

  // Add initial condition
  g.push_back(X[0][0]-X0);

  // For all finite elements
  for(int k=0; k<nk; ++k, ++jk){

    // For all collocation points
    for(int j=1; j<deg+1; ++j, ++jk){
      // Get the time
      MX tkj = coll_time_[k][j];

      // Get an expression for the state derivative at the collocation point
      MX xp_jk = 0;
      for(int j2=0; j2<deg+1; ++j2){
        xp_jk += C[j2][j]*X[k][j2];
      }

      // Add collocation equations to the NLP
      vector<MX> f_in(DAE_NUM_IN);
      f_in[DAE_T] = tkj;
      f_in[DAE_P] = P;
      f_in[DAE_X] = X[k][j];
      f_in[DAE_Z] = Z[k][j-1];
      vector<MX> f_out;
      if(explicit_ode){
        // Assume equation of the form ydot = f(t,y,p)
        f_out = f_.call(f_in);
        g.push_back(h_mx*f_out[DAE_ODE] - xp_jk);
      } else {
        // Assume equation of the form 0 = f(t,y,ydot,p)
        f_in[DAE_XDOT] = xp_jk/h_mx;
        f_out = f_.call(f_in);
        g.push_back(f_out[DAE_ODE]);
      }

      // Add the algebraic conditions
      if(nz_>0){
        g.push_back(f_out[DAE_ALG]);
      }

      // Add the quadrature
      if(nq_>0){
        QF += D[j]*h_mx*f_out[DAE_QUAD];
      }

      // Now for the backward problem
      if(nrx_>0){
        // Get an expression for the state derivative at the collocation point
        MX rxp_jk = 0;
        for(int j2=0; j2<deg+1; ++j2){
          rxp_jk += C[j2][j]*RX[k][j2];
        }

        // Add collocation equations to the NLP
        vector<MX> g_in(RDAE_NUM_IN);
        g_in[RDAE_T] = tkj;
        g_in[RDAE_X] = X[k][j];
        g_in[RDAE_Z] = Z[k][j-1];
        g_in[RDAE_P] = P;
        g_in[RDAE_RP] = RP;
        g_in[RDAE_RX] = RX[k][j];
        g_in[RDAE_RZ] = RZ[k][j-1];
        vector<MX> g_out;
        if(explicit_ode){
          // Assume equation of the form xdot = f(t,x,p)
          g_out = g_.call(g_in);
          g.push_back(h_mx*g_out[RDAE_ODE] - rxp_jk);
        } else {
          // Assume equation of the form 0 = f(t,x,xdot,p)
          g_in[RDAE_XDOT] = xp_jk/h_mx;
          g_in[RDAE_RXDOT] = rxp_jk/h_mx;
          g_out = g_.call(g_in);
          g.push_back(g_out[RDAE_ODE]);
        }

        // Add the algebraic conditions
        if(nrz_>0){
          g.push_back(g_out[RDAE_ALG]);
        }

        // Add the backward quadrature
        if(nrq_>0){
          RQF += D[j]*h_mx*g_out[RDAE_QUAD];
        }
      }
    }

    // Get an expression for the state at the end of the finite element
    MX xf_k = 0;
    for(int j=0; j<deg+1; ++j){
      xf_k += D[j]*X[k][j];
    }

    // Add continuity equation to NLP
    g.push_back(X[k+1][0] - xf_k);

    if(nrx_>0){
      // Get an expression for the state at the end of the finite element
      MX rxf_k = 0;
      for(int j=0; j<deg+1; ++j){
        rxf_k += D[j]*RX[k][j];
      }

      // Add continuity equation to NLP
      g.push_back(RX[k+1][0] - rxf_k);
    }
  }

  // Add initial condition for the backward integration
  if(nrx_>0){
    g.push_back(RX[nk][0]-RX0);
  }

  // Constraint expression
  MX gv = vertcat(g);

  // Make sure that the dimension is consistent with the number of unknowns
  casadi_assert_message(gv.size()==V.size(),"Implicit function unknowns and equations do not match");

  // Nonlinear constraint function input
  vector<MX> gfcn_in(1+INTEGRATOR_NUM_IN);
  gfcn_in[0] = V;
  gfcn_in[1+INTEGRATOR_X0] = X0;
  gfcn_in[1+INTEGRATOR_P] = P;
  gfcn_in[1+INTEGRATOR_RX0] = RX0;
  gfcn_in[1+INTEGRATOR_RP] = RP;

  vector<MX> gfcn_out(1+INTEGRATOR_NUM_OUT);
  gfcn_out[0] = gv;
  gfcn_out[1+INTEGRATOR_XF] = X[nk][0];
  gfcn_out[1+INTEGRATOR_QF] = QF;
  gfcn_out[1+INTEGRATOR_RXF] = RX[0][0];
  gfcn_out[1+INTEGRATOR_RQF] = RQF;

  // Nonlinear constraint function
  FX gfcn = MXFunction(gfcn_in,gfcn_out);

  // Expand f?
  bool expand_f = getOption("expand_f");
  if(expand_f){
    gfcn.init();
    gfcn = SXFunction(shared_cast<MXFunction>(gfcn));
  }

  // Get the NLP creator function
  implicitFunctionCreator implicit_function_creator = getOption("implicit_solver");

  // Allocate an NLP solver
  implicit_solver_ = implicit_function_creator(gfcn);

  // Pass options
  if(hasSetOption("implicit_solver_options")){
    const Dictionary& implicit_solver_options = getOption("implicit_solver_options");
    implicit_solver_.setOption(implicit_solver_options);
  }

  // Initialize the solver
  implicit_solver_.init();

  if(hasSetOption("startup_integrator")){

    // Create the linear solver
    integratorCreator startup_integrator_creator = getOption("startup_integrator");

    // Allocate an NLP solver
    startup_integrator_ = startup_integrator_creator(f_,g_);

    // Pass options
    startup_integrator_.setOption("number_of_fwd_dir",0); // not needed
    startup_integrator_.setOption("number_of_adj_dir",0); // not needed
    startup_integrator_.setOption("t0",coll_time_.front().front());
    startup_integrator_.setOption("tf",coll_time_.back().back());
    if(hasSetOption("startup_integrator_options")){
      const Dictionary& startup_integrator_options = getOption("startup_integrator_options");
      startup_integrator_.setOption(startup_integrator_options);
    }

    // Initialize the startup integrator
    startup_integrator_.init();
  }

  // Mark the system not yet integrated
  integrated_once_ = false;
}
// Reads the run parameters from the file "parameters", derives the lattice
// couplings (KAPPA, BMASS, TRAJECTORY_LENGTH, ...) from them, echoes the full
// configuration to stdout, seeds the RNG and calls setup().
// All quantities written here (SWEEPS, THERM, ..., KAPPA, G) are globals.
void read_param()
{
  double LAMBDA, BETA, GAMMA;

  ifstream f_in("parameters");
  if (!f_in.good()) {
    cout << "\ncan't open file parameters to read data!\n";
    exit(1);
  }
  f_in >> SWEEPS >> THERM >> GAP >> BETA >> GAMMA >> C1 >> C2 >> DT >> READIN >> SWEEPNO;
  // BETA is Rt (BETA in units of root lambda)
  // GAMMA is MASS in units of root lambda

  LAMBDA = 1.0;
  G = 0.0; // !!!

  // Dimensionless lattice coupling, rescaled by the box anisotropy T/BETA
  // depending on how many spatial directions are collapsed to size 1.
  if (D == 4) {
    KAPPA = 0.5 * NCOLOR / LAMBDA;
    if ((LX == 1)&&(LY == 1)&&(LZ == 1)&&(T!= 1)) KAPPA = KAPPA*(T*T*T)/(BETA*BETA*BETA);
    if ((LX == 1)&&(LY == 1)&&(LZ!= 1)&&(T!= 1)) KAPPA = KAPPA*(T*T)/(BETA*BETA);
    if ((LX == 1)&&(LY!= 1)&&(LZ!= 1)&&(T!= 1)) KAPPA = KAPPA*T/BETA;
  }
  if (D == 2) {
    KAPPA = (NCOLOR * 0.5 * T * T) / (LAMBDA * BETA * BETA);
    if (LX == 1 && T != 1) KAPPA = KAPPA * T / BETA;
  }

  BMASS = GAMMA * BETA / T;
  TRAJECTORY_LENGTH = (int)(0.5/DT); // leapfrog steps per trajectory of length 0.5

  if (FERMIONS == 1 && NUMLINK == 5) cout << "16 supercharge theory\n";
  if (FERMIONS == 1 && NUMLINK == 2) cout << "4 supercharge theory\n";
  if (FERMIONS == 0 && NUMLINK == 5) cout << "Quenched 16 supercharge theory\n";
  if (FERMIONS == 0 && NUMLINK == 2) cout << "Quenched 4 supercharge theory\n";
  cout << "Number of colors " << NCOLOR << "\n";
  cout << "Temporal extent " << T << "\n";
  if (D == 2) cout << "Spatial extent " << LX << "\n";
  if (D == 4) cout << "Spatial extent " << LX << "\t" << LY << "\t" << LZ << "\n";
  cout << "Inverse temperature in units of root lambda " << BETA << "\n";
  cout << "Lattice Coupling " << KAPPA << "\n";
  cout << "Mass in units of root lambda " << GAMMA << "\n";
  cout << "Lattice scalar mass squared " << KAPPA*BMASS*BMASS << "\n";
  cout << "C1 coeff " << C1 << "\n";
  cout << "C2 coeff " << C2 << "\n";
  cout << "Coupling to det " << G << "\n";
  cout << "Thermalization sweeps " << THERM << "\n";
  cout << "Number of sweeps " << SWEEPS << "\n";
  cout << "Gap between measurements " << GAP << "\n";
  cout << "Time step in leapfrog eqs " << DT << "\n";
  cout << "Trajectory length " << TRAJECTORY_LENGTH << "\n";
  cout << "Minimax approx degree " << DEGREE << "\n";
  cout << "Reading initial config: (1 for yes, 0 for no) " << READIN << "\n";
  if (PBC == 1.0)
    cout << "periodic temporal bc for fermions" << "\n";
  else
    cout << "antiperiodic temporal bc for fermions" << "\n";

  srand(random_seed());
  setup();
  return;
}
// Phrase-table extraction from word-aligned parallel training data.
// argv: <French training data> <English training data> <alignment file>.
// Counts word and phrase co-occurrences, extracts phrases (source length <= 3,
// target length <= 4) consistent with the alignment, and prints one table line
// per phrase pair: relfreq_f relfreq_e # source # target # counts # lexical
// scores # unigram LM score (all scores as negative logs).
int main(int argc, char* argv[]) {
  if (argc != 4) {
    std::cout << "Aufruf mit Parametern: <französiche Trainigsdaten> <englische Trainingsdaten> <Alignment der Trainingsdaten>\n"
              << "Folgende Ausgabe: relfreq_f relfreq_e # quellphrase # zielphrase # singlecf singlece # source_to_target target_to_source # unigram-sprachmodell\n";
    return 0;
  }
  Lexicon flex(french);
  Lexicon elex(english);
  PTree<std::pair<int, PTree<int> > > pTree;
  PTree<unsigned int> eSinglecount;
  uint eCount = 0; // total number of English words
  // Word-based translation counts e->f and f->e
  std::unordered_map<uint,Wordinfo> ef_pair, fe_pair;
  igzstream f_in(argv[1]), e_in(argv[2]), a_in(argv[3]);
  std::string f_line, e_line, a_line;
  while (getline(f_in, f_line) && getline(e_in, e_line)) {
    /*========== Read the words of both sentences plus their alignments from the respective files ==========*/
    std::string token;
    std::istringstream f_ist(f_line), e_ist(e_line);
    // Hold every word of each language (by lexicon id) and its alignments.
    std::vector<std::pair<uint, std::vector<unsigned int> > > f_vec, e_vec;
    // Insert all French words into a lexicon and record their ids.
    while(f_ist >> token) {
      uint id = flex.getWord_or_add(token);
      std::pair<uint, std::vector<unsigned int> > pair_tmp;
      pair_tmp.first = id;
      f_vec.push_back(pair_tmp);
    }
    // Insert all English words into a lexicon and record their ids.
    while (e_ist >> token) {
      uint id = elex.getWord_or_add(token);
      std::pair<uint, std::vector<unsigned int> > pair_tmp;
      pair_tmp.first = id;
      e_vec.push_back(pair_tmp);
      eCount++;
    }
    getline(a_in, a_line); // swallow the "SEND:" header line
    do {
      getline(a_in, a_line);
      if(a_line == "") break; // blank line: this sentence's alignment is done
      else {
        std::istringstream a_ist(a_line);
        int f_ind, e_ind;
        std::string s;
        a_ist >> s >> f_ind >> e_ind;
        f_vec[f_ind].second.push_back(e_ind); // record the alignment in f_vec
        e_vec[e_ind].second.push_back(f_ind); // record the alignment in e_vec
        uint& f_id = f_vec[f_ind].first, e_id = e_vec[e_ind].first;
        fe_pair[f_id].pairs[e_id]++;      // bump pair count f->e
        fe_pair[f_id].singlecount++;      // bump single count f->e
        ef_pair[e_id].pairs[f_id]++;      // bump pair count e->f
        ef_pair[e_id].singlecount++;      // bump single count e->f
      }
    } while(true);
    /*========== Both sentences now sit in vectors, each word with its alignment vector ==========
     *========== Run the extraction algorithm to find all phrases and store them in the prefix tree ==========*/
    for(unsigned int j1 = 0; j1 < f_vec.size(); j1++)
      for(unsigned int j2 = j1; j2 < std::min(j1+3,(unsigned int)f_vec.size()); j2++) { // source phrase at most 3 words
        unsigned int i1, i2;
        bool set_i = false; // tracks whether i1/i2 have been set yet
        for(unsigned int k = j1; k <= j2; k++)
          if(f_vec[k].second.size() && set_i) {
            i1 = std::min(i1, f_vec[k].second.front()); // minimal alignment inside the phrase => i1
            i2 = std::max(i2, f_vec[k].second.back());  // maximal alignment inside the phrase => i2
          }
          else if(f_vec[k].second.size() && !(set_i)) {
            i1 = f_vec[k].second.front();
            i2 = f_vec[k].second.back();
            set_i = true;
          }
        if (set_i){ // unaligned (empty) phrases are discarded without testing
          if(j1 == j2) { // single-word source phrases are ALWAYS extracted
            std::vector<uint> f_vec_tmp, e_vec_tmp;
            for (unsigned int k = j1; k <= j2; k++) f_vec_tmp.push_back(f_vec[k].first); // assemble source phrase
            for (unsigned int k = i1; k <= i2; k++) e_vec_tmp.push_back(e_vec[k].first); // assemble target phrase
            std::pair<int, PTree<int> > pair_tmp;
            pair_tmp.first = 0;
            pTree.traverse(f_vec_tmp,true,pair_tmp)->c.first++; // insert source phrase into the tree
            pTree.traverse(f_vec_tmp,false)->c.second.traverse(e_vec_tmp,true,0)->c++; // insert target phrase into the sub-tree
            eSinglecount.traverse(e_vec_tmp,true,0)->c++;
          } else if (i2-i1 < 4) { // target phrase at most 4 words
            unsigned int j1_test, j2_test;
            bool set_j_test = false; // tracks whether j1_test/j2_test were set
            for (unsigned int k = i1; k <= i2; k++)
              if (e_vec[k].second.size() && set_j_test) {
                j1_test = std::min(j1_test, e_vec[k].second.front());
                j2_test = std::max(j2_test, e_vec[k].second.back());
              }
              else if (e_vec[k].second.size() && !(set_j_test)) {
                j1_test = e_vec[k].second.front();
                j2_test = e_vec[k].second.back();
                set_j_test = true;
              }
            if (set_j_test) // empty phrases are discarded without testing
              if ((j1_test >= j1) && (j2_test <= j2)) { // phrases passing the consistency test are extracted
                std::vector<uint> f_vec_tmp, e_vec_tmp;
                for (unsigned int k = j1; k <= j2; k++) f_vec_tmp.push_back(f_vec[k].first);
                for (unsigned int k = i1; k <= i2; k++) e_vec_tmp.push_back(e_vec[k].first);
                std::pair<int, PTree<int> > pair_tmp;
                pair_tmp.first = 0;
                pTree.traverse(f_vec_tmp,true,pair_tmp)->c.first++; // insert source phrase into the tree
                pTree.traverse(f_vec_tmp,false)->c.second.traverse(e_vec_tmp,true,0)->c++; // insert target phrase into the sub-tree
                eSinglecount.traverse(e_vec_tmp,true,0)->c++;
              }
          }
        }
      }
    /*========== All admissible phrases of this sentence pair are now in the prefix tree ==========*/
    /*========== This sentence pair is done; move on to the next ==========*/
  }
  /*========== All admissible phrases of the whole training data are now in the prefix tree ==========*/
  /*========== Finally dump the prefix tree as a phrase table ==========*/
  for (PTree<std::pair<int, PTree<int> > >::iterator itor1 = pTree.begin(); itor1 != pTree.end(); itor1++) { // walk the tree
    int singlecount_f = (&*itor1) -> c.first; // read the source-phrase counter
    if (singlecount_f) {
      std::vector<uint> source_id = (&*itor1) -> phrase(); // read the source phrase (as ids)
      std::string source_phrase = "";
      for (unsigned int k = 0; k < source_id.size(); k++) // turn the id phrase into a string phrase
        source_phrase += flex.getString(source_id[k]) + " ";
      for(PTree<int>::iterator itor2 = (&*itor1) -> c.second.begin(); itor2 != (&*itor1) -> c.second.end(); itor2++) { // walk the sub-tree
        int paircount = (&*itor2) -> c; // read the target-phrase counter
        if(paircount != 0) {
          std::vector<uint> target_id = (&*itor2) -> phrase(); // read the target phrase (as ids)
          std::string target_phrase = "";
          for (unsigned int k = 0; k < target_id.size(); k++) // turn the id phrase into a string phrase
            target_phrase += elex.getString(target_id[k]) + " ";
          // Lexical score source -> target (IBM-1 style average).
          double source_to_target = 1;
          for (unsigned int k = 0; k < target_id.size(); k++) {
            double sum_stt = 0;
            for (unsigned int l = 0; l < source_id.size(); l++) {
              sum_stt += (double) fe_pair[source_id[l]].pairs[target_id[k]] / (double) fe_pair[source_id[l]].singlecount;
            }
            source_to_target *= sum_stt / source_id.size();
          }
          source_to_target = -log(source_to_target);
          // Lexical score target -> source.
          double target_to_source = 1;
          for (unsigned int k = 0; k < source_id.size(); k++) {
            double sum_tts = 0;
            for (unsigned int l = 0; l < target_id.size(); l++) {
              sum_tts += (double) ef_pair[target_id[l]].pairs[source_id[k]] / (double) ef_pair[target_id[l]].singlecount;
            }
            target_to_source *= sum_tts / target_id.size();
          }
          target_to_source = -log(target_to_source);
          uint singlecount_e = eSinglecount.traverse(target_id)->c;
          double relFreqF = log(singlecount_f) - log(paircount); // relative frequency (negative log)
          double relFreqE = log(singlecount_e) - log(paircount);
          double unigram = log(eCount) - log(singlecount_e);
          std::cout << relFreqF << " " << relFreqE << " # " << source_phrase << "# " << target_phrase << "# " << singlecount_f << " "<< singlecount_e << " # " << source_to_target << " " << target_to_source << " # " << unigram << "\n"; // emit one table row
        }
      }
    }
  }
  return 0;
}
// Opens `filename` for reading and hands the stream to the caller.
// std::ifstream is movable, so returning the temporary by value is fine.
std::ifstream MyFile::read(const std::string filename) {
    return std::ifstream(filename);
}
// Builds a fixed-step implicit Runge-Kutta integrator for the ODE given by
// `f` (inputs x, p; output dot(x)): N steps per call, collocation of the
// given order/scheme, each step solved with the named rootfinder.
// Returns a Function mapping (x0, p, h) -> xf, where h is the total horizon.
Function simpleIRK(Function f, int N, int order, const std::string& scheme,
                   const std::string& solver, const Dict& solver_options) {
  // Consistency check
  casadi_assert_message(N>=1, "Parameter N (number of steps) must be at least 1, but got " << N << ".");
  casadi_assert_message(f.n_in()==2, "Function must have two inputs: x and p");
  casadi_assert_message(f.n_out()==1, "Function must have one outputs: dot(x)");

  // Obtain collocation points (prepend tau=0 for the step start)
  std::vector<double> tau_root = collocation_points(order, scheme);
  tau_root.insert(tau_root.begin(), 0);

  // Retrieve collocation interpolating matrices
  std::vector < std::vector <double> > C;
  std::vector < double > D;
  collocationInterpolators(tau_root, C, D);

  // Inputs of constructed function
  MX x0 = MX::sym("x0", f.sparsity_in(0));
  MX p = MX::sym("p", f.sparsity_in(1));
  MX h = MX::sym("h");

  // Time step
  MX dt = h/N;

  // Implicitly defined variables: the states at the collocation points
  MX v = MX::sym("v", repmat(x0.sparsity(), order));
  std::vector<MX> x = vertsplit(v, x0.size1());
  x.insert(x.begin(), x0);

  // Collect the equations that implicitly define v
  std::vector<MX> V_eq, f_in(2), f_out;
  for (int j=1; j<order+1; ++j) {
    // Expression for the state derivative at the collocation point
    MX xp_j = 0;
    for (int r=0; r<=order; ++r) xp_j+= C[j][r]*x[r];

    // Collocation equations
    f_in[0] = x[j];
    f_in[1] = p;
    f_out = f(f_in);
    V_eq.push_back(dt*f_out.at(0)-xp_j);
  }

  // Root-finding function
  Function rfp("rfp", {v, x0, p, h}, {vertcat(V_eq)});

  // Create a implicit function instance to solve the system of equations
  Function ifcn = rootfinder("ifcn", solver, rfp, solver_options);

  // Get state at end time by stepping N times
  MX xf = x0;
  for (int k=0; k<N; ++k) {
    // Initial guess for the collocation states: current state repeated.
    std::vector<MX> ifcn_out = ifcn({repmat(xf, order), xf, p, h});
    x = vertsplit(ifcn_out[0], x0.size1());

    // State at end of step: continuity-equation combination of the stages
    xf = D[0]*x0;
    for (int i=1; i<=order; ++i) {
      xf += D[i]*x[i-1];
    }
  }

  // Form discrete-time dynamics
  return Function("F", {x0, p, h}, {xf}, {"x0", "p", "h"}, {"xf"});
}
// Reads a stability-scan text file and fills graphs of deltaM (data, MC) and
// optionally deltaG versus the scan variable, restricted to rows matching
// `region_` (all regions when it is empty).
// NOTE(review): f_in.bad() does NOT detect a failed open -- that sets failbit,
// not badbit; `!f_in.good()` (or is_open()) would.  Confirm before relying on
// the error branch.
// NOTE(review): this definition appears to continue beyond the visible chunk.
TGraphErrors *columns_vs_var(TString filename, TString region_, int column, double& rMin, double& rMax, bool updateRange=false){
  TString region;
  TString xVar, rangeMin, rangeMax,numEvents;
  double deltaM_data, err_deltaM_data;
  double deltaM_MC, err_deltaM_MC;
  double deltaG, err_deltaG;
  bool isDeltaG=false;
  std::vector<TString> xLabels;
  double rMin_=10, rMax_=-10;

  TGraphErrors deltaM_data_graph(100), deltaM_MC_graph(100), deltaG_graph(100);
  TH1F deltaM_data_hist("deltaM_data_hist", "#Delta m [GeV/c^{2}]", 1000, -5, 5);
  int i_point=0;

  //   std::cout << "------------------------------" << std::endl;
  //   std::cout << "[STATUS] Starting with deltaM stability for region: " << region_ << "\t" << "column=" << column <<std::endl;
  std::ifstream f_in(filename);
  if(f_in.bad()){
    std::cerr << "[ERROR] File " << filename << " not found or not readable" << std::endl;
    return NULL;
  }

  //  while(f_in.peek()!=EOF && f_in.good()){
  while(f_in.peek()!=EOF && f_in.good()){
    if(f_in.peek()==10){ // 10 = \n  (empty line: consume and move on)
      f_in.get();
      continue;
    }
    if(f_in.peek() == 35){ // 35 = #  (comment line)
      std::cout << "[DEBUG] Ignore line" << std::endl;
      f_in.ignore(1000,10); // ignore the rest of the line until \n
      continue;
    }

    f_in >> region >> xVar >> rangeMin >> rangeMax >> numEvents >> deltaM_data >> err_deltaM_data >> deltaM_MC >> err_deltaM_MC;
    //    std::cout << region << "\t" << xVar << "\t" << rangeMin << "\t" << rangeMax << "\t" << deltaM_data << "\t" << deltaM_MC << "\t" << err_deltaM_MC << "\t" << f_in.peek() << std::endl;
    if(f_in.peek()!=10){ // 10 = \n -- extra deltaG columns follow on this row
      isDeltaG=true;
      f_in >> deltaG >> err_deltaG;
    }

    // Track the global value range over both data and MC.
    if(deltaM_data< rMin_) rMin_=deltaM_data;
    if(deltaM_MC< rMin_) rMin_=deltaM_MC;
    if(deltaM_data > rMax_) rMax_=deltaM_data;
    if(deltaM_MC > rMax_) rMax_=deltaM_MC;

    if(region_=="" || region.CompareTo(region_)==0){
      deltaM_data_hist.Fill(deltaM_data);
      if(xVar.CompareTo("runNumber")==0){
        // Stability plot: points are placed at consecutive indices.
        deltaM_data_graph.SetPoint(i_point, i_point, deltaM_data);
        deltaM_data_graph.SetPointError(i_point, 0, err_deltaM_data);
        deltaM_MC_graph.SetPoint(i_point, i_point, deltaM_MC);
        deltaM_MC_graph.SetPointError(i_point, 0, err_deltaM_MC);
        deltaG_graph.SetPoint(i_point, i_point, deltaG);
        deltaG_graph.SetPointError(i_point, 0, err_deltaG);
      } else {
        // because it's not a stability: place at the bin centre, half-width as x error
        float i_point_x = (rangeMax.Atof() + rangeMin.Atof())/2.;
        float i_point_ex = (rangeMax.Atof() - rangeMin.Atof())/2.;
        deltaM_data_graph.SetPoint(i_point, i_point_x, deltaM_data);
        deltaM_data_graph.SetPointError(i_point, i_point_ex, err_deltaM_data);
        //      deltaM_data_hist.Fill(deltaM_MC);
        deltaM_MC_graph.SetPoint(i_point, i_point_x, deltaM_MC);
        deltaM_MC_graph.SetPointError(i_point, i_point_ex, err_deltaM_MC);
        deltaG_graph.SetPoint(i_point, i_point_x, deltaG);
        deltaG_graph.SetPointError(i_point, i_point_ex, err_deltaG);
      }
#ifndef lightLabels
      xLabels.push_back(rangeMin+"-"+rangeMax);
#else
      if(i_point%3==0) xLabels.push_back(rangeMin);
      else xLabels.push_back("");
#endif
      //      deltaM_data_graph.GetXaxis()->SetBinLabel(i_point, rangeMin+"-"+rangeMax);
      i_point++;
      if(i_point>99){ // graphs were sized for 100 points
        std::cerr << "[ERROR] maximum number of points reached" << std::endl;
        return NULL;
      }
    }
    //else std::cout << region << std::endl;
  }
// Build an implicit function for the system augmented with sensitivity
// (Jacobian) equations with respect to the inputs listed in iind.
// Only the first output of the residual function is supported (oind==0).
ImplicitFunction ImplicitFunctionInternal::jac(const std::vector<int> iind, int oind){
  // Only a single output is supported
  casadi_assert(oind==0);

  // The residual function must be an SXFunction
  SXFunction fcn = shared_cast<SXFunction>(f_);
  casadi_assert(!fcn.isNull());

  // Jacobian of the residual w.r.t. the implicitly defined variable
  Matrix<SX> jac_z = fcn.jac(0,0);

  // Number of nonlinear equations
  int n_eq = fcn.input(0).numel();

  // Collect all inputs of the residual function
  vector<Matrix<SX> > aug_in(fcn.getNumInputs());
  aug_in[0] = fcn.inputExpr(0);
  for(int i=1; i<fcn.getNumInputs(); ++i) aug_in[i] = fcn.inputExpr(i);

  // Start from the original nonlinear equation; sensitivity equations are
  // appended below
  Matrix<SX> aug_eq = fcn.outputExpr(0);

  // One right hand side for the original system
  int nrhs = 1;

  // Append one block of sensitivity variables and equations per requested input
  for(int k=0; k<static_cast<int>(iind.size()); ++k){
    // Jacobian w.r.t. the k-th requested input (offset by 1: input 0 is z)
    Matrix<SX> jac_x = fcn.jac(iind[k]+1,0);

    // Number of entries of that input
    int n_var = fcn.input(iind[k]+1).numel();

    // Symbolic sensitivities dz/dx
    Matrix<SX> sens = ssym("dz_dx", n_eq, n_var);

    // Linearized (derivative) equation: Jz*dz_dx + Jx
    Matrix<SX> sens_eq = mul(jac_z,sens) + jac_x;

    // Stack the sensitivities onto the unknowns ...
    aug_in[0].append(vec(sens));

    // ... and the derivative equations onto the residual
    aug_eq.append(vec(sens_eq));

    // Each sensitivity column adds a right hand side
    nrhs += n_var;
  }

  // Function for the augmented system
  SXFunction f_aug(aug_in, aug_eq);

  // Create and configure the new implicit function instance
  ImplicitFunction ret;
  ret.assignNode(create(f_aug,nrhs));
  ret.setOption(dictionary());

  // Return the created solver
  return ret;
}
// Scan the toy-MC output files <dirname>/<itoy>/<fname> for itoy = 2..50
// and, for every constTerm RooDataSet scan found in each file, extract the
// best-fit (alpha, constTerm) from the smoothed nll profile together with
// the generated "true" values, filling one entry per (toy, category) into
// a memory-resident TTree which is returned (caller owns it).
TTree *ToyTree(TString dirname="test/dato/fitres/Hgg_Et-toys/0.01-0.00", TString fname="outProfile-scaleStep2smearing_7-Et_25-trigger-noPF-EB.root", TString opt="", int nSmooth=10){
  TString outDir=dirname;
  outDir.ReplaceAll("fitres","img");
  outDir="tmp/";  // NOTE(review): overrides the path just built above — confirm intended

  //std::map<TString, TH2F *> deltaNLL_map;
  //bool smooth=false;
  //if(opt.Contains("smooth")) smooth=true;

  /*------------------------------ Plotto */
  TCanvas c("ctoy","c");

  // Output tree: one entry per (toy, category)
  TTree *toys = new TTree("toys","");
  toys->SetDirectory(0);  // detach from any open file so closing files won't delete it
  Double_t constTerm_tree, constTermTrue_tree;
  Double_t alpha_tree, alphaTrue_tree;
  char catName[100];
  Int_t catIndex;
  toys->Branch("constTerm", &constTerm_tree, "constTerm/D");
  toys->Branch("alpha", &alpha_tree, "alpha/D");
  toys->Branch("constTermTrue", &constTermTrue_tree, "constTermTrue/D");
  toys->Branch("alphaTrue", &alphaTrue_tree, "alphaTrue/D");
  toys->Branch("catName", catName, "catName/C");
  toys->Branch("catIndex", &catIndex, "catIndex/I");

  // Stable category-name -> index mapping shared across all toys
  std::map<TString, Int_t> catIndexMap;

  ///1/
  for(int itoy =2; itoy <= 50; itoy++){  // toy index range is hard-coded
    // Input file and output directory for this toy
    TString filename=dirname+"/";
    filename+=itoy;
    filename+="/"+fname;
    TString fout=dirname+"/";
    fout+=itoy;
    fout+="/";
    TFile f_in(filename, "read");
    if(f_in.IsZombie()){
      std::cerr << "File opening error: " << filename << std::endl;
      continue; //return NULL;
    }
    //std::cout << filename << std::endl;
    TList *KeyList = f_in.GetListOfKeys();
    //std::cout << KeyList->GetEntries() << std::endl;
    for(int i =0; i < KeyList->GetEntries(); i++){
      c.Clear();
      TKey *key = (TKey *)KeyList->At(i);
      // Only constTerm RooDataSet scan objects are of interest
      if(TString(key->GetClassName())!="RooDataSet") continue;
      RooDataSet *dataset = (RooDataSet *) key->ReadObj();
      TString constTermName = dataset->GetName();
      TString alphaName=constTermName;
      alphaName.ReplaceAll("constTerm","alpha");
      if(constTermName.Contains("scale")) continue;
      if(constTermName.Contains("alpha")) continue;
      if(constTermName.Contains("1.4442-gold")) continue;

      TTree *tree = dataset2tree(dataset);
      // Correlation graph, saved next to the toy file
      TGraph *rhoGraph = GetRho(tree, alphaName, constTermName);
      rhoGraph->SaveAs(fout+"rhoGraph-"+constTermName+".root");
      // NOTE(review): rhoGraph is never deleted — possible per-iteration leak

      TGraphErrors bestFit_ = bestFit(tree, alphaName, constTermName);
      //TString binning="(241,-0.0005,0.2405,61,-0.00025,0.03025)"; //"(40,0.00025,0.02025,61,-0.0022975,0.1401475)";
      TString binning="(241,-0.0005,0.2405,301,-0.00005,0.03005)";
      TH2F *hist = prof2d(tree, constTermName, alphaName, "nll", binning, true, nSmooth, opt);
      //hist->SaveAs("myhist.root");

      // Locate the minimum-nll bin (content ~0 after profiling)
      Int_t iBinX, iBinY;
      hist->GetBinWithContent2(0.0002,iBinX,iBinY,1,-1,1,-1,0.0000001);
      // if(iBinX!=0 && iBinY!=0 && iBinX < 41 && iBinY < 62){
      {
        // Category name = dataset name without the "constTerm_" prefix
        TString catName_=constTermName;
        catName_.ReplaceAll("constTerm_","");
        catName_.ReplaceAll("-","_");
        if(catIndexMap.count(catName_)==0) catIndexMap.insert(std::pair<TString,Int_t>(catName_,catIndexMap.size()));
        catIndex=catIndexMap[catName_];

        // Best fit from the scanned profile
        constTerm_tree = hist->GetYaxis()->GetBinCenter(iBinY);
        alpha_tree = hist->GetXaxis()->GetBinCenter(iBinX);
        sprintf(catName,"%s", catName_.Data());  // NOTE(review): assumes name < 100 chars

        // Generated ("true") point is the first point of the best-fit graph
        bestFit_.GetPoint(0, constTermTrue_tree,alphaTrue_tree);
        // std::cout << constTerm_tree << " " << constTermTrue_tree
        //           << "\t" << alpha_tree << " " << alphaTrue_tree
        //           << std::endl;
        if(opt.Contains("scandiff")){
          constTermTrue_tree = getMinimumFromTree(tree, "nll",TString(constTermName).ReplaceAll("-","_"));
        } else if(opt.Contains("scan")){
          constTerm_tree = getMinimumFromTree(tree, "nll",TString(constTermName).ReplaceAll("-","_"));
        }
        //std::cout << iBinX << "\t" << iBinY << "\t" << constTerm_tree - getMinimumFromTree(tree, "nll",TString(constTermName).ReplaceAll("-","_")) << std::endl;
        toys->Fill();
        // }else{
        //   hist->SaveAs("myhist.root");
        //   exit(0);
      }
      delete tree;
      delete hist;
    }
    f_in.Close();
  }
  //toys->SaveAs("tmp/toysTree.root");
  return toys;
}
// For every constTerm RooDataSet scan in <filename>, draw the smoothed 2D
// nll profile (#Delta S vs #Delta C), mark and print the best-fit point,
// append it to a per-category .dat file and save png/eps images (plus a
// zoomed variant). A likelihood pdf is then built from the profile (nllToL
// or keys smoothing) and immediately discarded — the toy-generation code
// that used it is commented out below.
void MakePlots(TString filename, float zmax=30, int nSmooth=10, TString opt="", TString energy="8TeV", TString lumi=""){
  // Images go next to the input file, under img/<opt>
  TString outDir=filename;
  outDir.Remove(outDir.Last('/'));
  outDir+="/img/"+opt;
  //outDir="tmp/k5b/";
  //std::map<TString, TH2F *> deltaNLL_map;

  /*------------------------------ Plotto */
  TCanvas *c = new TCanvas("c","c");  // NOTE(review): never deleted before return

  TFile f_in(filename, "read");
  if(f_in.IsZombie()){
    std::cerr << "File opening error: " << filename << std::endl;
    return;
  }
  TList *KeyList = f_in.GetListOfKeys();
  std::cout << KeyList->GetEntries() << std::endl;
  for(int i =0; i < KeyList->GetEntries(); i++){
    c->Clear();
    TKey *key = (TKey *)KeyList->At(i);
    // Only constTerm RooDataSet scan objects are processed
    if(TString(key->GetClassName())!="RooDataSet") continue;
    RooDataSet *dataset = (RooDataSet *) key->ReadObj();
    if(dataset==NULL){
      std::cerr << "[WARNING] No dataset for " << key->GetName() << "\t" << key->GetTitle() << std::endl;
      continue;
    }
    TString constTermName = dataset->GetName();
    TString alphaName=constTermName;
    alphaName.ReplaceAll("constTerm","alpha");
    if(constTermName.Contains("absEta_1_1.4442-gold")) continue;
    if(constTermName.Contains("rho") || constTermName.Contains("phi")) continue;
    if(constTermName.Contains("scale")) continue;

    TTree *tree = dataset2tree(dataset);
    // NOTE(review): tree is never deleted in this loop (ToyTree deletes its
    // equivalent) — possible per-iteration leak, confirm ownership.
    TGraphErrors bestFit_ = bestFit(tree, alphaName, constTermName);
    //	TString binning="(241,-0.0005,0.2405,60,0.00025,0.03025)";
    TString binning="(241,-0.0005,0.2405,301,-0.00005,0.03005)";
    TH2F *hist = prof2d(tree, constTermName, alphaName, "nll", binning, true,nSmooth, opt);
    // std::cout << "Bin width = " << hist->GetXaxis()->GetBinWidth(10) << "\t" << hist->GetYaxis()->GetBinWidth(10) << std::endl;
    // std::cout << "Bin 1 center = " << hist->GetXaxis()->GetBinCenter(1) << "\t" << hist->GetYaxis()->GetBinCenter(1) << std::endl;
    // std::cout << "Bin 10 center = " << hist->GetXaxis()->GetBinCenter(10) << "\t" << hist->GetYaxis()->GetBinCenter(10) << std::endl;
    // return;
    hist->Draw("colz");
    hist->GetZaxis()->SetRangeUser(0,zmax);
    hist->GetXaxis()->SetRangeUser(0,0.15);
    hist->GetYaxis()->SetRangeUser(0,0.018);
    hist->GetXaxis()->SetTitle("#Delta S");
    hist->GetYaxis()->SetTitle("#Delta C");

    // Best fit = bin where the (profiled) nll reaches ~0
    Int_t iBinX, iBinY;
    Double_t x,y;
    hist->GetBinWithContent2(0.0002,iBinX,iBinY,1,-1,1,-1,0.0000001);
    x= hist->GetXaxis()->GetBinCenter(iBinX);
    y= hist->GetYaxis()->GetBinCenter(iBinY);
    std::cout << "Best Fit: " << x << "\t" << y << std::endl;
    TGraph nllBestFit(1,&x,&y);

    // Output file stem: <outDir>/<category>-<nSmooth>
    TString fileName=outDir+"/"+constTermName;
    fileName+="-";
    fileName+=nSmooth;
    nllBestFit.SetMarkerStyle(3);
    nllBestFit.SetMarkerColor(kRed);
    nllBestFit.Draw("P same");
    std::cout << fileName << std::endl;

    // Append the best-fit point to the cumulative .dat file
    ofstream fout(fileName+".dat", ios_base::app);
    fout << constTermName << "\t" << x << "\t" << y << std::endl;
    c->SaveAs(fileName+".png");
    c->SaveAs(fileName+".eps");
    if(fileName.Contains("constTerm")) c->SaveAs(fileName+".C");

    // Zoomed variant of the same canvas
    fileName+="-zoom";
    hist->GetZaxis()->SetRangeUser(0,1);
    //hist->GetXaxis()->SetRangeUser(0.00,0.12);
    //hist->GetYaxis()->SetRangeUser(0,0.005);
    c->SaveAs(fileName+".png");
    c->SaveAs(fileName+".eps");

    // hist->SaveAs(outDir+"/deltaNLL-"+constTermName+".root");
    // hist->Draw("colz");
    // bestFit_.Draw("P same");
    // bestFit_.SetMarkerSize(2);
    // nllBestFit.SetMarkerStyle(3);
    // nllBestFit.SetMarkerColor(kRed);
    // TList* contour68 = contourFromTH2(hist, 0.68);
    // hist->Draw("colz");
    // hist->GetZaxis()->SetRangeUser(0,zmax);
    // //bestFit_.Draw("P same");
    // nllBestFit.Draw("P same");
    // //contour68->Draw("same");
    delete hist;

    // Turn the nll profile into a likelihood pdf (two smoothing strategies)
    RooAbsPdf *histPdf = NULL;
    if(!opt.Contains("keys")){
      hist = prof2d(tree, alphaName, constTermName, "nll", binning, false, nSmooth, opt);
      histPdf = nllToL(hist);
    }else{
      hist = prof2d(tree, alphaName, constTermName, "nll", binning, false,nSmooth);
      histPdf = Smooth(hist,1,"keys");
    }
    delete hist;
    // RooDataSet *gen_dataset=histPdf->generate(*histPdf->getVariables(),1000000,kTRUE,kFALSE);
    // TTree *genTree = dataset2tree(gen_dataset);
    // genTree->SaveAs(fileName+"-genTree.root");
    // delete gen_dataset;
    //
    delete histPdf;

    // TGraphErrors toyGraph = g(genTree, constTermName);
    // TGraphErrors bestFitGraph = g(tree,alphaName, constTermName);
    // TGraphErrors bestFitScanGraph = g(y, x);
    // delete genTree;
    // delete tree;
    // toyGraph.SetFillColor(kGreen);
    // toyGraph.SetLineColor(kBlue);
    // toyGraph.SetLineStyle(2);
    // bestFitGraph.SetLineColor(kBlack);
    // bestFitScanGraph.SetLineColor(kRed);
    // bestFitScanGraph.SetLineWidth(2);
    // TMultiGraph g_multi("multigraph","");
    // g_multi.Add(&toyGraph,"L3");
    // g_multi.Add(&toyGraph,"L");
    // g_multi.Add(&bestFitGraph, "L");
    // g_multi.Add(&bestFitScanGraph, "L");
    // g_multi.Draw("A");
    // c->Clear();
    // g_multi.Draw("A");
    // c->SaveAs(outDir+"/smearing_vs_energy-"+constTermName+".png");
    // c->SaveAs(outDir+"/smearing_vs_energy-"+constTermName+".eps");
    // // TPaveText *pv = new TPaveText(0.7,0.7,1, 0.8);
    // // TLegend *legend = new TLegend(0.7,0.8,0.95,0.92);
    // // legend->SetFillStyle(3001);
    // // legend->SetFillColor(1);
    // // legend->SetTextFont(22); // 132
    // // legend->SetTextSize(0.04); // l'ho preso mettendo i punti con l'editor e poi ho ricavato il valore con il metodo GetTextSize()
    // // // legend->SetFillColor(0); // white fill color
    // // legend->SetMargin(0.4); // fraction of the symbol's width
    // // SetLegendStyle(legend);
    // //Plot(c, data,mc,mcSmeared,legend, region, filename, energy, lumi);
  }
  f_in.Close();
  return;
}