void TMVARegressionApplication( int wMs, int wM, string st, string st2, string option="", TString myMethodList = "" )
{
   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]   = 0;
   Use["PDEFoam"] = 0;
   Use["KNN"]     = 0;
   //
   // --- Linear Discriminant Analysis
   Use["LD"]      = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]  = 0;
   Use["FDA_MC"]  = 0;
   Use["FDA_MT"]  = 0;
   Use["FDA_GAMT"] = 0;
   //
   // --- Neural Network
   Use["MLP"]     = 0;
   //
   // --- Support Vector Machine
   Use["SVM"]     = 0;
   //
   // --- Boosted Decision Trees
   Use["BDT"]     = 0;
   Use["BDTG"]    = 1;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVARegressionApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);
         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object
   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   //Float_t var1, var2;
   //reader->AddVariable( "var1", &var1 );
   //reader->AddVariable( "var2", &var2 );
   Float_t pt_AK8MatchedToHbb, eta_AK8MatchedToHbb, nsv_AK8MatchedToHbb, sv0mass_AK8MatchedToHbb, sv1mass_AK8MatchedToHbb,
           nch_AK8MatchedToHbb, nmu_AK8MatchedToHbb, nel_AK8MatchedToHbb, muenfr_AK8MatchedToHbb, emenfr_AK8MatchedToHbb;
   reader->AddVariable( "pt_AK8MatchedToHbb",      &pt_AK8MatchedToHbb );
   reader->AddVariable( "eta_AK8MatchedToHbb",     &eta_AK8MatchedToHbb );
   reader->AddVariable( "nsv_AK8MatchedToHbb",     &nsv_AK8MatchedToHbb );
   reader->AddVariable( "sv0mass_AK8MatchedToHbb", &sv0mass_AK8MatchedToHbb );
   reader->AddVariable( "sv1mass_AK8MatchedToHbb", &sv1mass_AK8MatchedToHbb );
   reader->AddVariable( "nch_AK8MatchedToHbb",     &nch_AK8MatchedToHbb );
   reader->AddVariable( "nmu_AK8MatchedToHbb",     &nmu_AK8MatchedToHbb );
   reader->AddVariable( "nel_AK8MatchedToHbb",     &nel_AK8MatchedToHbb );
   reader->AddVariable( "muenfr_AK8MatchedToHbb",  &muenfr_AK8MatchedToHbb );
   reader->AddVariable( "emenfr_AK8MatchedToHbb",  &emenfr_AK8MatchedToHbb );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1:=n_pv",                      &spec1 );
   reader->AddSpectator( "spec2:=msoftdrop_AK8MatchedToHbb", &spec2 );

   // --- Book the MVA methods
   TString dir    = "weights/";
   TString prefix = "TMVARegression";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = it->first + " method";
         TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
         reader->BookMVA( methodName, weightfile );
      }
   }

   // Book output histograms (one per booked method)
   TH1* hists[100];
   Int_t nhists = -1;
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      TH1* h = new TH1F( it->first.c_str(), TString(it->first) + " method", 100, -100, 600 );
      if (it->second) hists[++nhists] = h;
   }
   nhists++;

   // 1 = signal, 0 = QCD, 2 = data
   int nameRoot = 1;
   if ((st2.find("QCD")       != std::string::npos) ||
       (st2.find("bGen")      != std::string::npos) ||
       (st2.find("bEnriched") != std::string::npos)) nameRoot = 0;
   if (st2.find("data") != std::string::npos) nameRoot = 2;
   cout << "nameRoot = " << nameRoot << endl;

   // option-----------------------------------------------------------
   int JESOption = 0;

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //
   TFile *f;
   TTree *tree;
   int nPass[20] = {0};
   int total = 0;
   double fixScaleNum[2] = {0};

   TH1D* th1 = new TH1D("a", "a", 150, 50, 200);
   string massName[nMass]    = {"Thea", "HCorr", "Reg"};
   string catName[nCat]      = {"PP", "PF", "FP", "FF"};
   string tau21Name[2]       = {"withTau21", "woTau21"};
   string catNameShort[nCat] = {"P", "F"};
   string looseTight[2]      = {"loose", "tight"};

   // Pass/fail ratio parameterisations, read from PFRatio/*.txt
   TF1 *fa[nMass][2][2][2];
   for (int i = 0; i < nMass; i++) {
      for (int j = 0; j < 2; j++) {
         for (int k = 0; k < 2; k++) {
            for (int w = 0; w < 2; w++) {
               fa[i][j][k][w] = new TF1("fa", "[0]+[1]*x+[2]*x*x+[3]*pow(x,3)", -3, 3);
               ifstream myfile(Form("PFRatio/%s_%s_%s_%s.txt",
                                    looseTight[w].data(), massName[i].data(),
                                    catNameShort[j].data(), tau21Name[k].data()));
               double para[4];
               for (int m = 0; m < 4; m++) myfile >> para[m];
               fa[i][j][k][w]->SetParameters(para[0], para[1], para[2], para[3]);
            }
         }
      }
   }

   /*
   TH1D* th2[nMass][nCat][2];
   TH1D* th3[nMass][nCat][2];
   for(int i=0;i<nMass;i++){
      for(int j=0;j<nCat;j++){
         for(int k=0;k<2;k++){
            th2[i][j][k]=(TH1D*)th1->Clone(Form("loose_%s_%s_%s",massName[i].data(),catName[j].data(),tau21Name[k].data()));
            th3[i][j][k]=(TH1D*)th1->Clone(Form("tight_%s_%s_%s",massName[i].data(),catName[j].data(),tau21Name[k].data()));
            th2[i][j][k]->Sumw2();
            th3[i][j][k]->Sumw2();
         }
      }
   }
   */

   TH1D* th2d[14];
   th2d[0]  = new TH1D("0a",  "0a",  4000, 1000, 5000);
   th2d[1]  = new TH1D("0c",  "0c",  4000, 1000, 5000);
   th2d[2]  = new TH1D("1a",  "1a",  4000, 1000, 5000);
   th2d[3]  = new TH1D("1c",  "1c",  4000, 1000, 5000);
   th2d[4]  = new TH1D("2a",  "2a",  4000, 1000, 5000);
   th2d[5]  = new TH1D("2b",  "2b",  4000, 1000, 5000);
   th2d[6]  = new TH1D("2d",  "2d",  4000, 1000, 5000);
   th2d[7]  = new TH1D("0aL", "0aL", 4000, 1000, 5000);
   th2d[8]  = new TH1D("0cL", "0cL", 4000, 1000, 5000);
   th2d[9]  = new TH1D("1aL", "1aL", 4000, 1000, 5000);
   th2d[10] = new TH1D("1cL", "1cL", 4000, 1000, 5000);
   th2d[11] = new TH1D("2aL", "2aL", 4000, 1000, 5000);
   th2d[12] = new TH1D("2bL", "2bL", 4000, 1000, 5000);
   th2d[13] = new TH1D("2dL", "2dL", 4000, 1000, 5000);

   //int nWidth=5,nBmin=11;
   int width[nWidth] = {25, 30, 35, 40};
   int bmin[nBmin]   = {100, 105, 110, 115};

   TH1D* th3d[14][nWidth][nBmin][2];
   TH1D* th3f[14][nWidth][nBmin][2];
   TH1D* th3v[14][nWidth][nBmin][2];
   for (int i = 0; i < nWidth; i++) {
      for (int j = 0; j < nBmin; j++) {
         for (int k = 0; k < 2; k++) {
            for (int l = 0; l < 14; l++) {
               th3d[l][i][j][k] = (TH1D*) th2d[l]->Clone(Form("%s_%d_%d_%s",       th2d[l]->GetTitle(), bmin[j], width[i]+bmin[j], tau21Name[k].data()));
               th3f[l][i][j][k] = (TH1D*) th2d[l]->Clone(Form("fill_%s_%d_%d_%s",  th2d[l]->GetTitle(), bmin[j], width[i]+bmin[j], tau21Name[k].data()));
               th3v[l][i][j][k] = (TH1D*) th2d[l]->Clone(Form("valid_%s_%d_%d_%s", th2d[l]->GetTitle(), bmin[j], width[i]+bmin[j], tau21Name[k].data()));
               th3d[l][i][j][k]->Sumw2();
               th3f[l][i][j][k]->Sumw2();
               th3v[l][i][j][k]->Sumw2();
            }
         }
      }
   }

   // Loop over the numbered input files
   for (int w = wMs; w < wM; w++) {
      if (w % 20 == 0) cout << w << endl;
      if (nameRoot != 1) f = TFile::Open(Form("%s%d.root", st.data(), w));
      else               f = TFile::Open(st.data());
      if (!f || !f->IsOpen()) continue;

      TDirectory *dir;
      if (nameRoot != 1) dir = (TDirectory*) f->Get(Form("%s%d.root:/tree", st.data(), w));
      else               dir = (TDirectory*) f->Get(Form("%s:/tree", st.data()));
      dir->GetObject("treeMaker", tree);
      //tree=(TTree*)f->Get("treeMaker");
      TreeReader data(tree);
      total += data.GetEntriesFast();

      for (Long64_t jEntry = 0; jEntry < data.GetEntriesFast(); jEntry++) {
         data.GetEntry(jEntry);

         Int_t nVtx = data.GetInt("nVtx");
         //0. has a good vertex
         if (nVtx < 1) continue;
         nPass[0]++;

         //1. trigger
         std::string* trigName = data.GetPtrString("hlt_trigName");
         vector<bool> &trigResult = *((vector<bool>*) data.GetPtr("hlt_trigResult"));
         bool passTrigger = false;
         for (int it = 0; it < data.GetPtrStringSize(); it++) {
            std::string thisTrig = trigName[it];
            bool results = trigResult[it];
            if (((thisTrig.find("HLT_PFHT800") != std::string::npos ||
                  thisTrig.find("HLT_AK8DiPFJet300_200_TrimMass30_BTagCSV_p20") != std::string::npos)
                 && results == 1)) {
               passTrigger = true;
               break;
            }
         }
         if (!passTrigger && nameRoot == 2) continue;
         nPass[1]++;

         //2. nJets
         const int nAK8Jet = data.GetInt("AK8PuppinJet");
         if (nAK8Jet < 2) continue;
         nPass[2]++;
         int* AK8PuppinSubSDJet = data.GetPtrInt("AK8PuppinSubSDJet");
         if (AK8PuppinSubSDJet[0] != 2 || AK8PuppinSubSDJet[1] != 2) continue;
         TClonesArray* AK8PuppijetP4 = (TClonesArray*) data.GetPtrTObject("AK8PuppijetP4");
         float* AK8PuppijetCorrUncUp   = data.GetPtrFloat("AK8PuppijetCorrUncUp");
         float* AK8PuppijetCorrUncDown = data.GetPtrFloat("AK8PuppijetCorrUncDown");
         TLorentzVector* thisJet, *thatJet;
         thisJet = (TLorentzVector*) AK8PuppijetP4->At(0);
         thatJet = (TLorentzVector*) AK8PuppijetP4->At(1);

         //3. Pt
         if (thisJet->Pt() > 99998 || thatJet->Pt() > 99998) continue;
         if (thisJet->Pt() < 300) continue;
         if (thatJet->Pt() < 300) continue;
         nPass[3]++;

         //4. tightId-----------------------------------------
         vector<bool> &AK8PuppijetPassIDTight = *((vector<bool>*) data.GetPtr("AK8PuppijetPassIDTight"));
         if (AK8PuppijetPassIDTight[0] == 0) continue;
         if (AK8PuppijetPassIDTight[1] == 0) continue;
         Float_t* AK8PuppijetCEmEF = data.GetPtrFloat("AK8PuppijetCEmEF");
         Float_t* AK8PuppijetMuoEF = data.GetPtrFloat("AK8PuppijetMuoEF");
         if (AK8PuppijetMuoEF[0] > 0.8) continue;
         if (AK8PuppijetCEmEF[0] > 0.9) continue;
         if (AK8PuppijetMuoEF[1] > 0.8) continue;
         if (AK8PuppijetCEmEF[1] > 0.9) continue;
         nPass[4]++;

         //5. Eta-----------------------------------------
         if (fabs(thisJet->Eta()) > 2.4) continue;
         if (fabs(thatJet->Eta()) > 2.4) continue;
         nPass[5]++;

         //6. DEta-----------------------------------------
         float dEta = fabs(thisJet->Eta() - thatJet->Eta());
         if (dEta > 1.3) continue;
         nPass[6]++;

         //7. Mjj-----------------------------------------
         //float mjjRed = (*thisJet+*thatJet).M()+250-thisJet->M()-thatJet->M();
         //if(mjjRed<1000)continue;
         nPass[7]++;

         //8. fatjetPRmassL2L3Corr-----------------------------------------
         nPass[8]++;

         //9. double-b tagging categories-----------------------------------------
         Float_t* AK8Puppijet_DoubleSV = data.GetPtrFloat("AK8Puppijet_DoubleSV");
         int looseStat = -1;
         int tightStat = -1;
         if      (AK8Puppijet_DoubleSV[0] > 0.3 && AK8Puppijet_DoubleSV[1] > 0.3) looseStat = 0;
         else if (AK8Puppijet_DoubleSV[0] > 0.3 && AK8Puppijet_DoubleSV[1] < 0.3) looseStat = 1;
         else if (AK8Puppijet_DoubleSV[0] < 0.3 && AK8Puppijet_DoubleSV[1] > 0.3) looseStat = 2;
         else                                                                     looseStat = 3;
         // jets with leading DoubleSV between 0.3 and 0.8 fall in none of these categories (tightStat stays -1)
         if      (AK8Puppijet_DoubleSV[0] > 0.8 && AK8Puppijet_DoubleSV[1] > 0.8) tightStat = 0;
         else if (AK8Puppijet_DoubleSV[0] > 0.8 && AK8Puppijet_DoubleSV[1] < 0.8) tightStat = 1;
         else if (AK8Puppijet_DoubleSV[0] < 0.3 && AK8Puppijet_DoubleSV[1] > 0.8) tightStat = 2;
         else if (AK8Puppijet_DoubleSV[0] < 0.3 && AK8Puppijet_DoubleSV[1] < 0.8) tightStat = 3;
         else                                                                     tightStat = -1;

         double varTemp[2];
         Float_t* AK8PuppijetSDmass = data.GetPtrFloat("AK8PuppijetSDmass");
         if (AK8PuppijetSDmass[0] < 50 || AK8PuppijetSDmass[1] < 50) continue;
         Int_t* AK8Puppijet_nSV = data.GetPtrInt("AK8Puppijet_nSV");
         vector<float> *AK8Puppijet_SVMass = data.GetPtrVectorFloat("AK8Puppijet_SVMass");
         int nEle = data.GetInt("nEle");
         int nMu  = data.GetInt("nMu");
         Float_t* AK8PuppijetEleEF = data.GetPtrFloat("AK8PuppijetEleEF");
         //Float_t* AK8PuppijetMuoEF = data.GetPtrFloat("AK8PuppijetMuoEF");
         Int_t* AK8PuppijetCMulti   = data.GetPtrInt("AK8PuppijetCMulti");
         Int_t* AK8PuppijetEleMulti = data.GetPtrInt("AK8PuppijetEleMulti");
         Int_t* AK8PuppijetMuoMulti = data.GetPtrInt("AK8PuppijetMuoMulti");

         // Evaluate the regression for each of the two leading jets; the correction factor is stored in varTemp
         for (int i = 0; i < 2; i++) {
            TLorentzVector* thisAK8Jet;
            if (i == 1) thisAK8Jet = thatJet;
            else        thisAK8Jet = thisJet;

            pt_AK8MatchedToHbb      = thisAK8Jet->Pt();
            eta_AK8MatchedToHbb     = thisAK8Jet->Eta();
            nsv_AK8MatchedToHbb     = AK8Puppijet_nSV[i];
            sv0mass_AK8MatchedToHbb = AK8Puppijet_SVMass[i][0];
            sv1mass_AK8MatchedToHbb = AK8Puppijet_SVMass[i][1];
            nmu_AK8MatchedToHbb     = AK8PuppijetMuoMulti[i];
            nel_AK8MatchedToHbb     = AK8PuppijetEleMulti[i];
            muenfr_AK8MatchedToHbb  = AK8PuppijetMuoEF[i];
            nch_AK8MatchedToHbb     = AK8PuppijetCMulti[i];
            emenfr_AK8MatchedToHbb  = AK8PuppijetEleEF[i];
            spec1 = nVtx;
            spec2 = AK8PuppijetSDmass[i];
            Float_t val = 0;
            for (Int_t ih = 0; ih < nhists; ih++) {
               TString title = hists[ih]->GetTitle();
               val = (reader->EvaluateRegression( title ))[0];
            }
            varTemp[i] = val;
         }

         double PUPPIweight[2] = {0};
         PUPPIweight[0] = getPUPPIweight(thisJet->Pt(), thisJet->Eta());
         PUPPIweight[1] = getPUPPIweight(thatJet->Pt(), thatJet->Eta());

         double PUPPIweightThea[2] = {0};
         PUPPIweightThea[0] = getPUPPIweight_o(thisJet->Pt(), thisJet->Eta());
         PUPPIweightThea[1] = getPUPPIweight_o(thatJet->Pt(), thatJet->Eta());

         double Mjja = ((*thisJet)+(*thatJet)).M()+250
                      -((*thisJet)).M()-((*thatJet)).M();

         TLorentzVector thisJetReg, thatJetReg;
         thisJetReg = (*thisJet)*varTemp[0];
         thatJetReg = (*thatJet)*varTemp[1];
         double Mjjb = (thisJetReg+thatJetReg).M()+250
                      -(thisJetReg).M()-(thatJetReg).M();

         double PUPPIweightOnRegressed[2] = {0};
         PUPPIweightOnRegressed[0] = getPUPPIweightOnRegressed(thisJetReg.Pt(), thisJetReg.Eta());
         PUPPIweightOnRegressed[1] = getPUPPIweightOnRegressed(thatJetReg.Pt(), thatJetReg.Eta());

         vector<float> *subjetSDPx = data.GetPtrVectorFloat("AK8PuppisubjetSDPx");
         vector<float> *subjetSDPy = data.GetPtrVectorFloat("AK8PuppisubjetSDPy");
         vector<float> *subjetSDPz = data.GetPtrVectorFloat("AK8PuppisubjetSDPz");
         vector<float> *subjetSDE  = data.GetPtrVectorFloat("AK8PuppisubjetSDE");
         vector<float> *AK8PuppisubjetSDRawFactor = data.GetPtrVectorFloat("AK8PuppisubjetSDRawFactor");
         TLorentzVector thisSDJet, thatSDJet;
         TLorentzVector* subjetP4[2][2];
         for (int i = 0; i < 2; i++) {
            for (int j = 0; j < 2; j++) {
               subjetP4[i][j] = new TLorentzVector(0,0,0,0);
               subjetP4[i][j]->SetPxPyPzE(subjetSDPx[i][j], subjetSDPy[i][j], subjetSDPz[i][j], subjetSDE[i][j]);
               // subjetP4[i][j]*=AK8PuppisubjetSDRawFactor[i][j];
            }
         }
         thisSDJet = (*subjetP4[0][0])*AK8PuppisubjetSDRawFactor[0][0] + (*subjetP4[0][1])*AK8PuppisubjetSDRawFactor[0][1];
         thatSDJet = (*subjetP4[1][0])*AK8PuppisubjetSDRawFactor[1][0] + (*subjetP4[1][1])*AK8PuppisubjetSDRawFactor[1][1];
         //thatSDJet=(*subjetP4[1][0])+(*subjetP4[1][1]);
         for (int i = 0; i < 2; i++)
            for (int j = 0; j < 2; j++)
               delete subjetP4[i][j]; // avoid leaking four TLorentzVectors per event

         TLorentzVector thisSDJetReg, thatSDJetReg;
         thisSDJetReg = thisSDJet*varTemp[0]*PUPPIweightOnRegressed[0];
         thatSDJetReg = thatSDJet*varTemp[1]*PUPPIweightOnRegressed[1];

         //double Mjjc= ((thisSDJet)+(thatSDJet)).M()+250
         //            -((thisSDJet)).M()-((thatSDJet)).M();
         double Mjjd = ((thisSDJet)+(thatSDJet)).M()+250
                      -((thisSDJet)).M()-((thatSDJet)).M();

         Float_t* AK8PuppijetTau1 = data.GetPtrFloat("AK8PuppijetTau1");
         Float_t* AK8PuppijetTau2 = data.GetPtrFloat("AK8PuppijetTau2");
         double puppiTau21[2];
         puppiTau21[0] = (AK8PuppijetTau2[0]/AK8PuppijetTau1[0]);
         puppiTau21[1] = (AK8PuppijetTau2[1]/AK8PuppijetTau1[1]);

         double mass_j0, mass_j1, MjjLoop;
         int massCat;
         // k enumerates the mass/pt definitions (Thea, HCorr, regressed; with/without soft-drop subjets)
         for (int k = 0; k < 7; k++) {
            if (k==0 || k==2 || k==4) {
               if (thisJet->Pt() < 300) continue;
               if (thatJet->Pt() < 300) continue;
            }
            else if (k==1) {
               if ((thisSDJet*PUPPIweightThea[0]).Pt() < 300) continue;
               if ((thatSDJet*PUPPIweightThea[1]).Pt() < 300) continue;
            }
            else if (k==3) {
               if ((thisSDJet*PUPPIweight[0]).Pt() < 300) continue;
               if ((thatSDJet*PUPPIweight[1]).Pt() < 300) continue;
            }
            else if (k==5) {
               if (thisJetReg.Pt() < 300) continue;
               if (thatJetReg.Pt() < 300) continue;
            }
            else {
               if (thisSDJetReg.Pt() < 300) continue;
               if (thatSDJetReg.Pt() < 300) continue;
            }

            if (k==0 || k==1) {
               mass_j0 = AK8PuppijetSDmass[0]*PUPPIweightThea[0];
               mass_j1 = AK8PuppijetSDmass[1]*PUPPIweightThea[1];
               massCat = 0;
            }
            else if (k==2 || k==3) {
               mass_j0 = AK8PuppijetSDmass[0]*PUPPIweight[0];
               mass_j1 = AK8PuppijetSDmass[1]*PUPPIweight[1];
               massCat = 1;
            }
            else {
               mass_j0 = AK8PuppijetSDmass[0]*varTemp[0]*PUPPIweightOnRegressed[0];
               mass_j1 = AK8PuppijetSDmass[1]*varTemp[1]*PUPPIweightOnRegressed[1];
               massCat = 2;
            }

            if      (k==0 || k==2 || k==4) MjjLoop = Mjja;
            else if (k==1) MjjLoop = ((thisSDJet)*PUPPIweightThea[0]+(thatSDJet)*PUPPIweightThea[1]).M()+250
                                    -((thisSDJet)*PUPPIweightThea[0]).M()-((thatSDJet)*PUPPIweightThea[1]).M();
            else if (k==3) MjjLoop = ((thisSDJet)*PUPPIweight[0]+(thatSDJet)*PUPPIweight[1]).M()+250
                                    -((thisSDJet)*PUPPIweight[0]).M()-((thatSDJet)*PUPPIweight[1]).M();
            else if (k==5) MjjLoop = Mjjb;
            else           MjjLoop = Mjjd;
            //cout<<mass_j0<<","<<mass_j1<<",k="<<k<<endl;

            for (int i = 0; i < nWidth; i++) {
               for (int j = 0; j < nBmin; j++) {
                  if (mass_j0 < bmin[j] || mass_j0 > width[i]+bmin[j] ||
                      mass_j1 < bmin[j] || mass_j1 > width[i]+bmin[j]) continue;
                  for (int m = 0; m < 2; m++) {
                     if (m==0 && (puppiTau21[0] > 0.6 || puppiTau21[1] > 0.6)) continue;
                     double tightPFRatio = 0, loosePFRatio = 0;
                     tightPFRatio = fa[massCat][0][m][1]->Eval(mass_j0);
                     loosePFRatio = fa[massCat][0][m][0]->Eval(mass_j0);
                     if (tightStat == 0) {
                        th3d[k][i][j][m]->Fill(MjjLoop);
                     }
                     else if (tightStat == 1) {
                        th3f[k][i][j][m]->Fill(MjjLoop);
                     }
                     else if (tightStat == 3) {
                        tightPFRatio = fa[massCat][1][m][1]->Eval(mass_j0);
                        th3v[k][i][j][m]->Fill(MjjLoop, tightPFRatio);
                     }
                     if (looseStat == 0 && tightStat != 0) {
                        th3d[k+7][i][j][m]->Fill(MjjLoop);
                     }
                     else if (looseStat == 1) {
                        th3f[k+7][i][j][m]->Fill(MjjLoop);
                     }
                     else if (looseStat == 3) {
                        loosePFRatio = fa[massCat][1][m][0]->Eval(mass_j0);
                        th3v[k+7][i][j][m]->Fill(MjjLoop, loosePFRatio);
                     }
                  }
               }
            }
         }
      }
      f->Close(); // close the per-file input before moving to the next one
   }

   for (int i = 0; i < 10; i++) cout << "npass[" << i << "]=" << nPass[i] << endl;

   TFile* outFile; //= new TFile(Form("PFRatio/%s.root",st2.data()),"recreate");
   outFile = new TFile(Form("MjjVC/%s.root", st2.data()), "recreate");
   for (int i = 0; i < nWidth; i++) {
      for (int j = 0; j < nBmin; j++) {
         for (int k = 0; k < 2; k++) {
            for (int l = 0; l < 14; l++) {
               th3d[l][i][j][k]->Write();
               th3f[l][i][j][k]->Write();
               th3v[l][i][j][k]->Write();
            }
         }
      }
   }
   outFile->Close();

   for (int i = 0; i < nWidth; i++) {
      for (int j = 0; j < nBmin; j++) {
         for (int k = 0; k < 2; k++) {
            for (int l = 0; l < 14; l++) {
               delete th3d[l][i][j][k];
               delete th3f[l][i][j][k];
               delete th3v[l][i][j][k];
            }
         }
      }
   }
   delete reader;
}
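// ---------------------------------------------------------------------------
// Invocation sketch for the analysis-specific application above. The macro
// file name, input-file prefix, and index range are illustrative assumptions,
// not actual dataset names:
//
//   root -l -b -q 'thisMacro.C+(0, 50, "inputs/QCD_HT1000_", "QCD_HT1000to1500", "", "BDTG")'
//
// wMs..wM selects the numbered inputs <st><w>.root (a single file named <st>
// is opened for signal), st2 labels the output written to MjjVC/<st2>.root,
// and myMethodList restricts which booked TMVA method(s) are evaluated.
// ---------------------------------------------------------------------------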
void TMVARegressionApplication( TString myMethodList = "" )
{
   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]   = 0;
   Use["PDEFoam"] = 1;
   Use["KNN"]     = 1;
   //
   // --- Linear Discriminant Analysis
   Use["LD"]      = 1;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]  = 1;
   Use["FDA_MC"]  = 0;
   Use["FDA_MT"]  = 0;
   Use["FDA_GAMT"] = 0;
   //
   // --- Neural Network
   Use["MLP"]     = 1;
   Use["DNN_CPU"] = 0;
   //
   // --- Support Vector Machine
   Use["SVM"]     = 0;
   //
   // --- Boosted Decision Trees
   Use["BDT"]     = 0;
   Use["BDTG"]    = 1;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVARegressionApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);
         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object
   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t var1, var2;
   reader->AddVariable( "var1", &var1 );
   reader->AddVariable( "var2", &var2 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1:=var1*2", &spec1 );
   reader->AddSpectator( "spec2:=var1*3", &spec2 );

   // --- Book the MVA methods
   TString dir    = "dataset/weights/";
   TString prefix = "TMVARegression";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = it->first + " method";
         TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
         reader->BookMVA( methodName, weightfile );
      }
   }

   // Book output histograms
   TH1* hists[100];
   Int_t nhists = -1;
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      TH1* h = new TH1F( it->first.c_str(), TString(it->first) + " method", 100, -100, 600 );
      if (it->second) hists[++nhists] = h;
   }
   nhists++;

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname ); // check if file in local directory exists
   }
   else {
      TFile::SetCacheFileDir(".");
      input = TFile::Open("http://root.cern.ch/files/tmva_reg_example.root", "CACHEREAD"); // if not: download from ROOT server
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegressionApp : Using input file: " << input->GetName() << std::endl;

   // --- Event loop

   // Prepare the tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   TTree* theTree = (TTree*)input->Get("TreeR");
   std::cout << "--- Select signal sample" << std::endl;
   theTree->SetBranchAddress( "var1", &var1 );
   theTree->SetBranchAddress( "var2", &var2 );

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries(); ievt++) {
      if (ievt%1000 == 0) {
         std::cout << "--- ... Processing event: " << ievt << std::endl;
      }
      theTree->GetEntry(ievt);

      // Retrieve the MVA target values (regression outputs) and fill into histograms
      // NOTE: EvaluateRegression(..) returns a vector for multi-target regression
      for (Int_t ih=0; ih<nhists; ih++) {
         TString title = hists[ih]->GetTitle();
         Float_t val = (reader->EvaluateRegression( title ))[0];
         hists[ih]->Fill( val );
      }
   }
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // --- Write histograms
   TFile *target = new TFile( "TMVARegApp.root","RECREATE" );
   for (Int_t ih=0; ih<nhists; ih++) hists[ih]->Write();
   target->Close();

   std::cout << "--- Created root file: \"" << target->GetName()
             << "\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVARegressionApplication is done!" << std::endl << std::endl;
}
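// ---------------------------------------------------------------------------
// Note on the regression output: TMVA::Reader::EvaluateRegression() returns a
// vector with one entry per regression target; the tutorial above only reads
// index [0]. A minimal sketch for reading all targets (the method name is an
// assumption and must match a booked method):
//
//   const std::vector<Float_t>& targets = reader->EvaluateRegression("BDTG method");
//   for (UInt_t itgt = 0; itgt < targets.size(); ++itgt)
//      std::cout << "target " << itgt << " = " << targets[itgt] << std::endl;
// ---------------------------------------------------------------------------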
////////////////////////////////////////////////////////////////////////////////
/// Main                                                                      ///
////////////////////////////////////////////////////////////////////////////////
void GrowTree(TString process, std::string regMethod="BDTG",
              Long64_t beginEntry=0, Long64_t endEntry=-1)
{
   gROOT->SetBatch(1);
   TH1::SetDefaultSumw2(1);
   gROOT->LoadMacro("HelperFunctions.h"); //< make functions visible to TTreeFormula

   if (!TString(gROOT->GetVersion()).Contains("5.34")) {
      std::cout << "INCORRECT ROOT VERSION! Please use 5.34:" << std::endl;
      std::cout << "source /uscmst1/prod/sw/cms/slc5_amd64_gcc462/lcg/root/5.34.02-cms/bin/thisroot.csh" << std::endl;
      std::cout << "Return without doing anything." << std::endl;
      return;
   }

   const TString indir  = "/afs/cern.ch/work/d/degrutto/public/MiniAOD/ZnnHbb_Phys14_PU20bx25/skimV11/";
   const TString outdir = "/afs/cern.ch/work/d/degrutto/public/MiniAOD/ZnnHbb_Phys14_PU20bx25/skimV11/step3/";
   const TString prefix = "skim_";
   const TString suffix = ".root";

   TFile *input = TFile::Open(indir + prefix + process + suffix);
   if (!input) {
      std::cout << "ERROR: Could not open input file." << std::endl;
      exit(1);
   }

   /// Make output directory if it doesn't exist
   if (gSystem->AccessPathName(outdir))
      gSystem->mkdir(outdir);

   std::cout << "--- GrowTree : Using input file: " << input->GetName() << std::endl;

   TTree *inTree = (TTree *) input->Get("tree");
   TH1F  *hcount = (TH1F *) input->Get("Count");
   TFile *output(0);
   if (beginEntry == 0 && endEntry == -1)
      output = TFile::Open(outdir + "Step3_" + process + suffix, "RECREATE");
   else
      output = TFile::Open(outdir + "Step3_" + process + TString::Format("_%Li_%Li", beginEntry, endEntry) + suffix, "RECREATE");
   TTree *outTree = inTree->CloneTree(0); // Do not copy the data yet
   /// The clone should not delete any shared i/o buffers.
   ResetDeleteBranches(outTree);

   ///-- Set branch addresses -------------------------------------------------
   EventInfo EVENT;
   double hJet_pt[MAXJ], hJet_eta[MAXJ], hJet_phi[MAXJ], hJet_m[MAXJ], hJet_ptRaw[MAXJ], hJet_genPt[MAXJ];
   int hJCidx[2];

   inTree->SetBranchStatus("*", 1);
   inTree->SetBranchStatus("hJCidx", 1);
   inTree->SetBranchStatus("Jet_*", 1);

   inTree->SetBranchAddress("hJCidx",    &hJCidx);
   inTree->SetBranchAddress("Jet_pt",    &hJet_pt);
   inTree->SetBranchAddress("Jet_eta",   &hJet_eta);
   inTree->SetBranchAddress("Jet_phi",   &hJet_phi);
   inTree->SetBranchAddress("Jet_mass",  &hJet_m);
   inTree->SetBranchAddress("Jet_rawPt", &hJet_ptRaw);
   inTree->SetBranchAddress("Jet_mcPt",  &hJet_genPt);

   ///-- Make new branches ----------------------------------------------------
   int EVENT_run, EVENT_event; // set these as TTree index?
   float lumi_ = lumi, efflumi, efflumi_old, efflumi_UEPS_up, efflumi_UEPS_down;
   float hJet_ptReg[2];
   float HptNorm, HptGen, HptReg;
   float HmassNorm, HmassGen, HmassReg;

   outTree->Branch("EVENT_run",         &EVENT_run,         "EVENT_run/I");
   outTree->Branch("EVENT_event",       &EVENT_event,       "EVENT_event/I");
   outTree->Branch("lumi",              &lumi_,             "lumi/F");
   outTree->Branch("efflumi",           &efflumi,           "efflumi/F");
   outTree->Branch("efflumi_old",       &efflumi_old,       "efflumi_old/F");
   outTree->Branch("efflumi_UEPS_up",   &efflumi_UEPS_up,   "efflumi_UEPS_up/F");
   outTree->Branch("efflumi_UEPS_down", &efflumi_UEPS_down, "efflumi_UEPS_down/F");
   outTree->Branch("hJet_ptReg",        &hJet_ptReg,        "hJet_ptReg[2]/F");
   outTree->Branch("HptNorm",           &HptNorm,           "HptNorm/F");
   outTree->Branch("HptGen",            &HptGen,            "HptGen/F");
   outTree->Branch("HptReg",            &HptReg,            "HptReg/F");
   outTree->Branch("HmassNorm",         &HmassNorm,         "HmassNorm/F");
   outTree->Branch("HmassGen",          &HmassGen,          "HmassGen/F");
   outTree->Branch("HmassReg",          &HmassReg,          "HmassReg/F");

   /// Get effective lumis
   std::map<std::string, float> efflumis = GetLumis();
   efflumi = efflumis[process.Data()];
   assert(efflumi > 0);
   efflumi_old       = efflumi;
   efflumi_UEPS_up   = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(3);
   efflumi_UEPS_down = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(4);

   TTreeFormula* ttf_lheweight = new TTreeFormula("ttf_lheweight", Form("%f", efflumi), inTree);
#ifdef STITCH
   std::map<std::string, std::string> lheweights = GetLHEWeights();
   TString process_lhe = process;
   if      (process_lhe.BeginsWith("WJets") && process_lhe != "WJetsHW") process_lhe = "WJets";
   else if (process_lhe.BeginsWith("ZJets") && process_lhe != "ZJetsHW") process_lhe = "ZJets";
   else                                                                  process_lhe = "";
   TString lheweight = lheweights[process_lhe.Data()];
   if (lheweight != "") {
      delete ttf_lheweight;
      // Bug fix for ZJetsPtZ100
      if (process == "ZJetsPtZ100")
         lheweight.ReplaceAll("lheV_pt", "999");
      std::cout << "BUGFIX: " << lheweight << std::endl;
      ttf_lheweight = new TTreeFormula("ttf_lheweight", lheweight, inTree);
   }
#endif
   ttf_lheweight->SetQuickLoad(1);

   // regression stuff here
   ///-- Setup TMVA Reader ----------------------------------------------------
   TMVA::Tools::Instance(); //< This loads the library
   TMVA::Reader * reader = new TMVA::Reader("!Color:!Silent");

   /// Get the variables
   const std::vector<std::string>& inputExpressionsReg = GetInputExpressionsReg();
   const UInt_t nvars = inputExpressionsReg.size();
   Float_t readerVars[nvars];
   int idx_rawpt = -1, idx_pt = -1, idx_et = -1, idx_mt = -1;
   for (UInt_t iexpr = 0; iexpr < nvars; iexpr++) {
      const TString& expr = inputExpressionsReg.at(iexpr);
      reader->AddVariable(expr, &readerVars[iexpr]);
      if      (expr.BeginsWith("breg_rawptJER := ")) idx_rawpt = iexpr;
      else if (expr.BeginsWith("breg_pt := "))       idx_pt    = iexpr;
      else if (expr.BeginsWith("breg_et := "))       idx_et    = iexpr;
      else if (expr.BeginsWith("breg_mt := "))       idx_mt    = iexpr;
   }
   // assert(idx_rawpt!=-1 && idx_pt!=-1 && idx_et!=-1 && idx_mt!=-1);
   assert(idx_rawpt != -1 && idx_pt != -1);

   /// Setup TMVA regression inputs
   const std::vector<std::string>& inputExpressionsReg0 = GetInputExpressionsReg0();
   const std::vector<std::string>& inputExpressionsReg1 = GetInputExpressionsReg1();
   assert(inputExpressionsReg0.size() == nvars);
   assert(inputExpressionsReg1.size() == nvars);

   /// Load TMVA weights
   TString weightdir  = "weights/";
   TString weightfile = weightdir + "TMVARegression_" + regMethod + ".testweights.xml";
   reader->BookMVA(regMethod + " method", weightfile);

   TStopwatch sw;
   sw.Start();

   /// Create TTreeFormulas
   TTreeFormula *ttf = 0;
   std::vector<TTreeFormula *>::const_iterator formIt, formItEnd;
   std::vector<TTreeFormula *> inputFormulasReg0;
   std::vector<TTreeFormula *> inputFormulasReg1;
   std::vector<TTreeFormula *> inputFormulasFJReg0;
   std::vector<TTreeFormula *> inputFormulasFJReg1;
   std::vector<TTreeFormula *> inputFormulasFJReg2;

   for (UInt_t iexpr = 0; iexpr < nvars; iexpr++) {
      ttf = new TTreeFormula(Form("ttfreg%i_0", iexpr), inputExpressionsReg0.at(iexpr).c_str(), inTree);
      ttf->SetQuickLoad(1);
      inputFormulasReg0.push_back(ttf);
      ttf = new TTreeFormula(Form("ttfreg%i_1", iexpr), inputExpressionsReg1.at(iexpr).c_str(), inTree);
      ttf->SetQuickLoad(1);
      inputFormulasReg1.push_back(ttf);
   }

   ///-- Loop over events -----------------------------------------------------
   Int_t curTree = inTree->GetTreeNumber();
   const Long64_t nentries = inTree->GetEntries();
   if (endEntry < 0) endEntry = nentries;

   Long64_t ievt = 0;
   for (ievt = TMath::Max(ievt, beginEntry); ievt < TMath::Min(nentries, endEntry); ievt++) {
      if (ievt % 2000 == 0)
         std::cout << "--- ... Processing event: " << ievt << std::endl;

      const Long64_t local_entry = inTree->LoadTree(ievt); // faster, but only for TTreeFormula
      if (local_entry < 0) break;
      inTree->GetEntry(ievt); // same event as received by LoadTree()

      if (inTree->GetTreeNumber() != curTree) {
         curTree = inTree->GetTreeNumber();
         for (formIt=inputFormulasReg0.begin(),   formItEnd=inputFormulasReg0.end();   formIt!=formItEnd; formIt++) (*formIt)->UpdateFormulaLeaves(); // if using TChain
         for (formIt=inputFormulasReg1.begin(),   formItEnd=inputFormulasReg1.end();   formIt!=formItEnd; formIt++) (*formIt)->UpdateFormulaLeaves(); // if using TChain
         for (formIt=inputFormulasFJReg0.begin(), formItEnd=inputFormulasFJReg0.end(); formIt!=formItEnd; formIt++) (*formIt)->UpdateFormulaLeaves(); // if using TChain
         for (formIt=inputFormulasFJReg1.begin(), formItEnd=inputFormulasFJReg1.end(); formIt!=formItEnd; formIt++) (*formIt)->UpdateFormulaLeaves(); // if using TChain
         for (formIt=inputFormulasFJReg2.begin(), formItEnd=inputFormulasFJReg2.end(); formIt!=formItEnd; formIt++) (*formIt)->UpdateFormulaLeaves(); // if using TChain
         ttf_lheweight->UpdateFormulaLeaves();
      }

      /// These need to be called when arrays of variable size are used in TTree.
      for (formIt=inputFormulasReg0.begin(),   formItEnd=inputFormulasReg0.end();   formIt!=formItEnd; formIt++) (*formIt)->GetNdata();
      for (formIt=inputFormulasReg1.begin(),   formItEnd=inputFormulasReg1.end();   formIt!=formItEnd; formIt++) (*formIt)->GetNdata();
      for (formIt=inputFormulasFJReg0.begin(), formItEnd=inputFormulasFJReg0.end(); formIt!=formItEnd; formIt++) (*formIt)->GetNdata();
      for (formIt=inputFormulasFJReg1.begin(), formItEnd=inputFormulasFJReg1.end(); formIt!=formItEnd; formIt++) (*formIt)->GetNdata();
      for (formIt=inputFormulasFJReg2.begin(), formItEnd=inputFormulasFJReg2.end(); formIt!=formItEnd; formIt++) (*formIt)->GetNdata();
      ttf_lheweight->GetNdata();

      /// Fill branches
      EVENT_run   = EVENT.run;
      EVENT_event = EVENT.event;
#ifdef STITCH
      efflumi = ttf_lheweight->EvalInstance();
      // efflumi_UEPS_up   = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(3);
      // efflumi_UEPS_down = efflumi * hcount->GetBinContent(2) / hcount->GetBinContent(4);
#endif

      bool verbose = false;
      for (Int_t ihj = 0; ihj < 2; ihj++) {
         /// Evaluate TMVA regression output
         for (UInt_t iexpr = 0; iexpr < nvars; iexpr++) {
            if (ihj == 0) {
               readerVars[iexpr] = inputFormulasReg0.at(iexpr)->EvalInstance();
            } else if (ihj == 1) {
               readerVars[iexpr] = inputFormulasReg1.at(iexpr)->EvalInstance();
            }
         }
         hJet_ptReg[ihj] = (reader->EvaluateRegression(regMethod + " method"))[0];
         if (verbose)
            std::cout << readerVars[idx_pt] << " " << readerVars[idx_rawpt] << " "
                      << hJet_pt[ihj] << " " << hJet_ptReg[ihj] << " " << hJet_genPt[ihj] << std::endl;

         const TLorentzVector p4Zero = TLorentzVector(0., 0., 0., 0.);
         // int idx = hJCidx[0];
         // std::cout << "the regressed pt for jet 0 is " << hJet_ptReg[0] << "; the hJCidx is " << hJCidx[0] << ", hence the original pt is " << hJet_pt[idx] << std::endl;
         const TLorentzVector& hJet_p4Norm_0 = makePtEtaPhiM(hJet_pt[hJCidx[0]], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]);
         const TLorentzVector& hJet_p4Norm_1 = makePtEtaPhiM(hJet_pt[hJCidx[1]], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]);
         const TLorentzVector& hJet_p4Gen_0  = hJet_genPt[hJCidx[0]] > 0 ?
            makePtEtaPhiM(hJet_genPt[hJCidx[0]], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]) : p4Zero;
         const TLorentzVector& hJet_p4Gen_1  = hJet_genPt[hJCidx[1]] > 0 ?
            makePtEtaPhiM(hJet_genPt[hJCidx[1]], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]) : p4Zero;
         const TLorentzVector& hJet_p4Reg_0  = makePtEtaPhiM(hJet_ptReg[0], hJet_pt[hJCidx[0]], hJet_eta[hJCidx[0]], hJet_phi[hJCidx[0]], hJet_m[hJCidx[0]]);
         const TLorentzVector& hJet_p4Reg_1  = makePtEtaPhiM(hJet_ptReg[1], hJet_pt[hJCidx[1]], hJet_eta[hJCidx[1]], hJet_phi[hJCidx[1]], hJet_m[hJCidx[1]]);

         HptNorm   = (hJet_p4Norm_0 + hJet_p4Norm_1).Pt();
         HptGen    = (hJet_p4Gen_0  + hJet_p4Gen_1 ).Pt();
         HptReg    = (hJet_p4Reg_0  + hJet_p4Reg_1 ).Pt();
         HmassNorm = (hJet_p4Norm_0 + hJet_p4Norm_1).M();
         HmassGen  = (hJet_p4Gen_0  + hJet_p4Gen_1 ).M();
         HmassReg  = (hJet_p4Reg_0  + hJet_p4Reg_1 ).M();
         // std::cout << "HmassReg is " << HmassReg << std::endl;
      }

      outTree->Fill(); // fill it!
   } // end loop over TTree entries

   /// Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   output->cd();
   outTree->Write();
   output->Close();
   input->Close();
   delete input;
   delete output;

   for (formIt=inputFormulasReg0.begin(),   formItEnd=inputFormulasReg0.end();   formIt!=formItEnd; formIt++) delete *formIt;
   for (formIt=inputFormulasReg1.begin(),   formItEnd=inputFormulasReg1.end();   formIt!=formItEnd; formIt++) delete *formIt;
   for (formIt=inputFormulasFJReg0.begin(), formItEnd=inputFormulasFJReg0.end(); formIt!=formItEnd; formIt++) delete *formIt;
   for (formIt=inputFormulasFJReg1.begin(), formItEnd=inputFormulasFJReg1.end(); formIt!=formItEnd; formIt++) delete *formIt;
   for (formIt=inputFormulasFJReg2.begin(), formItEnd=inputFormulasFJReg2.end(); formIt!=formItEnd; formIt++) delete *formIt;
   delete ttf_lheweight;

   std::cout << "==> GrowTree is done!" << std::endl << std::endl;
   return;
}
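// ---------------------------------------------------------------------------
// Invocation sketch for GrowTree. The macro file name is an assumption; the
// process key must be known to GetLumis() in HelperFunctions.h ("ZJetsPtZ100"
// appears above; the entry range in the second call is illustrative):
//
//   root -l -b -q 'GrowTree.C+("ZJetsPtZ100", "BDTG")'
//   root -l -b -q 'GrowTree.C+("ZJetsPtZ100", "BDTG", 0, 500000)'  // only a sub-range of entries
// ---------------------------------------------------------------------------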
TString useAutoencoder (TString method_name)
{
   TMVA::Tools::Instance();
   std::cout << "==> Start useAutoencoder" << std::endl;

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   Float_t signal    = 0.0;
   Float_t outSignal = 0.0;
   Float_t inSignal  = 0.0;

   std::vector<std::string> localVariableNames (variableNames+additionalVariableNames);
   std::vector<Float_t> variables (localVariableNames.size ());
   auto itVar = begin (variables);
   for (auto varName : localVariableNames) {
      Float_t* pVar = &(*itVar);
      reader->AddVariable(varName.c_str(), pVar);
      (*itVar) = 0.0;
      ++itVar;
   }
   int idxSignal = std::distance (localVariableNames.begin (),
                                  std::find (localVariableNames.begin (), localVariableNames.end (), std::string ("signal")));

   TString dir    = "weights/";
   TString prefix = "TMVAAutoencoder";
   TString weightfile  = dir + prefix + TString("_") + method_name + TString(".weights.xml");
   TString outPrefix   = "transformed";
   TString outfilename = pathToData + outPrefix + TString("_") + method_name + TString(".root");

   reader->BookMVA( method_name, weightfile );

   TFile* outFile = new TFile (outfilename.Data (), "RECREATE");

   std::vector<std::string> inputNames = {"training"};
   std::map<std::string, std::vector<std::string>> varsForInput;
   varsForInput["training"].emplace_back ("id");
   varsForInput["training"].emplace_back ("signal");

   for (auto inputName : inputNames) {
      std::stringstream outfilename;
      outfilename << inputName << "_transformed__" << method_name.Data () << ".root";
      std::cout << outfilename.str () << std::endl;
      /* return; */
      std::stringstream infilename;
      infilename << pathToData.Data () << inputName << ".root";

      TTree* outTree = new TTree("transformed", "transformed");
      std::vector<Float_t> outVariables (localVariableNames.size ());
      itVar = begin (variables);
      auto itOutVar = begin (outVariables);
      for (auto varName : localVariableNames) {
         Float_t* pOutVar = &(*itOutVar);
         outTree->Branch (varName.c_str (), pOutVar, "F");
         (*itOutVar) = 0.0;
         ++itOutVar;
         Float_t* pVar = &(*itVar);
         std::stringstream svar;
         svar << varName << "_in";
         outTree->Branch (svar.str ().c_str (), pVar, "F");
         (*itVar) = 0.0;
         ++itVar;
      }
      Float_t signal_original = 0.0;
      outTree->Branch ("signal_original", &signal_original, "F");

      TFile *input(0);
      std::cout << "infilename = " << infilename.str ().c_str () << std::endl;
      input = TFile::Open (infilename.str ().c_str ());
      TTree* tree = (TTree*)input->Get("data");

      Int_t ids; // id field if needed
      if (std::find (varsForInput[inputName].begin (), varsForInput[inputName].end (), "id") != varsForInput[inputName].end ())
         tree->SetBranchAddress("id", &ids);

      // variables for prediction
      itVar = begin (variables);
      for (auto inputName : localVariableNames) {
         Float_t* pVar = &(*itVar);
         tree->SetBranchAddress (inputName.c_str(), pVar);
         ++itVar;
      }

      for (Long64_t ievt = 0; ievt < tree->GetEntries(); ievt++) {
         tree->GetEntry(ievt);

         // predict
         signal_original = variables.at (idxSignal);
         for (int forcedSignal = 0; forcedSignal <= 1; ++forcedSignal) {
            variables.at (idxSignal) = forcedSignal;
            std::vector<Float_t> regressionValues = reader->EvaluateRegression (method_name);
            size_t idx = 0;
            for (auto it = std::begin (regressionValues), itEnd = std::end (regressionValues); it != itEnd; ++it) {
               outVariables.at (idx) = *it;
               ++idx;
            }
            outTree->Fill ();
         }
      }
      outFile->Write ();
      input->Close();
   }
   delete reader;
   return outfilename;
}
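// ---------------------------------------------------------------------------
// Usage sketch for useAutoencoder. It assumes the globals pathToData,
// variableNames, and additionalVariableNames are defined elsewhere in this
// file, and that a trained weights/TMVAAutoencoder_<method>.weights.xml file
// exists; the method name "DL" below is an assumption:
//
//   TString transformed = useAutoencoder("DL");
//   std::cout << "autoencoder output file: " << transformed << std::endl;
// ---------------------------------------------------------------------------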