// Plot the distributions of the 9 millepede alignment parameters, one 3x3
// canvas per tracker subdetector (PXB..TEC), and save each canvas as .png
// and .pdf under strOutdir.
//
//   f               - open ROOT file; made the current directory so the
//                     temporary CopyTree products are owned by it
//   tr              - tree with at least "label" and "parVal" branches
//   strMillepedeRes - millepede .res file name (used in canvas/file names)
//   strOutdir       - directory prefix for the saved images
//
// PXB, TEC, PARS, subdLabels, StrPlotType, StrCutSubd and StrPar are assumed
// to be defined elsewhere in this file.
void PlotParsDistr(TFile* f, TTree* tr, TString strMillepedeRes, TString strOutdir) {
  for (int isubd=PXB; isubd<=TEC; isubd++){
    // Canvas name: c_<resfile>_<plottype>_<subdetector>, ".res" stripped.
    TString canvName="c_";
    canvName+=strMillepedeRes;
    canvName+="_";
    canvName+=StrPlotType(PARS);
    canvName+="_";
    canvName+=subdLabels[isubd];
    canvName.ReplaceAll(".res","");
    f->cd(); // make f the current directory so CopyTree output lives there
    TCanvas* canv = new TCanvas(canvName,canvName,600,600);
    canv->Divide(3,3); // one pad per alignment parameter
    for (int parInd=1; parInd<=9; parInd++){
      canv->cd(parInd);
      // Select entries for parameter parInd: the millepede label encodes the
      // parameter index as ((label%20-1)%9+1); label<700000 restricts the
      // module range; StrCutSubd() adds the subdetector selection.
      TString strCut="((label%20-1)%9+1)==";
      strCut+=parInd;
      strCut+=" && label<700000 && ";
      strCut+=StrCutSubd(isubd);
      TString hName="hPars_";
      hName+=subdLabels[isubd];
      hName+="_";
      hName+= StrPar(parInd);
      // Copy the selected entries so the histogram axis range can be taken
      // from the min/max of this subset.
      // NOTE(review): trCut is never deleted, and an empty or constant
      // subset gives low==up, i.e. a degenerate axis — confirm whether that
      // can occur for real inputs.
      TTree* trCut = tr->CopyTree(strCut);
      float up = trCut->GetMaximum("parVal");
      float low = trCut->GetMinimum("parVal");
      std::cout<<"low="<<low<<", up="<<up<<", nent="<<trCut->GetEntries()<<std::endl;
      // Histogram is filled in units of 1e-4 (parVal scaled by 10000).
      TH1F* h = new TH1F(hName,hName,100,10000*low,10000*up);
      TString strDraw="10000*parVal>>";
      strDraw+=hName;
      trCut->Draw(strDraw,strCut,"goff"); // fill h without drawing
      h->SetMarkerStyle(2);
      h->Draw("EP");
    }// end of loop over parInd
    canvName+=".png";
    TString saveName=strOutdir+canvName;
    canv->SaveAs(saveName);
    saveName.ReplaceAll(".png",".pdf");
    canv->SaveAs(saveName);
  }//end of loop over isubd
}// end of PlotParsDistr
// Build a TGraphErrors of the fitted 511 keV light yield vs. run time.
//
// Interactively prompts for ROOT file names (an empty line finishes). For
// each file it fits the Na22 photopeak of the "sumspec" spectrum and plots
// the fitted yield against the mean event timestamp of the run, with the
// run duration as the horizontal error bar.
//
// Returns the drawn graph, or 0 if no valid input file was given.
TGraphErrors* PlotLightYieldGraph() {
  string filename;
  vector<double> x,y,ex,ey;
  while(1){
    cout<<"\n\nEnter next file to process; <enter> to finish."<<endl;
    getline(cin, filename);
    if(filename=="") break;
    //load the tree
    TTree* Events = GetEventsTree(filename.c_str());
    if(!Events) continue;
    gROOT->ProcessLine(".x analysis/Aliases.C");
    // Run time window taken from the event timestamps.
    double start = Events->GetMinimum("timestamp");
    double end = Events->GetMaximum("timestamp");
    double error = 0;
    // Title carries the run number extracted from the file name.
    TString title = TString("Na22 Spectrum, ") + TString(filename)(TRegexp("Run......"));
    TH1F* hist = new TH1F("Na22Spec",title,200,1000,2500);
    Events->Draw("sumspec >> Na22Spec","min > 0","e");
    double yield = Fit511Photopeak(hist,&error);
    x.push_back((start+end)/2.);
    ex.push_back((end-start)/2.);
    y.push_back(yield);
    ey.push_back(error);
    // FIX: delete the histogram before the next iteration. Re-creating a
    // TH1F named "Na22Spec" on every pass leaked the previous one and made
    // ROOT emit "Replacing existing TH1" warnings.
    delete hist;
  }
  if(x.empty()){
    cerr<<"No valid points found!"<<endl;
    return 0;
  }
  TGraphErrors* graph = new TGraphErrors(x.size(),&x[0],&y[0],&ex[0],&ey[0]);
  graph->Draw("ape");
  TAxis* xax = graph->GetXaxis();
  xax->SetTitle("Run Time");
  xax->SetTimeDisplay(1);
  xax->SetTimeFormat("%Y/%m/%d");
  xax->SetTimeOffset(1,"gmt");
  TAxis* yax = graph->GetYaxis();
  yax->SetTitle("511 keV Light Yield [pe/keV]");
  graph->Draw("ape"); // redraw with the configured axes
  return graph;
}
void nbx_check_2(const char * filename="counts.root") { TFile * infile = new TFile(filename,"READ"); TTree * tr = (TTree*) infile->Get("sca"); Int_t NRUNS_tmp = tr->GetMaximum("i"); const Int_t NRUNS = NRUNS_tmp; TH1D * h[NRUNS]; char h_n[NRUNS][256]; char h_t[NRUNS][256]; char cut[NRUNS][256]; TCanvas * c = new TCanvas("c","c",1400,1000); Double_t max; for(Int_t i=0; i<NRUNS; i++) { printf("i=%d\n",i); sprintf(h_n[i],"nbx_bx_%d",i+1); sprintf(h_t[i],"N_{bx} vs. bXing for i=%d",i+1); h[i] = new TH1D(h_n[i],h_n[i],120,0,120); sprintf(cut[i],"tot_bx*(i==%d)",i+1); tr->Project(h_n[i],"bx",cut[i]); max = h[i]->GetMaximum(); h[i]->Scale(1/max); c->Clear(); c->SetGrid(1,1); h[i]->Draw(); if(i==0) c->Print("nbx_check/nbx_vs_bxing.pdf(","pdf"); else if(i+1==NRUNS) c->Print("nbx_check/nbx_vs_bxing.pdf)","pdf"); else c->Print("nbx_check/nbx_vs_bxing.pdf"); }; };
// Draw "parallel coordinates" plots for every trained TMVA method found in
// the TestTree of `fin`: one canvas for signal and one for background per
// MVA, with the MVA output as the first axis followed by the input
// variables. Canvases are saved under plots/ via TMVAGlob::imgconv.
void paracoor( TString fin = "TMVA.root", Bool_t useTMVAStyle = kTRUE )
{
   // set style and remove existing canvas'
   TMVAGlob::Initialize( useTMVAStyle );

   // checks if file with name "fin" is already open, and if not opens one
   TFile* file = TMVAGlob::OpenFile( fin );
   TTree* tree = (TTree*)file->Get("TestTree");
   if(!tree) {
      cout << "--- No TestTree saved in ROOT file. Parallel coordinates will not be plotted" << endl;
      return;
   }

   // first get list of leaves in tree; split them into input variables and
   // MVA responses (TMVA bookkeeping leaves are skipped)
   TObjArray* leafList = tree->GetListOfLeaves();
   vector<TString> vars;
   vector<TString> mvas;
   for (Int_t iar=0; iar<leafList->GetSize(); iar++) {
      TLeaf* leaf = (TLeaf*)leafList->At(iar);
      if (leaf != 0) {
         TString leafName = leaf->GetName();
         if (leafName != "type" && leafName != "weight" && leafName != "boostweight" &&
             leafName != "class" && leafName != "className" && leafName != "classID" &&
             !leafName.Contains("prob_")) {
            // is MVA ?
            if (TMVAGlob::ExistMethodName( leafName )) {
               mvas.push_back( leafName );
            }
            else {
               vars.push_back( leafName );
            }
         }
      }
   }

   cout << "--- Found: " << vars.size() << " variables" << endl;
   cout << "--- Found: " << mvas.size() << " MVA(s)" << endl;

   TString type[2] = { "Signal", "Background" };
   const Int_t nmva = mvas.size();
   // NOTE(review): csig/cbkg are declared but never actually filled — the
   // ternary below only initialises c1, which is immediately overwritten by
   // the `new TCanvas` assignment.
   TCanvas* csig[nmva];
   TCanvas* cbkg[nmva];
   for (Int_t imva=0; imva<mvas.size(); imva++) {
      cout << "--- Plotting parallel coordinates for : " << mvas[imva] << " & input variables" << endl;
      for (Int_t itype=0; itype<2; itype++) { // itype 0 = Signal, 1 = Background
         // create draw option: "mva:var1:var2:..." without the trailing ':'
         TString varstr = mvas[imva] + ":";
         for (Int_t ivar=0; ivar<vars.size(); ivar++) varstr += vars[ivar] + ":";
         varstr.Resize( varstr.Last( ':' ) );

         // create canvas
         TString mvashort = mvas[imva];
         mvashort.ReplaceAll("MVA_","");
         TCanvas* c1 = (itype == 0) ? csig[imva] : cbkg[imva];
         c1 = new TCanvas( Form( "c1_%i",itype ),
                           Form( "Parallel coordinate representation for %s and input variables (%s events)",
                                 mvashort.Data(), type[itype].Data() ),
                           50*(itype), 50*(itype), 750, 500 );
         // classID selects the event class for this canvas; "para" draws
         // the tree in parallel-coordinates mode
         tree->Draw( varstr.Data(), Form("classID==%i",1-itype) , "para" );
         c1->ToggleEditor();
         gStyle->SetOptTitle(0);

         // highlight a band of width 20% of the MVA range at the low edge
         // of the MVA axis
         TParallelCoord* para = (TParallelCoord*)gPad->GetListOfPrimitives()->FindObject( "ParaCoord" );
         TParallelCoordVar* mvavar = (TParallelCoordVar*)para->GetVarList()->FindObject( mvas[imva] );
         Double_t minrange = tree->GetMinimum( mvavar->GetName() );
         Double_t maxrange = tree->GetMaximum( mvavar->GetName() );
         Double_t width = 0.2*(maxrange - minrange);
         Double_t x1 = minrange, x2 = x1 + width;
         TParallelCoordRange* parrange = new TParallelCoordRange( mvavar, x1, x2 );
         parrange->SetLineColor(4);
         mvavar->AddRange( parrange );
         para->AddSelection("-1");

         // add selection ranges on up to two of the input variables.
         // NOTE(review): the loop starts at ivar=1 so "case 0" below is dead
         // code, and when vars.size()==2 the bound TMath::Min(size+1,3)
         // allows vars[2] — one past the end. Confirm against the upstream
         // TMVA macro before relying on this.
         for (Int_t ivar=1; ivar<TMath::Min(Int_t(vars.size()) + 1,3); ivar++) {
            TParallelCoordVar* var = (TParallelCoordVar*)para->GetVarList()->FindObject( vars[ivar] );
            minrange = tree->GetMinimum( var->GetName() );
            maxrange = tree->GetMaximum( var->GetName() );
            width = 0.2*(maxrange - minrange);

            switch (ivar) {
            case 0: { x1 = minrange; x2 = x1 + width; break; }
            case 1: { x1 = 0.5*(maxrange + minrange - width)*0.02; x2 = x1 + width*0.02; break; }
            case 2: { x1 = maxrange - width; x2 = x1 + width; break; }
            }

            parrange = new TParallelCoordRange( var, x1, x2 );
            parrange->SetLineColor( ivar == 0 ? 2 : ivar == 1 ? 5 : 6 );
            var->AddRange( parrange );
            para->AddSelection( Form("%i",ivar) );
         }

         c1->Update();

         TString fname = Form( "plots/paracoor_c%i_%s", imva, itype == 0 ? "S" : "B" );
         TMVAGlob::imgconv( c1, fname );
      }
   }
}
//-------------------------------------- void FBI3D(TString namefile="", TString namefile2=""){ Int_t MaxNCoinc = 9e7; Int_t* POS = (Int_t*)malloc(MaxNCoinc*sizeof(Int_t)); FILE* fichero; FILE* fichero2; cout << namefile<<" "<<namefile2<<endl; // === STEP 1 === NORMALIZATION ================ if (namefile2=="normf.raw"){ // Allows reusing previous normalization image cout << "Using precalculated Normalization file" << endl; fichero2 = fopen(namefile2,"rb"); fread(SENS,sizeof(float),nvoxels,fichero2); fclose(fichero2); }else{ // Create normalization from ROOT file (filtered to reduce noise) int nt = 10; // Number of filter iterations SENS = normalization(namefile2,nt); // Storing the normalization image so that it can be used for other reconstructions of this scanner fichero2 = fopen("normf.raw","wb"); fwrite(SENS,sizeof(float),nvoxels,fichero2); fclose(fichero2); } // === STEP 2 === READ ROOT FILE ====== int posic; TFile *f1 = new TFile(namefile,"READ"); TTree *Coincidences = (TTree*)f1->Get("Coincidences"); ncoinc = Coincidences->GetEntries(); cout << "Simulated Coincidences = " << ncoinc << endl; define_branches(Coincidences); Scanner = (Coincidences->GetMaximum("globalPosX1")) - (Coincidences->GetMinimum("globalPosX1")); // Diameter of the scanner from the detected counts FOV_Z = (Coincidences->GetMaximum("globalPosZ1")) - (Coincidences->GetMinimum("globalPosZ1")); dz = float(NZS)/FOV_Z; // === STEP 3 === REFERENCE IMAGE (OPTIONAL) ==== for(Int_t iV=0; iV<nvoxels ; iV++){IMG_N[iV]=0.;} // Initial Image for(Int_t ic = 0; ic <= ncoinc; ic++){ // Coincidences->GetEntry(ic); ix = int(source_X*dxy + RESM); iy = int(source_Y*dxy + RESM); iz = int(source_Z*dz + NZM); if (ix>=0 && ix<RES && iy>=0 && iy<RES && iz>=0 && iz<NZS){ // Check voxel valid iV = iz*RES*RES+iy*RES+ix; IMG_N[iV]++; } } fichero = fopen("image_ref.raw","wb"); fwrite(IMG_N,sizeof(float),nvoxels,fichero); fclose(fichero); Float_t sens_corr = 0.; for(Int_t iV=0; iV<nvoxels ; iV++){ sens_corr = SENS[iV]; 
if (sens_corr>0){sens_corr = 1.0/sens_corr;} IMG_N[iV]*=sens_corr; } // Reference image corrected by sensitivity fichero = fopen("image_ref_senscor.raw","wb"); fwrite(IMG_N,sizeof(float),nvoxels,fichero); fclose(fichero); // === STEP 4 === INITIAL IMAGE ================ for(Int_t iV=0; iV<nvoxels ; iV++){IMG[iV]=0;} // Initial Image for(Int_t iV=0; iV<ncoinc; iV++){POS[iV]=-1;} // Initial Position // === STEP 5 === ITERATIONS ================== for(Int_t iter=1; iter<=niter ; iter++){ // Iterations (1 iteration consists of a whole loop over all data) for(Int_t ic = 0; ic<ncoinc; ic++){ Coincidences->GetEntry(ic); if ( ic % 100000 == 0 ) cout << "Iter= " << iter << " Processing " << ic << " coincidences " << endl; posic = POS[ic]; POS[ic] = Arek(posic,det1_X,det1_Y,det1_Z,det2_X,det2_Y,det2_Z,IMG,SENS); } // COINCIDENCES } // ITERATIONS f1->Close(); fichero = fopen("image.raw","wb"); if(fichero){ fwrite(IMG,sizeof(int),nvoxels,fichero); fclose(fichero); } // === STEP 6 === FINAL SENSITIVY NORMALIZATION ==== for(Int_t iV=0; iV<nvoxels ; iV++){ if (SENS[iV]>0){ IMG_N[iV] = float(IMG[iV])/SENS[iV]; }else{ IMG_N[iV] = 0.; } } fichero = fopen("image_norm.raw","wb"); if(fichero){ fwrite(IMG_N,sizeof(float),nvoxels,fichero); fclose(fichero); } IMG_N = MedianIMG(IMG_N); fichero = fopen("image_norm_med.raw","wb"); if(fichero){ fwrite(IMG_N,sizeof(float),nvoxels,fichero); fclose(fichero); } } // END
// Visualise the decision boundary of a trained TMVA classifier in the plane
// of two input variables (v0, v1).
//
// Books MVA "M1" from `weightFile`, evaluates it on a 50x50 grid spanning
// the data range, overlays the signal/background scatter from `dataFileName`
// (trees "TreeS"/"TreeB"), scans the MVA response for the cut with the best
// separation gain (cross entropy), and writes the histograms to
// TMVAPlotDecisionBoundary.root.
void PlotDecisionBoundary( TString weightFile = "weights/TMVAClassification_BDT.weights.xml",TString v0="var0", TString v1="var1", TString dataFileName = "/home/hvoss/TMVA/TMVA_data/data/data_circ.root")
{
   // this loads the library
   TMVA::Tools::Instance();

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // create the Reader object; the variables must correspond in name and
   // type to those given in the weight file(s) used
   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );
   Float_t var0, var1;
   reader->AddVariable( v0, &var0 );
   reader->AddVariable( v1, &var1 );

   // book the MVA method
   reader->BookMVA( "M1", weightFile );

   TFile *f = new TFile(dataFileName);
   TTree *signal     = (TTree*)f->Get("TreeS");
   TTree *background = (TTree*)f->Get("TreeB");

   //Declaration of leaves types
   Float_t svar0;
   Float_t svar1;
   Float_t bvar0;
   Float_t bvar1;
   // just in case you have weight defined, also set these branchaddresses
   Float_t sWeight=1.0;
   Float_t bWeight=1.0*signal->GetEntries()/background->GetEntries();

   // Set branch addresses.
   signal->SetBranchAddress(v0,&svar0);
   signal->SetBranchAddress(v1,&svar1);
   background->SetBranchAddress(v0,&bvar0);
   background->SetBranchAddress(v1,&bvar1);

   // common plotting range covering both samples
   UInt_t nbin = 50;
   Float_t xmax = signal->GetMaximum(v0.Data());
   Float_t xmin = signal->GetMinimum(v0.Data());
   Float_t ymax = signal->GetMaximum(v1.Data());
   Float_t ymin = signal->GetMinimum(v1.Data());
   xmax = TMath::Max(xmax,background->GetMaximum(v0.Data()));
   xmin = TMath::Min(xmin,background->GetMinimum(v0.Data()));
   ymax = TMath::Max(ymax,background->GetMaximum(v1.Data()));
   ymin = TMath::Min(ymin,background->GetMinimum(v1.Data()));

   TH2D *hs=new TH2D("hs","",nbin,xmin,xmax,nbin,ymin,ymax);
   TH2D *hb=new TH2D("hb","",nbin,xmin,xmax,nbin,ymin,ymax);
   hs->SetXTitle(v0);
   hs->SetYTitle(v1);
   hb->SetXTitle(v0);
   hb->SetYTitle(v1);
   hs->SetMarkerColor(4);
   hb->SetMarkerColor(2);

   // MVA response evaluated on the bin centres of the 2D grid
   TH2F * hist = new TH2F( "MVA", "MVA", nbin,xmin,xmax,nbin,ymin,ymax);

   Float_t MinMVA=10000, MaxMVA=-100000;
   // FIX: loop indices made unsigned to match nbin (avoids signed/unsigned
   // comparison).
   for (UInt_t ibin=1; ibin<nbin+1; ibin++){
      for (UInt_t jbin=1; jbin<nbin+1; jbin++){
         var0 = hs->GetXaxis()->GetBinCenter(ibin);
         var1 = hs->GetYaxis()->GetBinCenter(jbin);
         Float_t mvaVal=reader->EvaluateMVA( "M1" ) ;
         if (MinMVA>mvaVal) MinMVA=mvaVal;
         if (MaxMVA<mvaVal) MaxMVA=mvaVal;
         hist->SetBinContent(ibin,jbin, mvaVal);
      }
   }

   // fine histograms for the cumulative (cut-efficiency) scan
   const Int_t nValBins=100;
   Double_t sum = 0.;
   TH1F *mvaS= new TH1F("mvaS","",nValBins,MinMVA,MaxMVA);
   TH1F *mvaB= new TH1F("mvaB","",nValBins,MinMVA,MaxMVA);
   TH1F *mvaSC= new TH1F("mvaSC","",nValBins,MinMVA,MaxMVA);
   TH1F *mvaBC= new TH1F("mvaBC","",nValBins,MinMVA,MaxMVA);

   Long64_t nentries;
   // FIX: the original used the undeclared names TreeS/TreeB here (this only
   // worked through CINT auto-resolving the in-file tree names); use the
   // `signal`/`background` pointers fetched above.
   nentries = signal->GetEntries();
   for (Long64_t is=0; is<nentries;is++) {
      signal->GetEntry(is);
      sum += sWeight;
      var0 = svar0;
      var1 = svar1;
      Float_t mvaVal=reader->EvaluateMVA( "M1" ) ;
      hs->Fill(svar0,svar1);
      mvaS->Fill(mvaVal,sWeight);
   }
   nentries = background->GetEntries();
   for (Long64_t ib=0; ib<nentries;ib++) {
      background->GetEntry(ib);
      sum += bWeight;
      var0 = bvar0;
      var1 = bvar1;
      Float_t mvaVal=reader->EvaluateMVA( "M1" ) ;
      hb->Fill(bvar0,bvar1);
      mvaB->Fill(mvaVal,bWeight);
   }

   //SeparationBase *sepGain = new MisClassificationError();
   //SeparationBase *sepGain = new GiniIndex();
   SeparationBase *sepGain = new CrossEntropy();

   Double_t sTot = mvaS->GetSum();
   Double_t bTot = mvaB->GetSum();

   // cumulative distributions; scan for the cut with the best separation gain
   mvaSC->SetBinContent(1,mvaS->GetBinContent(1));
   mvaBC->SetBinContent(1,mvaB->GetBinContent(1));
   Double_t sSel=mvaSC->GetBinContent(1);
   Double_t bSel=mvaBC->GetBinContent(1);
   Double_t separationGain=sepGain->GetSeparationGain(sSel,bSel,sTot,bTot);
   Double_t mvaCut=mvaSC->GetBinCenter(1);
   // 1 if mva > mvaCut --> Signal and -1 if mva < mvaCut (i.e. mva*-1 > mvaCut*-1) --> Signal
   Double_t mvaCutOrientation=1;
   // NOTE(review): the scan stops at nValBins-1 (last bin never considered)
   // and only accepts cuts with bin centre < 0 — confirm these restrictions
   // are intended.
   for (Int_t ibin=2;ibin<nValBins;ibin++){
      mvaSC->SetBinContent(ibin,mvaS->GetBinContent(ibin)+mvaSC->GetBinContent(ibin-1));
      mvaBC->SetBinContent(ibin,mvaB->GetBinContent(ibin)+mvaBC->GetBinContent(ibin-1));
      sSel=mvaSC->GetBinContent(ibin);
      bSel=mvaBC->GetBinContent(ibin);
      if (separationGain < sepGain->GetSeparationGain(sSel,bSel,sTot,bTot)
          && mvaSC->GetBinCenter(ibin)<0){
         separationGain = sepGain->GetSeparationGain(sSel,bSel,sTot,bTot);
         mvaCut=mvaSC->GetBinCenter(ibin);
         if (sSel/bSel > (sTot-sSel)/(bTot-bSel)) mvaCutOrientation=-1;
         else                                     mvaCutOrientation=1;
      }
   }

   cout << "Min="<<MinMVA << " Max=" << MaxMVA
        << " sTot=" << sTot << " bTot=" << bTot
        << " sepGain="<<separationGain << " cut=" << mvaCut
        << " cutOrientation="<<mvaCutOrientation << endl;

   delete reader;

   gStyle->SetPalette(1);
   plot(hs,hb,hist ,v0,v1,mvaCut);

   TCanvas *cm=new TCanvas ("cm","",900,1200);
   cm->cd();
   cm->Divide(1,2);
   cm->cd(1);
   mvaS->SetLineColor(4);
   mvaB->SetLineColor(2);
   mvaS->Draw();
   mvaB->Draw("same");
   cm->cd(2);
   mvaSC->SetLineColor(4);
   mvaBC->SetLineColor(2);
   mvaBC->Draw();
   mvaSC->Draw("same");

   //
   // write histograms
   //
   TFile *target = new TFile( "TMVAPlotDecisionBoundary.root","RECREATE" );
   hs->Write();
   hb->Write();
   hist->Write();
   target->Close();
}
void DrawErrors(Int_t NBINS=50, const char * filename="spin.root") { TFile * infile = new TFile(filename,"READ"); TTree * tr = (TTree*) infile->Get("asy"); // set binning // -- *_div = lower limit of each bin; last one is the upper limit // -- *_width = bin width Int_t phi_bins0, eta_bins0, pt_bins0, en_bins0; if(gSystem->Getenv("PHI")==NULL){fprintf(stderr,"ERROR: source env vars\n"); return;}; sscanf(gSystem->Getenv("PHI"),"%d",&phi_bins0); sscanf(gSystem->Getenv("ETA"),"%d",&eta_bins0); sscanf(gSystem->Getenv("PT"),"%d",&pt_bins0); sscanf(gSystem->Getenv("EN"),"%d",&en_bins0); const Double_t pi=3.1415; const Double_t phi_bins=phi_bins0; const Double_t eta_bins=eta_bins0; const Double_t eta_low=2.6; const Double_t eta_high=4.2; const Double_t pt_bins=pt_bins0; const Double_t pt_low=0; const Double_t pt_high=10; const Double_t en_bins=en_bins0; const Double_t en_low=0; const Double_t en_high=100; const Double_t phi_low = (-1*pi)-0.1; const Double_t phi_high = pi+0.1; Double_t phi_div[phi_bins+1]; Double_t phi_width = (phi_high - phi_low)/phi_bins; for(Int_t i=0; i<phi_bins; i++) phi_div[i] = phi_low + i * phi_width; Double_t eta_div[eta_bins+1]; Double_t eta_width = (eta_high - eta_low)/eta_bins; for(Int_t i=0; i<eta_bins; i++) eta_div[i] = eta_low + i * eta_width; Double_t pt_div[pt_bins+1]; Double_t pt_width = (pt_high - pt_low)/pt_bins; for(Int_t i=0; i<pt_bins; i++) pt_div[i] = pt_low + i * pt_width; Double_t en_div[en_bins+1]; Double_t en_width = (en_high - en_low)/en_bins; for(Int_t i=0; i<en_bins; i++) en_div[i] = en_low + i * en_width; eta_div[eta_bins] = eta_high; pt_div[pt_bins] = pt_high; en_div[en_bins] = en_high; Float_t R3_min, R3_max; Float_t R3_err_min, R3_err_max; Float_t PB_min, PB_max; Float_t PB_err_min, PB_err_max; Float_t PY_min, PY_max; Float_t PY_err_min, PY_err_max; Float_t A_LL_min, A_LL_max; Float_t A_LL_err_min, A_LL_err_max; R3_min = tr->GetMinimum("R3"); R3_max = tr->GetMaximum("R3"); R3_err_min = tr->GetMinimum("R3_err"); R3_err_max 
= tr->GetMaximum("R3_err"); PB_min = tr->GetMinimum("PB"); PB_max = tr->GetMaximum("PB"); PB_err_min = tr->GetMinimum("PB_err"); PB_err_max = tr->GetMaximum("PB_err"); PY_min = tr->GetMinimum("PY"); PY_max = tr->GetMaximum("PY"); PY_err_min = tr->GetMinimum("PY_err"); PY_err_max = tr->GetMaximum("PY_err"); A_LL_min = tr->GetMinimum("A_LL"); A_LL_max = tr->GetMaximum("A_LL"); A_LL_err_min = tr->GetMinimum("A_LL_err"); A_LL_err_max = tr->GetMaximum("A_LL_err"); TH2F * R3_dist = new TH2F("R3_dist","#sigma(R_{3}) vs. R_{3}", NBINS,R3_min,R3_max,NBINS,R3_err_min,R3_err_max); TH2F * PB_dist = new TH2F("PB_dist","#sigma(P_{B}) vs. P_{B}", NBINS,PB_min,PB_max,NBINS,PB_err_min,PB_err_max); TH2F * PY_dist = new TH2F("PY_dist","#sigma(P_{Y}) vs. P_{Y}", NBINS,PY_min,PY_max,NBINS,PY_err_min,PY_err_max); TH2F * A_LL_dist[eta_bins][pt_bins][en_bins]; TH2F * A_LL_dist_all = new TH2F("A_LL_dist_all","#sigma(A_{LL}) vs. A_{LL} :: all bins", NBINS,A_LL_min,A_LL_max,NBINS,A_LL_err_min,A_LL_err_max); char zero_bin[128]; strcpy(zero_bin,"phi_bin==1 && eta_bin==0 && pt_bin==0 && pt_bin==0"); tr->Project("R3_dist","R3_err:R3",zero_bin); tr->Project("PB_dist","PB_err:PB",zero_bin); tr->Project("PY_dist","PY_err:PY",zero_bin); tr->Project("A_LL_dist_all","A_LL_err:A_LL"); TFile * outfile = new TFile("error.root","RECREATE"); R3_dist->Write(); PB_dist->Write(); PY_dist->Write(); A_LL_dist_all->Write(); char A_LL_dist_n[eta_bins][pt_bins][en_bins][256]; char A_LL_dist_t[eta_bins][pt_bins][en_bins][256]; char A_LL_cut[eta_bins][pt_bins][en_bins][256]; for(Int_t g=0; g<eta_bins; g++) { for(Int_t p=0; p<pt_bins; p++) { for(Int_t e=0; e<en_bins; e++) { sprintf(A_LL_cut[g][p][e],"eta_bin==%d && pt_bin==%d && en_bin==%d",g,p,e); sprintf(A_LL_dist_n[g][p][e],"A_LL_dist_g%d_p%d_e%d",g,p,e); sprintf(A_LL_dist_t[g][p][e], "#sigma(A_{LL}) vs. 
A_{LL} :: #eta#in[%.2f,%.2f), p_{T}#in[%.2f,%.2f), E#in[%.2f,%.2f)", eta_div[g],eta_div[g+1],pt_div[p],pt_div[p+1],en_div[e],en_div[e+1]); A_LL_dist[g][p][e] = new TH2F(A_LL_dist_n[g][p][e],A_LL_dist_t[g][p][e], NBINS,A_LL_min,A_LL_max,NBINS,A_LL_err_min,A_LL_err_max); tr->Project(A_LL_dist_n[g][p][e],"A_LL_err:A_LL",A_LL_cut[g][p][e]); A_LL_dist[g][p][e]->Write(); }; }; }; }
/// Return the maximum value taken by branch `name` in the tree read from
/// the member `file`.
double QAnalysis::GetMaximum(string name)
{
  TTree* eventTree = ReadTree(file);
  return eventTree->GetMaximum(name.c_str());
}
void nbx_step_check(const char * filename="counts.root") { TFile * infile = new TFile(filename,"READ"); TTree * tr = (TTree*) infile->Get("sca"); Int_t NRUNS_tmp = tr->GetMaximum("i"); const Int_t NRUNS = NRUNS_tmp; TH1D * h[NRUNS]; char h_n[NRUNS][256]; char h_t[NRUNS][256]; char cut[NRUNS][256]; TCanvas * c = new TCanvas("c","c",1400,1000); Double_t max; Double_t step[NRUNS][120]; Double_t step_e[NRUNS][120]; Double_t bx_arr[120]; Double_t bx_arr_e[120]; for(Int_t b=0; b<120; b++) { bx_arr[b]=b; bx_arr_e[b]=0; }; TGraphErrors * tg[NRUNS]; for(Int_t i=0; i<NRUNS; i++) { printf("i=%d\n",i); sprintf(h_n[i],"nbx_bx_%d",i+1); sprintf(h_t[i],"N_{bx} vs. bXing for i=%d",i+1); h[i] = new TH1D(h_n[i],h_n[i],120,0,120); sprintf(cut[i],"tot_bx*(i==%d)",i+1); tr->Project(h_n[i],"bx",cut[i]); max = h[i]->GetMaximum(); //h[i]->Scale(1/max); for(Int_t b=1; b<=119; b++) { step[i][b] = h[i]->GetBinContent(b) - h[i]->GetBinContent(b+1); step_e[i][b] = sqrt(step[i][b]); }; tg[i] = new TGraphErrors(120,bx_arr,step[i],bx_arr_e,step_e[i]); c->Clear(); c->SetGrid(1,1); tg[i]->SetMarkerStyle(kFullCircle); tg[i]->Draw("ape"); if(i==0) c->Print("nbx_step_check.pdf(","pdf"); else if(i+1==NRUNS) c->Print("nbx_step_check.pdf)","pdf"); else c->Print("nbx_step_check.pdf"); }; };
void eff_IdHlt(const TString configFile, TString triggerSetString) { // --------------------------------- // Preliminary checks // --------------------------------- // verify whether it was a compilation check if (configFile.Contains("_DebugRun_") || triggerSetString.Contains("_DebugRun_")) { std::cout << "eff_IdHlt: _DebugRun_ detected. Terminating the script\n"; return; } // fast check TriggerConstantSet triggerSet=DetermineTriggerSet(triggerSetString); assert ( triggerSet != TrigSet_UNDEFINED ); // --------------------------------- // Normal execution // --------------------------------- gBenchmark->Start("eff_IdHlt"); //-------------------------------------------------------------------------------------------------------------- // Settings //============================================================================================================== Double_t massLow = 60; Double_t massHigh = 120; // Read in the configuratoin file TString sampleTypeString = ""; TString effTypeString = ""; TString calcMethodString = ""; TString etBinningString = ""; TString etaBinningString = ""; TString dirTag; vector<TString> ntupleFileNames; ifstream ifs; ifs.open(configFile.Data()); if (!ifs.is_open()) { std::cout << "tried to open the configuration file <" << configFile << ">\n"; assert(ifs.is_open()); } string line; Int_t state=0; while(getline(ifs,line)) { if(line[0]=='#') continue; if(line[0]=='%') break; if(state==0){ // Read 1st line of content: data or MC? 
sampleTypeString = TString(line); state++; }else if(state==1) { // Read 2d content line: efficiency type string effTypeString = TString(line); state++; }else if(state==2) { // Read 3d content line: fitting mode calcMethodString = TString(line); state++; }else if(state==3) { // Read 4th content line: SC ET binning etBinningString = TString(line); state++; }else if(state==4) { // Read 5th content line: SC eta binning etaBinningString = TString(line); state++; }else if(state==5) { // Read 5th content line: SC eta binning dirTag = TString(line); state++; }else if(state==6) { ntupleFileNames.push_back(TString(line)); } } int calcMethod = 0; if(calcMethodString == "COUNTnCOUNT") calcMethod = COUNTnCOUNT; else if(calcMethodString == "COUNTnFIT") calcMethod = COUNTnFIT; else if(calcMethodString == "FITnFIT") calcMethod = FITnFIT; else assert(0); printf("Efficiency calculation method: %s\n", calcMethodString.Data()); int effType = 0; if(effTypeString == "ID") effType = ID; else if(effTypeString == "HLT") effType = HLT; else assert(0); printf("Efficiency type to measure: %s\n", effTypeString.Data()); int etBinning = 0; if(etBinningString == "ETBINS1") etBinning = ETBINS1; else if(etBinningString == "ETBINS5") etBinning = ETBINS5; else assert(0); printf("SC ET binning: %s\n", etBinningString.Data()); int etaBinning = 0; if(etaBinningString == "ETABINS1") etaBinning = ETABINS1; else if(etaBinningString == "ETABINS2") etaBinning = ETABINS2; else assert(0); printf("SC eta binning: %s\n", etaBinningString.Data()); int sample; if(sampleTypeString == "DATA") sample = DATA; else if(sampleTypeString == "MC") sample = MC; else assert(0); printf("Sample: %s\n", sampleTypeString.Data()); // Construct the trigger object TriggerSelection triggers(triggerSetString, (sample==DATA)?true:false, 0); if (effType==HLT) std::cout << "\tHLT efficiency calculation method " << triggers.hltEffCalcName() << ", triggerSet=" << triggers.triggerSetName() << "\n"; else 
triggers.hltEffCalcMethod(HLTEffCalc_2011Old); TRandom *rnd= new TRandom(); rnd->SetSeed(0); // The label is a string that contains the fields that are passed to // the function below, to be used to name files with the output later. TString label = getLabel(sample, effType, calcMethod, etBinning, etaBinning, triggers); //-------------------------------------------------------------------------------------------------------------- // Main analysis code //============================================================================================================== // // Set up histograms // TH1F* hMass = new TH1F("hMass" ,"",30,massLow,massHigh); TH1F* hMassTotal = new TH1F("hMassTotal","",30,massLow,massHigh); TH1F* hMassPass = new TH1F("hMassPass" ,"",30,massLow,massHigh); TH1F* hMassFail = new TH1F("hMassFail" ,"",30,massLow,massHigh); // Save MC templates if sample is MC TString tagAndProbeDir(TString("../root_files/tag_and_probe/")+dirTag); gSystem->mkdir(tagAndProbeDir,kTRUE); TFile *templatesFile = 0; vector<TH1F*> hPassTemplateV; vector<TH1F*> hFailTemplateV; if( sample != DATA) { // For simulation, we will be saving templates TString labelMC = getLabel(-1111, effType, calcMethod, etBinning, etaBinning, triggers); TString templatesLabel = tagAndProbeDir + TString("/mass_templates_")+labelMC+TString(".root"); templatesFile = new TFile(templatesLabel,"recreate"); for(int i=0; i<getNEtBins(etBinning); i++){ for(int j=0; j<getNEtaBins(etaBinning); j++){ TString hname = "hMassTemplate_Et"; hname += i; hname += "_eta"; hname += j; hPassTemplateV.push_back(new TH1F(hname+TString("_pass"),"",60,massLow,massHigh)); hFailTemplateV.push_back(new TH1F(hname+TString("_fail"),"",60,massLow,massHigh)); } } } else { // For data, we will be using templates, // however, if the request is COUNTnCOUNT, do nothing if( calcMethod != COUNTnCOUNT ){ TString labelMC = getLabel(-1111, effType, calcMethod, etBinning, etaBinning, triggers); TString templatesLabel = 
tagAndProbeDir+TString("/mass_templates_")+labelMC+TString(".root"); templatesFile = new TFile(templatesLabel); if( ! templatesFile->IsOpen() ) assert(0); } } // This file can be utilized in the future, but for now // opening it just removes complaints about memory resident // trees. No events are actually written. TFile *selectedEventsFile = new TFile("selectedEventsFile.root","recreate"); if (!selectedEventsFile) { assert(0); } TTree *passTree = new TTree("passTree","passTree"); Double_t storeMass, storeEt, storeEta; passTree->Branch("mass",&storeMass,"mass/D"); passTree->Branch("et",&storeEt ,"et/D"); passTree->Branch("eta",&storeEta ,"eta/D"); TTree *failTree = new TTree("failTree","failTree"); failTree->Branch("mass",&storeMass,"mass/D"); failTree->Branch("et",&storeEt ,"et/D"); failTree->Branch("eta",&storeEta ,"eta/D"); int nDivisions = getNEtBins(etBinning)*getNEtaBins(etaBinning); double ymax = 800; if(nDivisions <4 ) ymax = nDivisions * 200; TCanvas *c1 = MakeCanvas("c1","c1", 600, int(ymax)); c1->Divide(2,nDivisions); int eventsInNtuple = 0; int eventsAfterTrigger = 0; int totalCand = 0; int totalCandInMassWindow = 0; int totalCandInEtaAcceptance = 0; int totalCandEtAbove10GeV = 0; int totalCandMatchedToGen = 0; // Loop over files for(UInt_t ifile=0; ifile<ntupleFileNames.size(); ifile++){ // // Access samples and fill histograms // TFile *infile = 0; TTree *eventTree = 0; // Data structures to store info from TTrees mithep::TEventInfo *info = new mithep::TEventInfo(); mithep::TGenInfo *gen = new mithep::TGenInfo(); TClonesArray *dielectronArr = new TClonesArray("mithep::TDielectron"); // Read input file cout << "Processing " << ntupleFileNames[ifile] << "..." 
<< endl; infile = new TFile(ntupleFileNames[ifile]); assert(infile); // Get the TTrees eventTree = (TTree*)infile->Get("Events"); assert(eventTree); // Set branch address to structures that will store the info eventTree->SetBranchAddress("Info",&info); TBranch *infoBr = eventTree->GetBranch("Info"); // check whether the file is suitable for the requested run range UInt_t runNumMin = UInt_t(eventTree->GetMinimum("runNum")); UInt_t runNumMax = UInt_t(eventTree->GetMaximum("runNum")); std::cout << "runNumMin=" << runNumMin << ", runNumMax=" << runNumMax << "\n"; if (!triggers.validRunRange(runNumMin,runNumMax)) { std::cout << "... file contains uninteresting run range\n"; continue; } // Define other branches eventTree->SetBranchAddress("Dielectron",&dielectronArr); TBranch *dielectronBr = eventTree->GetBranch("Dielectron"); TBranch *genBr = 0; if(sample != DATA){ eventTree->SetBranchAddress("Gen",&gen); genBr = eventTree->GetBranch("Gen"); } // loop over events eventsInNtuple += eventTree->GetEntries(); for(UInt_t ientry=0; ientry<eventTree->GetEntries(); ientry++) { // for(UInt_t ientry=0; ientry<200000; ientry++) { // This is for faster turn-around in testing if(sample != DATA) genBr->GetEntry(ientry); // Check that the whole event has fired the appropriate trigger infoBr->GetEntry(ientry); /* Old code // For EPS2011 for both data and MC (starting from Summer11 production) // we use a special trigger for tag and probe that has second leg // unbiased with cuts at HLT ULong_t eventTriggerBit = kHLT_Ele17_CaloIdVT_CaloIsoVT_TrkIdT_TrkIsoVT_SC8_Mass30 | kHLT_Ele32_CaloIdL_CaloIsoVL_SC17; // The tag trigger bit matches the "electron" of the trigger we // use for this tag and probe study: electron+sc ULong_t tagTriggerObjectBit = kHLT_Ele17_CaloIdVT_CaloIsoVT_TrkIdT_TrkIsoVT_SC8_Mass30_EleObj | kHLT_Ele32_CaloIdL_CaloIsoVL_SC17_EleObj; // The probe trigger, however, is any of possibilities used in // the trigger that is used in the main analysis ULong_t 
probeTriggerObjectBit = kHLT_Ele17_CaloIdL_CaloIsoVL_Ele8_CaloIdL_CaloIsoVL_Ele1Obj | kHLT_Ele17_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_Ele8_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_Ele1Obj | kHLT_Ele17_CaloIdL_CaloIsoVL_Ele8_CaloIdL_CaloIsoVL_Ele2Obj | kHLT_Ele17_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_Ele8_CaloIdT_TrkIdVL_CaloIsoVL_TrkIsoVL_Ele2Obj; */ ULong_t eventTriggerBit= triggers.getEventTriggerBit_TagProbe(info->runNum); if(!(info->triggerBits & eventTriggerBit)) continue; // no trigger accept? Skip to next event... eventsAfterTrigger++; ULong_t tagTriggerObjectBit= triggers.getTagTriggerObjBit(info->runNum); ULong_t probeTriggerObjectBit_Tight= triggers.getProbeTriggerObjBit_Tight(info->runNum); ULong_t probeTriggerObjectBit_Loose= triggers.getProbeTriggerObjBit_Loose(info->runNum); ULong_t probeTriggerObjectBit= probeTriggerObjectBit_Tight | probeTriggerObjectBit_Loose; // loop through dielectrons dielectronArr->Clear(); dielectronBr->GetEntry(ientry); for(Int_t i=0; i<dielectronArr->GetEntriesFast(); i++) { totalCand++; const mithep::TDielectron *dielectron = (mithep::TDielectron*)((*dielectronArr)[i]); // Tag and probe is done around the Z peak if((dielectron->mass < massLow) || (dielectron->mass > massHigh)) continue; totalCandInMassWindow++; // // Exclude ECAL gap region (should already be done for ntuple, but just to make sure...) if((fabs(dielectron->scEta_1)>kGAP_LOW) && (fabs(dielectron->scEta_1)<kGAP_HIGH)) continue; if((fabs(dielectron->scEta_2)>kGAP_LOW) && (fabs(dielectron->scEta_2)<kGAP_HIGH)) continue; // ECAL acceptance cut on supercluster Et if((fabs(dielectron->scEta_1) > 2.5) || (fabs(dielectron->scEta_2) > 2.5)) continue; // outside eta range? Skip to next event... totalCandInEtaAcceptance++; // None of the electrons should be below 10 GeV if((dielectron->scEt_1 < 10) || (dielectron->scEt_2 < 10)) continue; // below supercluster ET cut? Skip to next event... 
totalCandEtAbove10GeV++; // Next, we will do a loose kinematic matching to generator level // info. // For the data, this is not needed and not done. We take all // candidates, and take care of background by fitting. // For MC, however, we do not fit, but count pass/fail events. // So we need to make sure there is no background. However, even // in the signal Z->ee MC sample there jets and therefore fake // electrons. So we drop all candidates that do not have both leptons // matched. // if( sample != DATA ) if( ! dielectronMatchedToGeneratorLevel(gen, dielectron) ) continue; totalCandMatchedToGen++; // ECAL driven: this condition is NOT applied // Preliminary selection is complete. Now work on tags and probes. TElectron *ele1 = extractElectron(dielectron, 1); TElectron *ele2 = extractElectron(dielectron, 2); bool isTag1 = isTag(ele1, tagTriggerObjectBit); bool isTag2 = isTag(ele2, tagTriggerObjectBit); // Any electron that made it here is eligible to be a probe // for ID cuts. bool isIDProbe1 = true; bool isIDProbe2 = true; bool isIDProbePass1 = passID(ele1); bool isIDProbePass2 = passID(ele2); // Probes for HLT cuts: bool isHLTProbe1 = passID(ele1); bool isHLTProbe2 = passID(ele2); bool isHLTProbePass1 = ( isHLTProbe1 && (ele1 ->hltMatchBits & probeTriggerObjectBit) ); bool isHLTProbePass2 = ( isHLTProbe2 && (ele2 ->hltMatchBits & probeTriggerObjectBit) ); // // Apply tag and probe, and accumulate counters or histograms // bool isProbe1 = false; bool isProbe2 = false; bool isProbePass1 = false; bool isProbePass2 = false; if( effType == ID ){ isProbe1 = isIDProbe1; isProbe2 = isIDProbe2; isProbePass1 = isIDProbePass1; isProbePass2 = isIDProbePass2; }else if( effType == HLT ){ isProbe1 = isHLTProbe1; isProbe2 = isHLTProbe2; isProbePass1 = isHLTProbePass1; isProbePass2 = isHLTProbePass2; if (triggers.useRandomTagTnPMethod(info->runNum)) { if (rnd->Uniform() <= 0.5) { // tag is 1st electron if (!isTag1) continue; isTag2=0; // ignore whether ele2 can be a tag } else { 
if (!isTag2) continue; isTag1=0; // ignore whether ele1 can be a tag } } }else { printf("ERROR: unknown efficiency type requested\n"); } storeMass = dielectron->mass; // First electron is the tag, second is the probe if( isTag1 && isProbe2){ // total probes hMassTotal->Fill(dielectron->mass); storeEt = dielectron->scEt_2; storeEta = dielectron->scEta_2; int templateBin = getTemplateBin( findEtBin(storeEt,etBinning), findEtaBin(storeEta,etaBinning), etaBinning); if( isProbePass2 ){ // passed hMassPass->Fill(dielectron->mass); passTree->Fill(); if(sample != DATA && templateBin != -1) hPassTemplateV[templateBin]->Fill(dielectron->mass); }else{ // fail hMassFail->Fill(dielectron->mass); failTree->Fill(); if(sample != DATA && templateBin != -1) hFailTemplateV[templateBin]->Fill(dielectron->mass); } } // Second electron is the tag, first is the probe if( isTag2 && isProbe1 ){ // total probes hMassTotal->Fill(dielectron->mass); storeEt = dielectron->scEt_1; storeEta = dielectron->scEta_1; int templateBin = getTemplateBin( findEtBin(storeEt,etBinning), findEtaBin(storeEta,etaBinning), etaBinning); if( isProbePass1 ){ // passed hMassPass->Fill(dielectron->mass); passTree->Fill(); if(sample != DATA && templateBin != -1) hPassTemplateV[templateBin]->Fill(dielectron->mass); }else{ // fail hMassFail->Fill(dielectron->mass); failTree->Fill(); if(sample != DATA && templateBin != -1) hFailTemplateV[templateBin]->Fill(dielectron->mass); } } // In case the full selection is applied: // if( !(isTag1 && ele2_passID) && !(isTag2 && ele1_passID) ) continue; if( !(isTag1 && isIDProbePass2) && !(isTag2 && isIDProbePass1) ) continue; // if( !(isTag1) && !(isTag2) ) continue; // Fill histogram hMass->Fill(dielectron->mass); } // end loop over dielectron candidates } // end loop over events delete infile; infile=0; eventTree=0; delete gen; delete info; delete dielectronArr; } // end loop over files // // Efficiency analysis // // printf("Number of regular candidates: %15.0f\n", 
hMass->GetSumOfWeights()); printf("Total events in ntuple %15d\n",eventsInNtuple); printf(" events after event level trigger cut %15d\n",eventsAfterTrigger); printf("\nTotal candidates (no cuts) %15d\n",totalCand); printf(" candidates in 60-120 mass window %15d\n",totalCandInMassWindow); printf(" candidates witheta 0-1.4442, 1.566-2.5 %15d\n",totalCandInEtaAcceptance); printf(" candidates, both electrons above 10 GeV %15d\n",totalCandEtAbove10GeV); printf(" candidates matched to GEN level (if MC) %15d\n",totalCandMatchedToGen); printf("\nNumber of probes, total %15.0f\n", hMassTotal->GetSumOfWeights()); printf("Number of probes, passed %15.0f\n", hMassPass->GetSumOfWeights()); printf("Number of probes, failed %15.0f\n", hMassFail->GetSumOfWeights()); // Human-readbale text file to store measured efficiencies TString reslog = tagAndProbeDir+TString("/efficiency_TnP_")+label+TString(".txt"); ofstream effOutput; effOutput.open(reslog); // Print into the results file the header. effOutput << "Efficiency calculation method: " << calcMethodString.Data() << endl; effOutput << "Efficiency type to measure: " << effTypeString.Data() << endl; effOutput << "SC ET binning: " << etBinningString.Data() << endl; effOutput << "SC eta binning: " << etaBinningString.Data() << endl; effOutput << "Sample: " << sampleTypeString.Data() << endl; effOutput << "Files processed: " << endl; for(UInt_t i=0; i<ntupleFileNames.size(); i++) effOutput << " " << ntupleFileNames[i].Data() << endl; // ROOT file to store measured efficiencies in ROOT format TString resroot = tagAndProbeDir+TString("/efficiency_TnP_")+label+TString(".root"); TFile *resultsRootFile = new TFile(resroot,"recreate"); // Fit log TString fitlogname = tagAndProbeDir+TString("/efficiency_TnP_")+label+TString("_fitlog.dat"); ofstream fitLog; fitLog.open(fitlogname); // // Find efficiency // bool useTemplates = false; if(sample == DATA && effType == ID && (calcMethod == COUNTnFIT || FITnFIT) ) useTemplates = true; int 
NsetBins=120; bool isRECO=0; const char* setBinsType="cache"; measureEfficiency(passTree, failTree, calcMethod, etBinning, etaBinning, c1, effOutput, fitLog, useTemplates, templatesFile, resultsRootFile, NsetBins, isRECO, setBinsType, dirTag, triggers.triggerSetName()); effOutput.close(); fitLog.close(); TString command = "cat "; command += reslog; system(command.Data()); TString fitpicname = tagAndProbeDir+TString("/efficiency_TnP_")+label+TString("_fit.png"); c1->SaveAs(fitpicname); // Save MC templates if(sample != DATA){ templatesFile->cd(); for(int i=0; i<getNEtBins(etBinning); i++){ for(int j=0; j<getNEtaBins(etaBinning); j++){ int templateBin = getTemplateBin( i, j, etaBinning); hPassTemplateV[templateBin]->Write(); hFailTemplateV[templateBin]->Write(); } } templatesFile->Close(); } gBenchmark->Show("eff_IdHlt"); }
void correlations(const char * filename="counts.root") { // read data tree TFile * infile = new TFile(filename,"READ"); TTree * tr = (TTree*) infile->Get("sca"); Int_t index,t,runnum; Int_t bbce,bbcw,bbcx; Int_t zdce,zdcw,zdcx; Int_t vpde,vpdw,vpdx; tr->SetBranchAddress("t",&t); tr->SetBranchAddress("i",&index); tr->SetBranchAddress("runnum",&runnum); tr->SetBranchAddress("bbce",&bbce); tr->SetBranchAddress("bbcw",&bbcw); tr->SetBranchAddress("bbcx",&bbcx); tr->SetBranchAddress("zdce",&zdce); tr->SetBranchAddress("zdcw",&zdcw); tr->SetBranchAddress("zdcx",&zdcx); tr->SetBranchAddress("vpde",&vpde); tr->SetBranchAddress("vpdw",&vpdw); tr->SetBranchAddress("vpdx",&vpdx); // determine maximum trigger rates /* Float_t bbce_max_rate, bbcw_max_rate, bbcx_max_rate; Float_t zdce_max_rate, zdcw_max_rate, zdcx_max_rate; Float_t vpde_max_rate, vpdw_max_rate, vpdx_max_rate; Float_t bbce_rate, bbcw_rate, bbcx_rate; Float_t zdce_rate, zdcw_rate, zdcx_rate; Float_t vpde_rate, vpdw_rate, vpdx_rate; bbce_max_rate = bbcw_max_rate = bbcx_max_rate = 0; zdce_max_rate = zdcw_max_rate = zdcx_max_rate = 0; vpde_max_rate = vpdw_max_rate = vpdx_max_rate = 0; for(Int_t i=0; i<tr->GetEntries(); i++) { tr->GetEntry(i); bbce_rate = ((Float_t)bbce)/((Float_t)t); bbcw_rate = ((Float_t)bbcw)/((Float_t)t); bbcx_rate = ((Float_t)bbcx)/((Float_t)t); zdce_rate = ((Float_t)zdce)/((Float_t)t); zdcw_rate = ((Float_t)zdcw)/((Float_t)t); zdcx_rate = ((Float_t)zdcx)/((Float_t)t); vpde_rate = ((Float_t)vpde)/((Float_t)t); vpdw_rate = ((Float_t)vpdw)/((Float_t)t); vpdx_rate = ((Float_t)vpdx)/((Float_t)t); bbce_max_rate = (bbce_rate > bbce_max_rate) ? bbce_rate : bbce_max_rate; bbcw_max_rate = (bbcw_rate > bbcw_max_rate) ? bbcw_rate : bbcw_max_rate; bbcx_max_rate = (bbcx_rate > bbcx_max_rate) ? bbcx_rate : bbcx_max_rate; zdce_max_rate = (zdce_rate > zdce_max_rate) ? zdce_rate : zdce_max_rate; zdcw_max_rate = (zdcw_rate > zdcw_max_rate) ? zdcw_rate : zdcw_max_rate; zdcx_max_rate = (zdcx_rate > zdcx_max_rate) ? 
zdcx_rate : zdcx_max_rate; vpde_max_rate = (vpde_rate > vpde_max_rate) ? vpde_rate : vpde_max_rate; vpdw_max_rate = (vpdw_rate > vpdw_max_rate) ? vpdw_rate : vpdw_max_rate; vpdx_max_rate = (vpdx_rate > vpdx_max_rate) ? vpdx_rate : vpdx_max_rate; }; Float_t factor=0.1; bbce_max_rate += factor*bbce_max_rate; bbcw_max_rate += factor*bbcw_max_rate; bbcx_max_rate += factor*bbcx_max_rate; zdce_max_rate += factor*zdce_max_rate; zdcw_max_rate += factor*zdcw_max_rate; zdcx_max_rate += factor*zdcx_max_rate; vpde_max_rate += factor*vpde_max_rate; vpdw_max_rate += factor*vpdw_max_rate; vpdx_max_rate += factor*vpdx_max_rate; // initialise correlation histograms const Int_t NBINS = 100; TH2F * bbc_ew = new TH2F("bbc_ew","BBCE vs BBCW Rate Correlation",NBINS,0,bbcw_max_rate,NBINS,0,bbce_max_rate); TH2F * bbc_ex = new TH2F("bbc_ex","BBCE vs BBCX Rate Correlation",NBINS,0,bbcx_max_rate,NBINS,0,bbce_max_rate); TH2F * bbc_wx = new TH2F("bbc_wx","BBCW vs BBCX Rate Correlation",NBINS,0,bbcx_max_rate,NBINS,0,bbcw_max_rate); TH2F * zdc_ew = new TH2F("zdc_ew","ZDCE vs ZDCW Rate Correlation",NBINS,0,zdcw_max_rate,NBINS,0,zdce_max_rate); TH2F * zdc_ex = new TH2F("zdc_ex","ZDCE vs ZDCX Rate Correlation",NBINS,0,zdcx_max_rate,NBINS,0,zdce_max_rate); TH2F * zdc_wx = new TH2F("zdc_wx","ZDCW vs ZDCX Rate Correlation",NBINS,0,zdcx_max_rate,NBINS,0,zdcw_max_rate); TH2F * vpd_ew = new TH2F("vpd_ew","VPDE vs VPDW Rate Correlation",NBINS,0,vpdw_max_rate,NBINS,0,vpde_max_rate); TH2F * vpd_ex = new TH2F("vpd_ex","VPDE vs VPDX Rate Correlation",NBINS,0,vpdx_max_rate,NBINS,0,vpde_max_rate); TH2F * vpd_wx = new TH2F("vpd_wx","VPDW vs VPDX Rate Correlation",NBINS,0,vpdx_max_rate,NBINS,0,vpdw_max_rate); TH2F * bz_e = new TH2F("bz_e","BBCE vs ZDCE Rate Correlation",NBINS,0,zdce_max_rate,NBINS,0,bbce_max_rate); TH2F * bz_w = new TH2F("bz_w","BBCW vs ZDCW Rate Correlation",NBINS,0,zdcw_max_rate,NBINS,0,bbcw_max_rate); TH2F * bz_x = new TH2F("bz_x","BBCX vs ZDCX Rate 
Correlation",NBINS,0,zdcx_max_rate,NBINS,0,bbcx_max_rate); TH2F * bv_e = new TH2F("bv_e","BBCE vs VPDE Rate Correlation",NBINS,0,vpde_max_rate,NBINS,0,bbce_max_rate); TH2F * bv_w = new TH2F("bv_w","BBCW vs VPDW Rate Correlation",NBINS,0,vpdw_max_rate,NBINS,0,bbcw_max_rate); TH2F * bv_x = new TH2F("bv_x","BBCX vs VPDX Rate Correlation",NBINS,0,vpdx_max_rate,NBINS,0,bbcx_max_rate); TH2F * vz_e = new TH2F("vz_e","VPDE vs ZDCE Rate Correlation",NBINS,0,zdce_max_rate,NBINS,0,vpde_max_rate); TH2F * vz_w = new TH2F("vz_w","VPDW vs ZDCW Rate Correlation",NBINS,0,zdcw_max_rate,NBINS,0,vpdw_max_rate); TH2F * vz_x = new TH2F("vz_x","VPDX vs ZDCX Rate Correlation",NBINS,0,zdcx_max_rate,NBINS,0,vpdx_max_rate); // projections tr->Project("bbc_ew","bbce/t:bbcw/t","bbce>0 && bbcw>0 && t>0"); tr->Project("bbc_ex","bbce/t:bbcx/t","bbce>0 && bbcx>0 && t>0"); tr->Project("bbc_wx","bbcw/t:bbcx/t","bbcw>0 && bbcx>0 && t>0"); tr->Project("zdc_ew","zdce/t:zdcw/t","zdce>0 && zdcw>0 && t>0"); tr->Project("zdc_ex","zdce/t:zdcx/t","zdce>0 && zdcx>0 && t>0"); tr->Project("zdc_wx","zdcw/t:zdcx/t","zdcw>0 && zdcx>0 && t>0"); tr->Project("vpd_ew","vpde/t:vpdw/t","vpde>0 && vpdw>0 && t>0"); tr->Project("vpd_ex","vpde/t:vpdx/t","vpde>0 && vpdx>0 && t>0"); tr->Project("vpd_wx","vpdw/t:vpdx/t","vpdw>0 && vpdx>0 && t>0"); tr->Project("bz_e","bbce/t:zdce/t","bbce>0 && zdce>0 && t>0"); tr->Project("bz_w","bbcw/t:zdcw/t","bbcw>0 && zdcw>0 && t>0"); tr->Project("bz_x","bbcx/t:zdcx/t","bbcx>0 && zdcx>0 && t>0"); tr->Project("bv_e","bbce/t:vpde/t","bbce>0 && vpde>0 && t>0"); tr->Project("bv_w","bbcw/t:vpdw/t","bbcw>0 && vpdw>0 && t>0"); tr->Project("bv_x","bbcx/t:vpdx/t","bbcx>0 && vpdx>0 && t>0"); tr->Project("vz_e","vpde/t:zdce/t","vpde>0 && zdce>0 && t>0"); tr->Project("vz_w","vpdw/t:zdcw/t","vpdw>0 && zdcw>0 && t>0"); tr->Project("vz_x","vpdx/t:zdcx/t","vpdx>0 && zdcx>0 && t>0"); */ // define run total variables (total scaler counts in a single run) Int_t IMAX_tmp = tr->GetMaximum("i"); const Int_t IMAX 
= IMAX_tmp; Long_t bbce_total[IMAX]; Long_t bbcw_total[IMAX]; Long_t bbcx_total[IMAX]; Long_t zdce_total[IMAX]; Long_t zdcw_total[IMAX]; Long_t zdcx_total[IMAX]; Long_t vpde_total[IMAX]; Long_t vpdw_total[IMAX]; Long_t vpdx_total[IMAX]; Int_t time[IMAX]; for(Int_t i=0; i<IMAX; i++) { bbce_total[i] = 0; bbcw_total[i] = 0; bbcx_total[i] = 0; zdce_total[i] = 0; zdcw_total[i] = 0; zdcx_total[i] = 0; vpde_total[i] = 0; vpdw_total[i] = 0; vpdx_total[i] = 0; }; // tree loop --> fills run totals arrays Int_t ii; for(Int_t i=0; i<tr->GetEntries(); i++) { tr->GetEntry(i); ii = index-1; // run index starts at 1; arrays start at 0 time[ii] = t; bbce_total[ii] += bbce; bbcw_total[ii] += bbcw; bbcx_total[ii] += bbcx; zdce_total[ii] += zdce; zdcw_total[ii] += zdcw; zdcx_total[ii] += zdcx; vpde_total[ii] += vpde; vpdw_total[ii] += vpdw; vpdx_total[ii] += vpdx; }; // compute trigger rates & and max trigger rates Float_t bbce_rate[IMAX], bbcw_rate[IMAX], bbcx_rate[IMAX]; Float_t zdce_rate[IMAX], zdcw_rate[IMAX], zdcx_rate[IMAX]; Float_t vpde_rate[IMAX], vpdw_rate[IMAX], vpdx_rate[IMAX]; Float_t bbce_rate_max, bbcw_rate_max, bbcx_rate_max; Float_t zdce_rate_max, zdcw_rate_max, zdcx_rate_max; Float_t vpde_rate_max, vpdw_rate_max, vpdx_rate_max; bbce_rate_max = bbcw_rate_max = bbcx_rate_max = 0; zdce_rate_max = zdcw_rate_max = zdcx_rate_max = 0; vpde_rate_max = vpdw_rate_max = vpdx_rate_max = 0; for(Int_t i=0; i<IMAX; i++) { bbce_rate[i] = ((Float_t) bbce_total[i])/((Float_t) time[i]); bbcw_rate[i] = ((Float_t) bbcw_total[i])/((Float_t) time[i]); bbcx_rate[i] = ((Float_t) bbcx_total[i])/((Float_t) time[i]); zdce_rate[i] = ((Float_t) zdce_total[i])/((Float_t) time[i]); zdcw_rate[i] = ((Float_t) zdcw_total[i])/((Float_t) time[i]); zdcx_rate[i] = ((Float_t) zdcx_total[i])/((Float_t) time[i]); vpde_rate[i] = ((Float_t) vpde_total[i])/((Float_t) time[i]); vpdw_rate[i] = ((Float_t) vpdw_total[i])/((Float_t) time[i]); vpdx_rate[i] = ((Float_t) vpdx_total[i])/((Float_t) time[i]); bbce_rate_max 
= (bbce_rate[i] > bbce_rate_max) ? bbce_rate[i] : bbce_rate_max; bbcw_rate_max = (bbcw_rate[i] > bbcw_rate_max) ? bbcw_rate[i] : bbcw_rate_max; bbcx_rate_max = (bbcx_rate[i] > bbcx_rate_max) ? bbcx_rate[i] : bbcx_rate_max; zdce_rate_max = (zdce_rate[i] > zdce_rate_max) ? zdce_rate[i] : zdce_rate_max; zdcw_rate_max = (zdcw_rate[i] > zdcw_rate_max) ? zdcw_rate[i] : zdcw_rate_max; zdcx_rate_max = (zdcx_rate[i] > zdcx_rate_max) ? zdcx_rate[i] : zdcx_rate_max; zdce_rate_max = (zdce_rate[i] > zdce_rate_max) ? zdce_rate[i] : zdce_rate_max; zdcw_rate_max = (zdcw_rate[i] > zdcw_rate_max) ? zdcw_rate[i] : zdcw_rate_max; zdcx_rate_max = (zdcx_rate[i] > zdcx_rate_max) ? zdcx_rate[i] : zdcx_rate_max; }; Float_t factor=0.1; bbce_rate_max += factor*bbce_rate_max; bbcw_rate_max += factor*bbcw_rate_max; bbcx_rate_max += factor*bbcx_rate_max; zdce_rate_max += factor*zdce_rate_max; zdcw_rate_max += factor*zdcw_rate_max; zdcx_rate_max += factor*zdcx_rate_max; vpde_rate_max += factor*vpde_rate_max; vpdw_rate_max += factor*vpdw_rate_max; vpdx_rate_max += factor*vpdx_rate_max; // initialise correlation histograms const Int_t NBINS = 100; TH2F * bbc_ew = new TH2F("bbc_ew","BBCE vs BBCW Rate Correlation",NBINS,0,bbcw_rate_max,NBINS,0,bbce_rate_max); TH2F * bbc_ex = new TH2F("bbc_ex","BBCE vs BBCX Rate Correlation",NBINS,0,bbcx_rate_max,NBINS,0,bbce_rate_max); TH2F * bbc_wx = new TH2F("bbc_wx","BBCW vs BBCX Rate Correlation",NBINS,0,bbcx_rate_max,NBINS,0,bbcw_rate_max); TH2F * zdc_ew = new TH2F("zdc_ew","ZDCE vs ZDCW Rate Correlation",NBINS,0,zdcw_rate_max,NBINS,0,zdce_rate_max); TH2F * zdc_ex = new TH2F("zdc_ex","ZDCE vs ZDCX Rate Correlation",NBINS,0,zdcx_rate_max,NBINS,0,zdce_rate_max); TH2F * zdc_wx = new TH2F("zdc_wx","ZDCW vs ZDCX Rate Correlation",NBINS,0,zdcx_rate_max,NBINS,0,zdcw_rate_max); TH2F * vpd_ew = new TH2F("vpd_ew","VPDE vs VPDW Rate Correlation",NBINS,0,vpdw_rate_max,NBINS,0,vpde_rate_max); TH2F * vpd_ex = new TH2F("vpd_ex","VPDE vs VPDX Rate 
Correlation",NBINS,0,vpdx_rate_max,NBINS,0,vpde_rate_max); TH2F * vpd_wx = new TH2F("vpd_wx","VPDW vs VPDX Rate Correlation",NBINS,0,vpdx_rate_max,NBINS,0,vpdw_rate_max); TH2F * bz_e = new TH2F("bz_e","BBCE vs ZDCE Rate Correlation",NBINS,0,zdce_rate_max,NBINS,0,bbce_rate_max); TH2F * bz_w = new TH2F("bz_w","BBCW vs ZDCW Rate Correlation",NBINS,0,zdcw_rate_max,NBINS,0,bbcw_rate_max); TH2F * bz_x = new TH2F("bz_x","BBCX vs ZDCX Rate Correlation",NBINS,0,zdcx_rate_max,NBINS,0,bbcx_rate_max); TH2F * bv_e = new TH2F("bv_e","BBCE vs VPDE Rate Correlation",NBINS,0,vpde_rate_max,NBINS,0,bbce_rate_max); TH2F * bv_w = new TH2F("bv_w","BBCW vs VPDW Rate Correlation",NBINS,0,vpdw_rate_max,NBINS,0,bbcw_rate_max); TH2F * bv_x = new TH2F("bv_x","BBCX vs VPDX Rate Correlation",NBINS,0,vpdx_rate_max,NBINS,0,bbcx_rate_max); TH2F * vz_e = new TH2F("vz_e","VPDE vs ZDCE Rate Correlation",NBINS,0,zdce_rate_max,NBINS,0,vpde_rate_max); TH2F * vz_w = new TH2F("vz_w","VPDW vs ZDCW Rate Correlation",NBINS,0,zdcw_rate_max,NBINS,0,vpdw_rate_max); TH2F * vz_x = new TH2F("vz_x","VPDX vs ZDCX Rate Correlation",NBINS,0,zdcx_rate_max,NBINS,0,vpdx_rate_max); // fill correlation histograms for(Int_t i=0; i<IMAX; i++) { bbc_ew->Fill(bbcw_rate[i],bbce_rate[i]); bbc_ex->Fill(bbcx_rate[i],bbce_rate[i]); bbc_wx->Fill(bbcx_rate[i],bbcw_rate[i]); zdc_ew->Fill(zdcw_rate[i],zdce_rate[i]); zdc_ex->Fill(zdcx_rate[i],zdce_rate[i]); zdc_wx->Fill(zdcx_rate[i],zdcw_rate[i]); vpd_ew->Fill(vpdw_rate[i],vpde_rate[i]); vpd_ex->Fill(vpdx_rate[i],vpde_rate[i]); vpd_wx->Fill(vpdx_rate[i],vpdw_rate[i]); bz_e->Fill(zdce_rate[i],bbce_rate[i]); bz_w->Fill(zdcw_rate[i],bbcw_rate[i]); bz_x->Fill(zdcx_rate[i],bbcx_rate[i]); bv_e->Fill(vpde_rate[i],bbce_rate[i]); bv_w->Fill(vpdw_rate[i],bbcw_rate[i]); bv_x->Fill(vpdx_rate[i],bbcx_rate[i]); vz_e->Fill(zdce_rate[i],vpde_rate[i]); vz_w->Fill(zdcw_rate[i],vpdw_rate[i]); vz_x->Fill(zdcx_rate[i],vpdx_rate[i]); }; // compute correlation coefficients char bbc_ew_text[64]; char 
bbc_ex_text[64]; char bbc_wx_text[64]; char zdc_ew_text[64]; char zdc_ex_text[64]; char zdc_wx_text[64]; char vpd_ew_text[64]; char vpd_ex_text[64]; char vpd_wx_text[64]; char bz_e_text[64]; char bz_w_text[64]; char bz_x_text[64]; char bv_e_text[64]; char bv_w_text[64]; char bv_x_text[64]; char vz_e_text[64]; char vz_w_text[64]; char vz_x_text[64]; sprintf(bbc_ew_text,"BBC E:W -- C=%0.5f",bbc_ew->GetCorrelationFactor()); sprintf(bbc_ex_text,"BBC E:X -- C=%0.5f",bbc_ex->GetCorrelationFactor()); sprintf(bbc_wx_text,"BBC W:X -- C=%0.5f",bbc_wx->GetCorrelationFactor()); sprintf(zdc_ew_text,"ZDC E:W -- C=%0.5f",zdc_ew->GetCorrelationFactor()); sprintf(zdc_ex_text,"ZDC E:X -- C=%0.5f",zdc_ex->GetCorrelationFactor()); sprintf(zdc_wx_text,"ZDC W:X -- C=%0.5f",zdc_wx->GetCorrelationFactor()); sprintf(vpd_ew_text,"VPD E:W -- C=%0.5f",vpd_ew->GetCorrelationFactor()); sprintf(vpd_ex_text,"VPD E:X -- C=%0.5f",vpd_ex->GetCorrelationFactor()); sprintf(vpd_wx_text,"VPD W:X -- C=%0.5f",vpd_wx->GetCorrelationFactor()); sprintf(bz_e_text,"BBCE:ZDCE -- C=%0.5f",bz_e->GetCorrelationFactor()); sprintf(bz_w_text,"BBCW:ZDCW -- C=%0.5f",bz_w->GetCorrelationFactor()); sprintf(bz_x_text,"BBCX:ZDCX -- C=%0.5f",bz_x->GetCorrelationFactor()); sprintf(bv_e_text,"BBCE:VPDE -- C=%0.5f",bv_e->GetCorrelationFactor()); sprintf(bv_w_text,"BBCW:VPDW -- C=%0.5f",bv_w->GetCorrelationFactor()); sprintf(bv_x_text,"BBCX:VPDX -- C=%0.5f",bv_x->GetCorrelationFactor()); sprintf(vz_e_text,"VPDE:ZDCE -- C=%0.5f",vz_e->GetCorrelationFactor()); sprintf(vz_w_text,"VPDW:ZDCW -- C=%0.5f",vz_w->GetCorrelationFactor()); sprintf(vz_x_text,"VPDX:ZDCX -- C=%0.5f",vz_x->GetCorrelationFactor()); Float_t starty=0.8; Float_t startx=0.3; Float_t interval=0.1; TLatex * corr_latex = new TLatex(startx-0.05,starty,"Correlation Coefficients"); TLatex * bbc_ew_latex = new TLatex(startx,starty-1*interval,bbc_ew_text); TLatex * bbc_ex_latex = new TLatex(startx,starty-2*interval,bbc_ex_text); TLatex * bbc_wx_latex = new 
TLatex(startx,starty-3*interval,bbc_wx_text); TLatex * zdc_ew_latex = new TLatex(startx,starty-1*interval,zdc_ew_text); TLatex * zdc_ex_latex = new TLatex(startx,starty-2*interval,zdc_ex_text); TLatex * zdc_wx_latex = new TLatex(startx,starty-3*interval,zdc_wx_text); TLatex * vpd_ew_latex = new TLatex(startx,starty-1*interval,vpd_ew_text); TLatex * vpd_ex_latex = new TLatex(startx,starty-2*interval,vpd_ex_text); TLatex * vpd_wx_latex = new TLatex(startx,starty-3*interval,vpd_wx_text); TLatex * bz_e_latex = new TLatex(startx,starty-1*interval,bz_e_text); TLatex * bz_w_latex = new TLatex(startx,starty-2*interval,bz_w_text); TLatex * bz_x_latex = new TLatex(startx,starty-3*interval,bz_x_text); TLatex * bv_e_latex = new TLatex(startx,starty-1*interval,bv_e_text); TLatex * bv_w_latex = new TLatex(startx,starty-2*interval,bv_w_text); TLatex * bv_x_latex = new TLatex(startx,starty-3*interval,bv_x_text); TLatex * vz_e_latex = new TLatex(startx,starty-1*interval,vz_e_text); TLatex * vz_w_latex = new TLatex(startx,starty-2*interval,vz_w_text); TLatex * vz_x_latex = new TLatex(startx,starty-3*interval,vz_x_text); // draw correlation histograms gStyle->SetOptStat(0); TCanvas * canv_corr_bbc = new TCanvas("canv_corr_bbc","canv_corr_bbc",1100,700); canv_corr_bbc->Divide(2,2); TCanvas * canv_corr_zdc = new TCanvas("canv_corr_zdc","canv_corr_zdc",1100,700); canv_corr_zdc->Divide(2,2); TCanvas * canv_corr_vpd = new TCanvas("canv_corr_vpd","canv_corr_vpd",1100,700); canv_corr_vpd->Divide(2,2); TCanvas * canv_corr_bz = new TCanvas("canv_corr_bz","canv_corr_bz",1100,700); canv_corr_bz->Divide(2,2); TCanvas * canv_corr_bv = new TCanvas("canv_corr_bv","canv_corr_bv",1100,700); canv_corr_bv->Divide(2,2); TCanvas * canv_corr_vz = new TCanvas("canv_corr_vz","canv_corr_vz",1100,700); canv_corr_vz->Divide(2,2); for(Int_t i=1; i<=4; i++) { canv_corr_bbc->GetPad(i)->SetGrid(1,1); canv_corr_zdc->GetPad(i)->SetGrid(1,1); canv_corr_vpd->GetPad(i)->SetGrid(1,1); 
canv_corr_bz->GetPad(i)->SetGrid(1,1); canv_corr_bv->GetPad(i)->SetGrid(1,1); canv_corr_vz->GetPad(i)->SetGrid(1,1); }; canv_corr_bbc->cd(1); bbc_ew->Draw("colz"); canv_corr_bbc->cd(3); bbc_ex->Draw("colz"); canv_corr_bbc->cd(4); bbc_wx->Draw("colz"); canv_corr_zdc->cd(1); zdc_ew->Draw("colz"); canv_corr_zdc->cd(3); zdc_ex->Draw("colz"); canv_corr_zdc->cd(4); zdc_wx->Draw("colz"); canv_corr_vpd->cd(1); vpd_ew->Draw("colz"); canv_corr_vpd->cd(3); vpd_ex->Draw("colz"); canv_corr_vpd->cd(4); vpd_wx->Draw("colz"); canv_corr_bz->cd(1); bz_e->Draw("colz"); canv_corr_bz->cd(3); bz_w->Draw("colz"); canv_corr_bz->cd(4); bz_x->Draw("colz"); canv_corr_bv->cd(1); bv_e->Draw("colz"); canv_corr_bv->cd(3); bv_w->Draw("colz"); canv_corr_bv->cd(4); bv_x->Draw("colz"); canv_corr_vz->cd(1); vz_e->Draw("colz"); canv_corr_vz->cd(3); vz_w->Draw("colz"); canv_corr_vz->cd(4); vz_x->Draw("colz"); canv_corr_bbc->cd(2); corr_latex->Draw(); bbc_ew_latex->Draw(); bbc_ex_latex->Draw(); bbc_wx_latex->Draw(); canv_corr_zdc->cd(2); corr_latex->Draw(); zdc_ew_latex->Draw(); zdc_ex_latex->Draw(); zdc_wx_latex->Draw(); canv_corr_vpd->cd(2); corr_latex->Draw(); vpd_ew_latex->Draw(); vpd_ex_latex->Draw(); vpd_wx_latex->Draw(); canv_corr_bz->cd(2); corr_latex->Draw(); bz_e_latex->Draw(); bz_w_latex->Draw(); bz_x_latex->Draw(); canv_corr_bv->cd(2); corr_latex->Draw(); bv_e_latex->Draw(); bv_w_latex->Draw(); bv_x_latex->Draw(); canv_corr_vz->cd(2); corr_latex->Draw(); vz_e_latex->Draw(); vz_w_latex->Draw(); vz_x_latex->Draw(); };
void csv(TString input="tmva.csvoutput.txt", TString par1="par2", TString par2="par3", TString par3="", TString value="eventEffScaled_5") {
  // Visualise a classifier-parameter scan stored as CSV: plot the result
  // branch ("value") against one or two parameter branches, optionally with
  // a third parameter shown on the colour axis.
  std::cout << "Usage:" << std::endl
            << ".x scripts/csv.C with default arguments" << std::endl
            << ".x scripts/csv.C(filename, par1, par2, value)" << std::endl
            << std::endl
            << " Optional arguments:" << std::endl
            << " filename path to CSV file" << std::endl
            << " par1 name of X-parameter branch" << std::endl
            << " par2 name of Y-parameter branch (if empty, efficiency is drawn as a function of par1)" << std::endl
            << " value name of result (efficiency) branch" << std::endl
            << std::endl;

  // Load the CSV file into an in-memory tree.
  TTree *tree = new TTree("data", "data");
  tree->ReadFile(input);

  gStyle->SetPalette(1);
  gStyle->SetPadRightMargin(0.14);
  TCanvas *canvas = new TCanvas("csvoutput", "CSV Output", 1200, 900);
  tree->SetMarkerStyle(kFullDotMedium);
  tree->SetMarkerColor(kRed);

  const bool hasY = par2.Length() > 0;
  const bool hasZ = par3.Length() > 0;

  if(!hasY) {
    // 1D scan: draw the result directly as a function of par1.
    tree->Draw(Form("%s:%s", value.Data(), par1.Data()));
    TH1 *frame = tree->GetHistogram();
    if(!frame) return;
    frame->SetTitle(Form("%s with different classifier parameters", value.Data()));
    frame->GetXaxis()->SetTitle(Form("Classifier parameter %s", par1.Data()));
    frame->GetYaxis()->SetTitle(value);
    return;
  }

  // 2D scan (optionally with a third parameter).
  if(hasZ)
    tree->Draw(Form("%s:%s:%s:%s", par1.Data(), par2.Data(), par3.Data(), value.Data()), "", "COLZ");
  else
    tree->Draw(Form("%s:%s:%s", par2.Data(), par1.Data(), value.Data()), "", "COLZ");

  TH1 *frame = tree->GetHistogram();
  if(!frame) return;
  frame->SetTitle(Form("%s with different classifier parameters", value.Data()));
  frame->GetXaxis()->SetTitle(Form("Classifier parameter %s", par1.Data()));
  frame->GetYaxis()->SetTitle(Form("Classifier parameter %s", par2.Data()));
  if(hasZ)
    frame->GetZaxis()->SetTitle(Form("Classifier parameter %s", par3.Data()));
  else
    frame->GetZaxis()->SetTitle("");

  if(!hasZ) {
    // Overlay each scan point's value (in percent) as colour-coded text.
    float px = 0;
    float py = 0;
    float pval = 0;
    double vmax = tree->GetMaximum(value);
    double vmin = tree->GetMinimum(value);
    tree->SetBranchAddress(par1, &px);
    tree->SetBranchAddress(par2, &py);
    tree->SetBranchAddress(value, &pval);
    TLatex tag;
    tag.SetTextSize(0.03);
    Long64_t total = tree->GetEntries();
    for(Long64_t e = 0; e < total; ++e) {
      tree->GetEntry(e);
      tag.SetTextColor(textColor(pval, vmax, vmin));
      tag.DrawLatex(px, py, Form("%.3f", pval*100));
    }
  }
}
void mk_tree(const char * acc_file="datfiles/acc.dat") { // read acc file into tree TTree * acc = new TTree("acc","counts tree from acc.dat"); char cols[2048]; char bbc_cols[256]; char zdc_cols[256]; char vpd_cols[256]; for(Int_t i=0; i<=7; i++) { if(i==0) { sprintf(bbc_cols,"bbc_%d/D",i); sprintf(zdc_cols,"zdc_%d/D",i); sprintf(vpd_cols,"vpd_%d/D",i); } else { sprintf(bbc_cols,"%s:bbc_%d/D",bbc_cols,i); sprintf(zdc_cols,"%s:zdc_%d/D",zdc_cols,i); sprintf(vpd_cols,"%s:vpd_%d/D",vpd_cols,i); }; }; sprintf(cols,"i/I:runnum/I:fi/I:fill/I:t/D:bx/I:%s:%s:%s:tot_bx/D:blue/I:yell/I",bbc_cols,zdc_cols,vpd_cols); printf("%s\n",cols); acc->ReadFile(acc_file,cols); acc->Print(); Int_t IMAX_tmp = acc->GetMaximum("i"); const Int_t IMAX = IMAX_tmp; // set branch addresses to read through acc tree Int_t index,runnum,fill_index,fill,bx; Double_t bbc[8]; Double_t zdc[8]; Double_t vpd[8]; Double_t time; Double_t tot_bx; Int_t blue,yell; acc->SetBranchAddress("i",&index); acc->SetBranchAddress("runnum",&runnum); acc->SetBranchAddress("fi",&fill_index); acc->SetBranchAddress("fill",&fill); acc->SetBranchAddress("t",&time); acc->SetBranchAddress("bx",&bx); char str[16]; for(Int_t i=0; i<8; i++) { sprintf(str,"bbc_%d",i); acc->SetBranchAddress(str,&bbc[i]); }; for(Int_t i=0; i<8; i++) { sprintf(str,"zdc_%d",i); acc->SetBranchAddress(str,&zdc[i]); }; for(Int_t i=0; i<8; i++) { sprintf(str,"vpd_%d",i); acc->SetBranchAddress(str,&vpd[i]); }; acc->SetBranchAddress("tot_bx",&tot_bx); acc->SetBranchAddress("blue",&blue); acc->SetBranchAddress("yell",&yell); // build arrays for restructuring; arrays are needed so that // we can implement bXing shift corrections Double_t bbce_arr[IMAX][120]; Double_t bbcw_arr[IMAX][120]; Double_t bbcx_arr[IMAX][120]; Double_t zdce_arr[IMAX][120]; Double_t zdcw_arr[IMAX][120]; Double_t zdcx_arr[IMAX][120]; Double_t vpde_arr[IMAX][120]; Double_t vpdw_arr[IMAX][120]; Double_t vpdx_arr[IMAX][120]; Int_t runnum_arr[IMAX]; Int_t fi_arr[IMAX]; Int_t fill_arr[IMAX]; 
Double_t time_arr[IMAX]; Double_t tot_bx_arr[IMAX][120]; Int_t blue_arr[IMAX][120]; Int_t yell_arr[IMAX][120]; Bool_t kicked_arr[IMAX][120]; // restructure tree into one suitable for analysis TTree * sca = new TTree("sca","restructured tree"); Double_t bbce,bbcw,bbcx; // e=east, w=west, x=coincidence Double_t zdce,zdcw,zdcx; Double_t vpde,vpdw,vpdx; Bool_t okEntry,kicked; sca->Branch("i",&index,"i/I"); sca->Branch("runnum",&runnum,"runnum/I"); sca->Branch("fi",&fill_index,"fi/I"); sca->Branch("fill",&fill,"fill/I"); sca->Branch("t",&time,"t/D"); sca->Branch("bx",&bx,"bx/I"); sca->Branch("bbce",&bbce,"bbce/D"); sca->Branch("bbcw",&bbcw,"bbcw/D"); sca->Branch("bbcx",&bbcx,"bbcx/D"); sca->Branch("zdce",&zdce,"zdce/D"); sca->Branch("zdcw",&zdcw,"zdcw/D"); sca->Branch("zdcx",&zdcx,"zdcx/D"); sca->Branch("vpde",&vpde,"vpde/D"); sca->Branch("vpdw",&vpdw,"vpdw/D"); sca->Branch("vpdx",&vpdx,"vpdx/D"); sca->Branch("tot_bx",&tot_bx,"tot_bx/D"); sca->Branch("blue",&blue,"blue/I"); sca->Branch("yell",&yell,"yell/I"); sca->Branch("kicked",&kicked,"kicked/O"); // read kicked bunches tree from "kicked" file TTree * kicked_tr = new TTree(); kicked_tr->ReadFile("kicked","fill/I:bx/I:spinbit/I"); Int_t kicked_fill,kicked_bx,kicked_spinbit; kicked_tr->SetBranchAddress("fill",&kicked_fill); kicked_tr->SetBranchAddress("bx",&kicked_bx); kicked_tr->SetBranchAddress("spinbit",&kicked_spinbit); for(Int_t q=0; q<acc->GetEntries(); q++) { acc->GetEntry(q); // -- see doc for bit details // BBC, ZDC, VPD bits: [ x w e ] bbce = bbc[1] + bbc[3] + bbc[5] + bbc[7]; // e + we + xe + xwe bbcw = bbc[2] + bbc[3] + bbc[6] + bbc[7]; // w + we + xw + xwe bbcx = bbc[3] + bbc[7]; // we + xwe zdce = zdc[1] + zdc[3] + zdc[5] + zdc[7]; // e + we + xe + xwe zdcw = zdc[2] + zdc[3] + zdc[6] + zdc[7]; // w + we + xw + xwe zdcx = zdc[3] + zdc[7]; // we + xwe vpde = vpd[1] + vpd[3] + vpd[5] + vpd[7]; // e + we + xe + xwe vpdw = vpd[2] + vpd[3] + vpd[6] + vpd[7]; // w + we + xw + xwe vpdx = vpd[3] + vpd[7]; // we + 
xwe // KICKED BUNCHES // manually omit empty bunches documented in pathologies.dat -- CLEAN UP PROCEDURE // (see 09.01.14 log entry) okEntry=true; // kicked bunches (presumably empty) /* if(fill==17384 && (bx==29 || bx==30 || bx==117)) okEntry=false; if(fill==17416 && bx==79) okEntry=false; if(fill==17491 && bx==105) okEntry=false; if(fill==17519 && (bx==94 || bx==109)) okEntry=false; if(fill==17520 && bx==0) okEntry=false; if(fill==17529 && bx==97) okEntry=false; if(fill==17534 && bx==112) okEntry=false; if(fill==17553 && bx==73) okEntry=false; if(fill==17554 && (bx==7 || bx==14)) okEntry=false; if(fill==17555 && bx==61) okEntry=false; if(fill==17576 && bx==94) okEntry=false; // afterpulse-like bunches -- remove 1st 2 bunches after abort gaps //if(fill==17512 && (bx>=40 && bx<=59)) okEntry=false; //if((fill>=17513 && fill<=17520) && ((bx>=0 && bx<=19) || (bx>=40 && bx<=59))) okEntry=false; */ for(Int_t kk=0; kk<kicked_tr->GetEntries(); kk++) { kicked_tr->GetEntry(kk); if(fill==kicked_fill && bx==kicked_bx) okEntry=false; }; kicked=!okEntry; // cleaned up analysis //kicked=0; // take all bXings // store data into arrays, implementing bXing shift corrections on scalers if(fill==16570 || fill==16567) { bbce_arr[index-1][(bx+113)%120] = bbce; // shift down 7 bXings bbcw_arr[index-1][(bx+113)%120] = bbcw; bbcx_arr[index-1][(bx+113)%120] = bbcx; zdce_arr[index-1][(bx+113)%120] = zdce; // shift down 7 bXings zdcw_arr[index-1][(bx+113)%120] = zdcw; zdcx_arr[index-1][(bx+113)%120] = zdcx; vpde_arr[index-1][(bx+113)%120] = vpde; // shift down 7 bXings vpdw_arr[index-1][(bx+113)%120] = vpdw; vpdx_arr[index-1][(bx+113)%120] = vpdx; } else if(fill == 16582 || fill == 16586 || fill == 16587 || fill == 16592 || fill == 16593 || fill == 16594 || fill == 16597 || fill == 16602) { bbce_arr[index-1][bx] = bbce; // no shift bbcw_arr[index-1][bx] = bbcw; bbcx_arr[index-1][bx] = bbcx; zdce_arr[index-1][bx] = zdce; // no shift zdcw_arr[index-1][bx] = zdcw; zdcx_arr[index-1][bx] = zdcx; 
vpde_arr[index-1][(bx+1)%120] = vpde; // shift up 1 bXings vpdw_arr[index-1][(bx+1)%120] = vpdw; vpdx_arr[index-1][(bx+1)%120] = vpdx; } else { bbce_arr[index-1][bx] = bbce; bbcw_arr[index-1][bx] = bbcw; bbcx_arr[index-1][bx] = bbcx; zdce_arr[index-1][bx] = zdce; zdcw_arr[index-1][bx] = zdcw; zdcx_arr[index-1][bx] = zdcx; vpde_arr[index-1][bx] = vpde; vpdw_arr[index-1][bx] = vpdw; vpdx_arr[index-1][bx] = vpdx; }; runnum_arr[index-1] = runnum; fi_arr[index-1] = fill_index; fill_arr[index-1] = fill; time_arr[index-1] = time; tot_bx_arr[index-1][bx] = tot_bx; blue_arr[index-1][bx] = blue; yell_arr[index-1][bx] = yell; kicked_arr[index-1][bx] = kicked; }; // fill restructured tree for(Int_t i=0; i<IMAX; i++) { index = i+1; runnum = runnum_arr[i]; fill_index = fi_arr[i]; fill = fill_arr[i]; time = time_arr[i]; for(Int_t b=0; b<120; b++) { bx = b; bbce = bbce_arr[i][b]; bbcw = bbcw_arr[i][b]; bbcx = bbcx_arr[i][b]; zdce = zdce_arr[i][b]; zdcw = zdcw_arr[i][b]; zdcx = zdcx_arr[i][b]; vpde = vpde_arr[i][b]; vpdw = vpdw_arr[i][b]; vpdx = vpdx_arr[i][b]; tot_bx = tot_bx_arr[i][b]; blue = blue_arr[i][b]; yell = yell_arr[i][b]; kicked = kicked_arr[i][b]; sca->Fill(); }; }; TFile * outfile = new TFile("counts.root","RECREATE"); acc->Write("acc"); sca->Write("sca"); printf("counts.root written\n"); };
// Read the restructured accumulation tree "acc" from `filename` (default:
// counts.root, written by the restructuring routine above) and draw bar
// charts of the total counts in each scaler-bit combination for BBC, ZDC
// and VPD; the charts are printed to bit_combos/*.png.
void bit_combinations(const char * filename="counts.root")
{
  // open the input file and grab the accumulation tree
  TFile * infile = new TFile(filename,"READ");
  TTree * acc = (TTree*) infile->Get("acc");

  // set branch addresses
  Int_t index,runnum,fill_index,fill;
  Double_t t;
  Double_t bbc[8];   // counts per e/w/x bit combination, indexed 0..7
  Double_t zdc[8];
  //Double_t vpd[4];
  Double_t vpd[8];   // VPD widened from 4 to 8 combinations (old code kept commented)
  Double_t tot_bx;
  Int_t blue,yell;
  acc->SetBranchAddress("i",&index);
  acc->SetBranchAddress("runnum",&runnum);
  acc->SetBranchAddress("fi",&fill_index);
  acc->SetBranchAddress("fill",&fill);
  acc->SetBranchAddress("t",&t);
  acc->SetBranchAddress("tot_bx",&tot_bx);
  acc->SetBranchAddress("blue",&blue);
  acc->SetBranchAddress("yell",&yell);

  // build branch names bbc_0..bbc_7 / zdc_0..zdc_7 / vpd_0..vpd_7 and hook them up
  char bbc_br[8][16];
  char zdc_br[8][16];
  //char vpd_br[4][16];
  char vpd_br[8][16];
  for(Int_t i=0; i<8; i++)
  {
    sprintf(bbc_br[i],"bbc_%d",i);
    sprintf(zdc_br[i],"zdc_%d",i);
    acc->SetBranchAddress(bbc_br[i],&(bbc[i]));
    acc->SetBranchAddress(zdc_br[i],&(zdc[i]));
    //if(i<4)
    //{
    sprintf(vpd_br[i],"vpd_%d",i);
    acc->SetBranchAddress(vpd_br[i],&(vpd[i]));
    //};
  };

  // ----------------------------------------------
  // total counts vs. scaler bit bar charts
  // NOTE(review): these histograms are default-constructed and then filled
  // via Fill(label,weight); this relies on ROOT's automatic alphanumeric
  // label binning extending the axis -- confirm with the ROOT version in use.
  TH1F * ntot_vs_bits_bbc = new TH1F();
  TH1F * ntot_vs_bits_zdc = new TH1F();
  TH1F * ntot_vs_bits_vpd = new TH1F();

  // scaler bit combination names (e = east, w = west, x = coincidence)
  char comb[8][16];
  strcpy(comb[0],"none");
  strcpy(comb[1],"e");
  strcpy(comb[2],"w");
  strcpy(comb[3],"w+e");
  strcpy(comb[4],"x");
  strcpy(comb[5],"x+e");
  strcpy(comb[6],"x+w");
  strcpy(comb[7],"x+w+e");

  // fill bar charts: each label accumulates the counts of its bit combination
  for(Int_t i=0; i<acc->GetEntries(); i++)
  {
    acc->GetEntry(i);
    for(Int_t j=0; j<8; j++)
    {
      ntot_vs_bits_bbc->Fill(comb[j],bbc[j]);
      ntot_vs_bits_zdc->Fill(comb[j],zdc[j]);
      //if(j<4) ntot_vs_bits_vpd->Fill(comb[j],vpd[j]);
      ntot_vs_bits_vpd->Fill(comb[j],vpd[j]);
    };
  };

  // style and draw the BBC chart (log-y, horizontal grid)
  ntot_vs_bits_bbc->SetStats(0);
  ntot_vs_bits_bbc->SetTitle("total bbc counts vs. scaler bits");
  ntot_vs_bits_bbc->SetBarWidth(0.4);
  ntot_vs_bits_bbc->SetBarOffset(0.55);
  ntot_vs_bits_bbc->SetFillColor(50);
  TCanvas * c_bbc_bits = new TCanvas("c_bbc_bits","c_bbc_bits",700,500);
  c_bbc_bits->SetGrid(0,1);
  c_bbc_bits->SetLogy();
  ntot_vs_bits_bbc->Draw("bar2");

  // same for ZDC
  ntot_vs_bits_zdc->SetStats(0);
  ntot_vs_bits_zdc->SetTitle("total zdc counts vs. scaler bits");
  ntot_vs_bits_zdc->SetBarWidth(0.4);
  ntot_vs_bits_zdc->SetBarOffset(0.55);
  ntot_vs_bits_zdc->SetFillColor(50);
  TCanvas * c_zdc_bits = new TCanvas("c_zdc_bits","c_zdc_bits",700,500);
  c_zdc_bits->SetGrid(0,1);
  c_zdc_bits->SetLogy();
  ntot_vs_bits_zdc->Draw("bar2");

  // same for VPD
  ntot_vs_bits_vpd->SetStats(0);
  ntot_vs_bits_vpd->SetTitle("total vpd counts vs. scaler bits");
  ntot_vs_bits_vpd->SetBarWidth(0.4);
  ntot_vs_bits_vpd->SetBarOffset(0.55);
  ntot_vs_bits_vpd->SetFillColor(50);
  TCanvas * c_vpd_bits = new TCanvas("c_vpd_bits","c_vpd_bits",700,500);
  c_vpd_bits->SetGrid(0,1);
  c_vpd_bits->SetLogy();
  ntot_vs_bits_vpd->Draw("bar2");

  // print the three canvases to bit_combos/ (directory assumed to exist)
  char outdir[32];
  char bbc_outfile[64];
  char zdc_outfile[64];
  char vpd_outfile[64];
  strcpy(outdir,"bit_combos");
  sprintf(bbc_outfile,"%s/bbc_bit_combos.png",outdir);
  sprintf(zdc_outfile,"%s/zdc_bit_combos.png",outdir);
  sprintf(vpd_outfile,"%s/vpd_bit_combos.png",outdir);
  c_bbc_bits->Print(bbc_outfile,"png");
  c_zdc_bits->Print(zdc_outfile,"png");
  c_vpd_bits->Print(vpd_outfile,"png");
  printf("%s created\n",bbc_outfile);
  printf("%s created\n",zdc_outfile);
  printf("%s created\n",vpd_outfile);

  // ----------------------------------------------
  // get maximum number of runs = IMAX
  Int_t IMAX_tmp = acc->GetMaximum("i");
  const Int_t IMAX = IMAX_tmp;  // runtime bound: nbx below is a VLA (GCC/CINT extension)

  // compute no. bXings with possible interaction in a given run
  // NOTE(review): nbx is filled but never used afterwards in this function
  Int_t nbx[IMAX];
  for(Int_t i=0; i<IMAX; i++) nbx[i]=0;
  for(Int_t i=0; i<acc->GetEntries(); i++)
  {
    acc->GetEntry(i);
    if(blue*yell != 0) nbx[index-1]++;
  };
};
// Plot the distribution of `varToPlot` for tree entries passing `cutLoose`
// (hatched red), overlaid with the subset that also passes `cutAdditional`
// (drawn via TTree::Draw with "same", solid green fill attributes).
// The canvas is saved as "<canvName>.png".
//
// Parameters:
//   tr            - input tree
//   varToPlot     - branch/expression to histogram
//   canvName      - canvas name, also used as the output file basename
//   xAxisName     - x-axis title for the loose-cut histogram
//   cutLoose      - baseline selection applied via CopyTree
//   cutAdditional - extra selection for the overlaid subset
void makePlotWithSelection(TTree* tr, TString varToPlot, TString canvName, TString xAxisName, TCut cutLoose, TCut cutAdditional)
{
  TTree* trCutted = tr->CopyTree(cutLoose);

  // Pad the histogram range by 10% of |min| / |max| so the distribution
  // does not touch the frame edges.
  const double vMin = trCutted->GetMinimum(varToPlot);
  const double vMax = trCutted->GetMaximum(varToPlot);
  TH1D* histLoose = new TH1D("hist", varToPlot, 50,
                             vMin - 0.1*fabs(vMin),
                             vMax + 0.1*fabs(vMax));
  trCutted->Draw(varToPlot + TString(">>hist"), "1", "goff");

  TCanvas* canv = new TCanvas(canvName, canvName);
  histLoose->GetXaxis()->SetTitle(xAxisName);
  // BUG FIX: GetMaximumStored() returns the sentinel -1111 unless SetMaximum()
  // was called beforehand, so SetRangeUser(0,-1111) never did anything useful.
  // Use the actual bin maximum with 10% headroom instead.
  histLoose->GetYaxis()->SetRangeUser(0, 1.1*histLoose->GetMaximum());
  histLoose->SetFillStyle(3004);  // hatched
  histLoose->SetFillColor(2);     // red
  histLoose->Draw();

  // overlay the additionally-selected subset; the fill attributes set on the
  // tree are inherited by the histogram TTree::Draw creates
  trCutted->SetFillStyle(1001);   // solid
  trCutted->SetFillColor(3);      // green
  trCutted->Draw(varToPlot, cutAdditional, "same");

  canv->SaveAs(canvName + ".png");

  delete histLoose;
  histLoose = 0;
  trCutted = 0;  // NOTE: the copied tree is owned by the current directory, not deleted here
}
// Evaluate a trained TMVA BDT over a grid in the (v0,v1) plane, overlay the
// signal and background distributions read from the given files, and scan the
// MVA-output cumulatives for the cut value minimising the misclassification
// error. Plots are drawn via the external helper plot() and written to
// TMVAPlotDecisionBoundary.root.
//
// NOTE(review): the grid scan below fills var0/var1, but the reader's inputs
// are the eight physics variables registered with AddVariable, so the scan
// evaluates the BDT with whatever values those inputs happen to hold. This is
// inherited from the original logic -- verify against the TMVA example before
// trusting the decision-boundary map.
void PlotDecisionBoundary( TString weightFile = "weights/Zprime_vs_QCD_TMVAClassification_BDT.weights.xml",TString v0="lep_pt_ljet", TString v1="met_pt", TString dataFileNameS = "/nfs/dust/cms/user/karavdia/ttbar_semilep_13TeV/RunII_25ns_v1/test_03/uhh2.AnalysisModuleRunner.MC.Zp01w3000.root", TString dataFileNameB = "/nfs/dust/cms/user/karavdia/ttbar_semilep_13TeV/RunII_25ns_v1/test_03/uhh2.AnalysisModuleRunner.MC.QCD_EMEnriched.root")
{
  // this loads the TMVA library
  TMVA::Tools::Instance();

  std::cout << std::endl;
  std::cout << "==> Start TMVAClassificationApplication" << std::endl;

  // create the Reader object
  TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

  // create a set of variables and declare them to the reader
  // - the variable names must correspond in name and type to
  //   those given in the weight file(s) that you use
  Float_t var0, var1;  // grid-scan coordinates in the (v0,v1) plane
  Float_t lep_pt, lep_fbrem, MwT;
  Float_t log_ljet_pt, log_met_pt, log_lep_pt_ljet;
  Float_t log_dR_lep_cljet, log_dR_cljet_ljet;
  Float_t dPhi_lep_cljet;
  reader->AddVariable("lep_pt", &lep_pt);
  reader->AddVariable("lep_fbrem", &lep_fbrem);
  reader->AddVariable("MwT", &MwT);
  reader->AddVariable("log(ljet_pt)", &log_ljet_pt);
  reader->AddVariable("log(met_pt)", &log_met_pt);
  reader->AddVariable("log(lep_pt_ljet)", &log_lep_pt_ljet);
  // BUG FIX: was wired to &log_dR_lep_cljet_trans, a variable that is never declared
  reader->AddVariable("log(dR_lep_cljet)", &log_dR_lep_cljet);
  reader->AddVariable("log(fabs((dR_cljet_ljet-3.14)/3.14))", &log_dR_cljet_ljet);
  reader->AddSpectator("dPhi_lep_cljet", &dPhi_lep_cljet);

  // book the MVA method
  reader->BookMVA( "BDT", weightFile );

  // input trees
  TFile *fS = new TFile(dataFileNameS);
  TTree *signal = (TTree*)fS->Get("AnalysisTree");
  // BUG FIX: the background file was opened from dataFileNameS, so the
  // "background" tree was just a second copy of the signal sample
  TFile *fB = new TFile(dataFileNameB);
  TTree *background = (TTree*)fB->Get("AnalysisTree");

  // leaves for the two plotted variables
  Float_t svar0;
  Float_t svar1;
  Float_t bvar0;
  Float_t bvar1;
  Float_t sWeight=1.0; // just in case you have weights defined, also set these branch addresses
  Float_t bWeight=1.0*signal->GetEntries()/background->GetEntries(); // normalise background to signal statistics

  // set branch addresses
  signal->SetBranchAddress(v0,&svar0);
  signal->SetBranchAddress(v1,&svar1);
  background->SetBranchAddress(v0,&bvar0);
  background->SetBranchAddress(v1,&bvar1);

  // common plotting range covering both samples
  UInt_t nbin = 50;
  Float_t xmax = signal->GetMaximum(v0.Data());
  Float_t xmin = signal->GetMinimum(v0.Data());
  Float_t ymax = signal->GetMaximum(v1.Data());
  Float_t ymin = signal->GetMinimum(v1.Data());
  xmax = TMath::Max(xmax,(Float_t)background->GetMaximum(v0.Data()));
  xmin = TMath::Min(xmin,(Float_t)background->GetMinimum(v0.Data()));
  ymax = TMath::Max(ymax,(Float_t)background->GetMaximum(v1.Data()));
  ymin = TMath::Min(ymin,(Float_t)background->GetMinimum(v1.Data()));

  TH2D *hs=new TH2D("hs","",nbin,xmin,xmax,nbin,ymin,ymax);  // signal scatter
  TH2D *hb=new TH2D("hb","",nbin,xmin,xmax,nbin,ymin,ymax);  // background scatter
  hs->SetXTitle(v0);
  hs->SetYTitle(v1);
  hb->SetXTitle(v0);
  hb->SetYTitle(v1);
  hs->SetMarkerColor(4);
  hb->SetMarkerColor(2);

  // MVA response evaluated on the (v0,v1) grid
  TH2F * hist = new TH2F( "MVA", "MVA", nbin,xmin,xmax,nbin,ymin,ymax);

  // scan the grid, recording the response and its overall range
  Float_t MinMVA=10000, MaxMVA=-100000;
  for (UInt_t ibin=1; ibin<nbin+1; ibin++){
    for (UInt_t jbin=1; jbin<nbin+1; jbin++){
      var0 = hs->GetXaxis()->GetBinCenter(ibin);
      var1 = hs->GetYaxis()->GetBinCenter(jbin);
      Float_t mvaVal=reader->EvaluateMVA( "BDT" ) ;
      if (MinMVA>mvaVal) MinMVA=mvaVal;
      if (MaxMVA<mvaVal) MaxMVA=mvaVal;
      hist->SetBinContent(ibin,jbin, mvaVal);
    }
  }

  // MVA-output distributions and their cumulatives, used below to find the
  // cut with the smallest number of misclassifications
  const Int_t nValBins=100;
  Double_t sum = 0.;
  TH1F *mvaS= new TH1F("mvaS","",nValBins,MinMVA,MaxMVA);
  mvaS->SetXTitle("MVA-ouput");
  mvaS->SetYTitle("#entries");
  TH1F *mvaB= new TH1F("mvaB","",nValBins,MinMVA,MaxMVA);
  mvaB->SetXTitle("MVA-ouput");
  mvaB->SetYTitle("#entries");
  TH1F *mvaSC= new TH1F("mvaSC","",nValBins,MinMVA,MaxMVA);
  mvaSC->SetXTitle("MVA-ouput");
  mvaSC->SetYTitle("cummulation");
  TH1F *mvaBC= new TH1F("mvaBC","",nValBins,MinMVA,MaxMVA);
  mvaBC->SetXTitle("MVA-ouput");
  mvaBC->SetYTitle("cummulation");

  // fill signal MVA distribution and scatter
  Long64_t nentries;
  nentries = signal->GetEntries();
  for (Long64_t is=0; is<nentries;is++) {
    signal->GetEntry(is);
    sum += sWeight;
    var0 = svar0;
    var1 = svar1;
    Float_t mvaVal=reader->EvaluateMVA( "BDT" ) ;
    hs->Fill(svar0,svar1);
    mvaS->Fill(mvaVal,sWeight);
  }
  // fill background MVA distribution and scatter
  nentries = background->GetEntries();
  for (Long64_t ib=0; ib<nentries;ib++) {
    background->GetEntry(ib);
    sum += bWeight;
    var0 = bvar0;
    var1 = bvar1;
    Float_t mvaVal=reader->EvaluateMVA( "BDT" ) ;
    hb->Fill(bvar0,bvar1);
    mvaB->Fill(mvaVal,bWeight);
  }

  // criterion for the cut optimisation
  SeparationBase *sepGain = new MisClassificationError();
  //SeparationBase *sepGain = new GiniIndex();
  //SeparationBase *sepGain = new CrossEntropy();

  Double_t sTot = mvaS->GetSum();
  Double_t bTot = mvaB->GetSum();

  // seed the cumulatives with the first bin
  mvaSC->SetBinContent(1,mvaS->GetBinContent(1));
  mvaBC->SetBinContent(1,mvaB->GetBinContent(1));
  Double_t sSel=mvaSC->GetBinContent(1);
  Double_t bSel=mvaBC->GetBinContent(1);
  Double_t sSelBest=0;
  Double_t bSelBest=0;
  Double_t separationGain=sepGain->GetSeparationGain(sSel,bSel,sTot,bTot);
  Double_t mvaCut=mvaSC->GetBinCenter(1);
  Double_t mvaCutOrientation=1; // 1 if mva > mvaCut --> Signal and -1 if mva < mvaCut (i.e. mva*-1 > mvaCut*-1) --> Signal

  // BUG FIX: the loop previously ran to ibin<nValBins, leaving the last bin
  // out of the cumulative distributions and of the cut scan
  for (UInt_t ibin=2;ibin<=nValBins;ibin++){
    mvaSC->SetBinContent(ibin,mvaS->GetBinContent(ibin)+mvaSC->GetBinContent(ibin-1));
    mvaBC->SetBinContent(ibin,mvaB->GetBinContent(ibin)+mvaBC->GetBinContent(ibin-1));
    sSel=mvaSC->GetBinContent(ibin);
    bSel=mvaBC->GetBinContent(ibin);
    if (separationGain < sepGain->GetSeparationGain(sSel,bSel,sTot,bTot)){
      separationGain = sepGain->GetSeparationGain(sSel,bSel,sTot,bTot);
      mvaCut=mvaSC->GetBinCenter(ibin);
      // orientation: compare the purity of the selected vs. rejected region
      // (NOTE(review): bSel==0 or bTot==bSel gives a division by zero here)
      if (sSel/bSel > (sTot-sSel)/(bTot-bSel)) mvaCutOrientation=-1;
      else mvaCutOrientation=1;
      sSelBest=sSel;
      bSelBest=bSel;
    }
  }

  cout << "Min="<<MinMVA << " Max=" << MaxMVA
       << " sTot=" << sTot << " bTot=" << bTot
       << " sSel=" << sSelBest << " bSel=" << bSelBest
       << " sepGain="<<separationGain
       << " cut=" << mvaCut << " cutOrientation="<<mvaCutOrientation << endl;

  delete reader;

  gStyle->SetPalette(1);
  plot(hs,hb,hist ,v0,v1,mvaCut);  // external helper: draws the boundary overlay

  // MVA distributions (top) and cumulatives (bottom)
  TCanvas *cm=new TCanvas ("cm","",900,1200);
  cm->cd();
  cm->Divide(1,2);
  cm->cd(1);
  mvaS->SetLineColor(4);
  mvaB->SetLineColor(2);
  mvaS->Draw();
  mvaB->Draw("same");
  cm->cd(2);
  mvaSC->SetLineColor(4);
  mvaBC->SetLineColor(2);
  mvaBC->Draw();
  mvaSC->Draw("same");

  // write histograms
  TFile *target = new TFile( "TMVAPlotDecisionBoundary.root","RECREATE" );
  hs->Write();
  hb->Write();
  hist->Write();
  target->Close();
}