void SetAliEnSettings() { // Routine to load settings from an AliEn environment file. ifstream fileIn; fileIn.open(Form("/tmp/gclient_env_%d", gSystem->GetUid())); if (gDebug>0) {printf("P010_TAlien.C: parsing /tmp/gclient_env_$UID\n");} TString lineS,tmp; char line[4096]; while (fileIn.good()){ fileIn.getline(line,4096,'\n'); lineS = line; if (lineS.IsNull()) continue; if (lineS.Contains("export ")) { lineS.ReplaceAll("export ",""); TObjArray* array = lineS.Tokenize("="); if (array->GetEntries() == 2) { TObjString *strVar = (TObjString *) array->At(0); TObjString *strVal = (TObjString *) array->At(1); if ((strVar)&&(strVal)) { tmp = strVal->GetString(); tmp.ReplaceAll("\"",""); tmp.ReplaceAll("$LD_LIBRARY_PATH",gSystem->Getenv("LD_LIBRARY_PATH")); tmp.ReplaceAll("$DYLD_LIBRARY_PATH",gSystem->Getenv("DYLD_LIBRARY_PATH")); tmp.ReplaceAll(" ",""); gSystem->Unsetenv(strVar->GetString()); gSystem->Setenv(strVar->GetString(), tmp); if (gDebug>0) { Info("P010_TAlien", "setting environment %s=\"%s\"", strVar->GetString().Data(), tmp.Data()); } if (!strVar->GetString().CompareTo("GCLIENT_SERVER_LIST")) { gSystem->Unsetenv("alien_API_SERVER_LIST"); gSystem->Setenv("alien_API_SERVER_LIST", tmp); } } if (array) { delete array; array = 0 ; } } else { // parse the MONA_ stuff TObjArray* array = lineS.Tokenize("\" "); TString key=""; TString val=""; for (int i=0; i< array->GetEntries(); i++) { if ( ((TObjString*) array->At(i))->GetString().Contains("=")) { if (key.Length() && val.Length()) { val.Resize(val.Length()-1); if (gDebug>0) { Info("P010_TAlien", "setting environment %s=\"%s\"", key.Data(), val.Data()); } gSystem->Unsetenv(key); gSystem->Setenv(key, val); key=""; val=""; } key = ((TObjString*) array->At(i))->GetString(); key.ReplaceAll("=",""); } else { val+=((TObjString*) array->At(i))->GetString(); val+=" "; } } if (key.Length() && val.Length()) { if (gDebug>0) { Info("P010_TAlien", "setting environment %s=\"%s\"", key.Data(), val.Data()); } gSystem->Unsetenv(key); 
gSystem->Setenv(key, val); } } } } }
// main GUI
// Builds the TMVA classification plotting control bar: one button per
// diagnostic macro (variables, correlations, classifier outputs,
// efficiencies, method-specific plots), each wired to a ".x macro.C+(...)"
// command acting on the result file fName.
void TMVAGui( const char* fName = "~/cern/ntuples/TMVA.root" )
{
   // Use this script in order to run the various individual macros
   // that plot the output of TMVA (e.g. running TMVAClassification.C),
   // stored in the file "TMVA.root"

   TString curMacroPath(gROOT->GetMacroPath());
   //std::cout << curMacroPath << std::endl;
   // uncomment next line for macros submitted to next root version
   gROOT->SetMacroPath(curMacroPath+".:/:$ROOTSYS/tmva/test/:");
   // for the sourceforge version, including $ROOTSYS/tmva/test in the
   // macro path is a mistake, especially if "./" was not part of path
   // add ../macros to the path (comment out next line for the ROOT version of TMVA)
   // gROOT->SetMacroPath(curMacroPath+":../macros:");

   // Make "-I../" visible so the plotted macros compile with ACLiC (the "+").
   TString curIncludePath=gSystem->GetIncludePath();
   TString newIncludePath=TString("-I../ ")+curIncludePath;
   gSystem->SetIncludePath(newIncludePath);

   cout << "--- Launch TMVA GUI to view input file: " << fName << endl;

   // init
   TMVAGui_inactiveButtons.clear();

   // check if file exist
   TFile* file = TFile::Open( fName );
   if (!file) {
      cout << "==> Abort TMVAGui, please verify filename" << endl;
      return;
   }
   // find all references; cloned so the key list survives the file close
   TMVAGui_keyContent = (TList*)file->GetListOfKeys()->Clone();

   // close file
   file->Close();

   TString defaultRequiredClassifier = "";

   // gROOT->Reset();
   // gStyle->SetScreenFactor(2); // if you have a large screen, select 1,2 or 1.4

   // create the control bar
   TControlBar* cbar = new TControlBar( "vertical", "TMVA Plotting Macros for Classification", 0, 0 );

   const TString buttonType( "button" );

   // configure buttons; ic numbers the buttons, ch sub-letters variants (a,b,c,...)
   Int_t ic = 1;

   // find all input variables types (one entry per variable transformation)
   TList* keylist = GetKeyList( "InputVariables" );
   TListIter it( keylist );
   TObjString* str = 0;
   char ch = 'a';
   while ((str = (TObjString*)it())) {
      TString tmp   = str->GetString();
      TString title = Form( "Input variables '%s'-transformed (training sample)",
                            tmp.ReplaceAll("InputVariables_","").Data() );
      // "Id" = identity transformation, i.e. the untransformed variables
      if (tmp.Contains( "Id" )) title = "Input variables (training sample)";
      ActionButton( cbar,
                    Form( "(%i%c) %s", ic, ch++, title.Data() ),
                    Form( ".x variables.C+(\"%s\",\"%s\",\"%s\")", fName, str->GetString().Data(), title.Data() ),
                    Form( "Plots all '%s'-transformed input variables (macro variables.C(...))", str->GetString().Data() ),
                    buttonType, str->GetString() );
   }
   ic++;

   // correlation scatter plots (one button per variable transformation)
   it.Reset(); ch = 'a';
   while ((str = (TObjString*)it())) {
      TString tmp   = str->GetString();
      TString title = Form( "Input variable correlations '%s'-transformed (scatter profiles)",
                            tmp.ReplaceAll("InputVariables_","").Data() );
      if (tmp.Contains( "Id" )) title = "Input variable correlations (scatter profiles)";
      ActionButton( cbar,
                    Form( "(%i%c) %s", ic, ch++, title.Data() ),
                    Form( ".x CorrGui.C+(\"%s\",\"%s\",\"%s\")", fName, str->GetString().Data(), title.Data() ),
                    Form( "Plots all correlation profiles between '%s'-transformed input variables (macro CorrGui.C(...))", str->GetString().Data() ),
                    buttonType, str->GetString() );
   }

   TString title;
   // coefficients
   title =Form( "(%i) Input Variable Linear Correlation Coefficients", ++ic );
   ActionButton( cbar, title,
                 Form( ".x correlations.C+(\"%s\")", fName ),
                 "Plots signal and background correlation summaries for all input variables (macro correlations.C)", buttonType );

   // classifier output distributions: mvas.C second argument selects the plot type
   title =Form( "(%ia) Classifier Output Distributions (test sample)", ++ic );
   ActionButton( cbar, title,
                 Form( ".x mvas.C+(\"%s\",0)", fName ),
                 "Plots the output of each classifier for the test data (macro mvas.C(...,0))", buttonType, defaultRequiredClassifier );

   title =Form( "(%ib) Classifier Output Distributions (test and training samples superimposed)", ic );
   ActionButton( cbar, title,
                 Form( ".x mvas.C+(\"%s\",CompareType)", fName ),
                 "Plots the output of each classifier for the test (histograms) and training (dots) data (macro mvas.C(...,3))", buttonType, defaultRequiredClassifier );

   title = Form( "(%ic) Classifier Probability Distributions (test sample)", ic );
   ActionButton( cbar, title,
                 Form( ".x mvas.C+(\"%s\",1)", fName ),
                 "Plots the probability of each classifier for the test data (macro mvas.C(...,1))", buttonType, defaultRequiredClassifier );

   title =Form( "(%id) Classifier Rarity Distributions (test sample)", ic );
   ActionButton( cbar, title,
                 Form( ".x mvas.C+(\"%s\",2)", fName ),
                 "Plots the Rarity of each classifier for the test data (macro mvas.C(...,2)) - background distribution should be uniform", buttonType, defaultRequiredClassifier );

   title =Form( "(%ia) Classifier Cut Efficiencies", ++ic );
   ActionButton( cbar, title,
                 Form( ".x mvaeffs.C+(\"%s\")", fName ),
                 "Plots signal and background efficiencies versus cut on classifier output (macro mvaeffs.C)", buttonType, defaultRequiredClassifier );

   title = Form( "(%ib) Classifier Background Rejection vs Signal Efficiency (ROC curve)", ic );
   ActionButton( cbar, title,
                 Form( ".x efficiencies.C+(\"%s\")", fName ),
                 "Plots background rejection vs signal efficiencies (macro efficiencies.C) [\"ROC\" stands for \"Receiver Operation Characteristics\"]", buttonType, defaultRequiredClassifier );

   title = Form( "(%ib) Classifier 1/(Backgr. Efficiency) vs Signal Efficiency (ROC curve)", ic );
   ActionButton( cbar, title,
                 Form( ".x efficiencies.C+(\"%s\",%d)", fName, 3 ),
                 "Plots 1/(background eff.) vs signal efficiencies (macro efficiencies.C) [\"ROC\" stands for \"Receiver Operation Characteristics\"]", buttonType, defaultRequiredClassifier );

   title = Form( "(%i) Parallel Coordinates (requires ROOT-version >= 5.17)", ++ic );
   ActionButton( cbar, title,
                 Form( ".x paracoor.C+(\"%s\")", fName ),
                 "Plots parallel coordinates for classifiers and input variables (macro paracoor.C, requires ROOT >= 5.17)", buttonType, defaultRequiredClassifier );

   // parallel coordinates only exist since ROOT 5.17
#if ROOT_VERSION_CODE < ROOT_VERSION(5,17,0)
   TMVAGui_inactiveButtons.push_back( title );
#endif

   title =Form( "(%i) PDFs of Classifiers (requires \"CreateMVAPdfs\" option set)", ++ic );
   ActionButton( cbar, title,
                 Form( ".x probas.C+(\"%s\")", fName ),
                 "Plots the PDFs of the classifier output distributions for signal and background - if requested (macro probas.C)", buttonType, defaultRequiredClassifier );

   // NOTE(review): "Distributiuons" typo is in the visible button label; left
   // as-is here since doc-only changes must not touch runtime strings.
   title = Form( "(%i) Likelihood Reference Distributiuons", ++ic);
   ActionButton( cbar, title,
                 Form( ".x likelihoodrefs.C+(\"%s\")", fName ),
                 "Plots to verify the likelihood reference distributions (macro likelihoodrefs.C)", buttonType, "Likelihood" );

   // method-specific buttons: last ActionButton argument names the classifier
   // that must exist in the file for the button to be active
   title = Form( "(%ia) Network Architecture (MLP)", ++ic );
   TString call = Form( ".x network.C+g(\"%s\")", fName );
   ActionButton( cbar, title, call,
                 "Plots the MLP weights (macro network.C)", buttonType, "MLP" );

   title = Form( "(%ib) Network Convergence Test (MLP)", ic );
   ActionButton( cbar, title,
                 Form( ".x annconvergencetest.C+(\"%s\")", fName ),
                 "Plots error estimator versus training epoch for training and test samples (macro annconvergencetest.C)", buttonType, "MLP" );

   title = Form( "(%i) Decision Trees (BDT)", ++ic );
   ActionButton( cbar, title,
                 Form( ".x BDT.C+(\"%s\")", fName ),
                 "Plots the Decision Trees trained by BDT algorithms (macro BDT.C(itree,...))", buttonType, "BDT" );

   title = Form( "(%i) Decision Tree Control Plots (BDT)", ++ic );
   ActionButton( cbar, title,
                 Form( ".x BDTControlPlots.C+(\"%s\")", fName ),
                 "Plots to monitor boosting and pruning of decision trees (macro BDTControlPlots.C)", buttonType, "BDT" );

   // ActionButton( cbar,
   //               Form( "(%i) Rule Ensemble Importance Plots (RuleFit)", ++ic ),
   //               Form( ".x rulevis.C(\"%s\",0)", fName ),
   //               "Plots all input variables with rule ensemble weights, including linear terms (macro rulevis.C)",
   //               buttonType, "RuleFit" );

   // NOTE(review): PDEFoam button uses a hard-coded user-specific weights path
   // instead of one derived from fName — confirm this is intentional.
   title = Form( "(%i) Plot Foams (PDEFoam)", ++ic );
   ActionButton( cbar, title,
                 ".x PlotFoams.C+(\"~/cern/weights/TMVAClassification_PDEFoam.weights_foams.root\")",
                 "Plot Foams (macro PlotFoams.C)", buttonType, "PDEFoam" );

   title = Form( "(%i) General Boost Control Plots", ++ic );
   ActionButton( cbar, title,
                 Form( ".x BoostControlPlots.C+(\"%s\")", fName ),
                 "Plots to monitor boosting of general classifiers (macro BoostControlPlots.C)", buttonType, "Boost" );

   cbar->AddSeparator();
   cbar->AddButton( Form( "(%i) Quit", ++ic ), ".q", "Quit", buttonType );

   // set the style
   cbar->SetTextColor("black");
   // there seems to be a bug in ROOT: font jumps back to default after pressing on >2 different buttons
   // cbar->SetFont("-adobe-helvetica-bold-r-*-*-12-*-*-*-*-*-iso8859-1");

   // draw
   cbar->Show();

   // indicate inactive buttons (state 3 = disabled)
   for (UInt_t i=0; i<TMVAGui_inactiveButtons.size(); i++) cbar->SetButtonState(TMVAGui_inactiveButtons[i], 3 );
   if (TMVAGui_inactiveButtons.size() > 0) {
      cout << "=== Note: inactive buttons indicate classifiers that were not trained, ===" << endl;
      cout << "=== or functionalities that were not invoked during the training ===" << endl;
   }

   gROOT->SaveContext();
}
// Draw systematic-variation histograms (nominal, -1 sigma, +1 sigma) for the
// SIG and SB observables of a workspace file, in absolute event counts and as
// fractional (var-nom)/nom deviations, and save both canvases as PDFs under
// outputfiles/syst-plots/.
//
//  inwsfile      : path of the workspace .root file (histograms are loaded
//                  into gDirectory by loadHist())
//  syst_name     : systematic tag used in the histogram names
//                  h_syst_<syst>_<msig|msb>_met<1..4>_<nom|m1s|p1s>
//  sig_mass      : if > 0, substituted into the histogram titles
//  max_msig_evts : y-axis maximum for the SIG event plots (< 0 = auto-scale)
//  max_msb_evts  : y-axis maximum for the SB event plots  (< 0 = auto-scale)
void draw_syst_hists( const char* inwsfile, const char* syst_name, int sig_mass = 0, float max_msig_evts = 8., float max_msb_evts = 16. ) {

   gStyle -> SetOptStat(0) ;
   gStyle -> SetPadLeftMargin(0.15) ;
   gStyle -> SetTitleW( 0.9 ) ;

   // Clear any leftover h* objects, then (re)load all histograms of the file.
   gDirectory -> Delete( "h*" ) ;
   loadHist( inwsfile ) ;

   TLine* line = new TLine() ;
   line -> SetLineStyle(2) ;

   //----

   gSystem -> Exec( "mkdir -p outputfiles/syst-plots" ) ;

   // Derive the PDF base name from the last path component, minus ".root".
   TString infile_ts( inwsfile ) ;
   TObjArray* tokens = infile_ts.Tokenize("/") ;
   TObjString* tos = (TObjString*) (tokens -> At( tokens->GetEntries() - 1 ) ) ;
   TString fname( tos->GetString() ) ;
   printf( "ws file name : %s\n", fname.Data() ) ;
   TString pdfbasename = fname.ReplaceAll(".root","") ;
   printf(" pdf base name : %s\n", pdfbasename.Data() ) ;

   // ssbi = 0 -> signal region ("msig"), ssbi = 1 -> sideband ("msb").
   char sigsb_str[2][10] = { "msig", "msb" } ;

   for ( int ssbi=0; ssbi<2; ssbi++ ) {

      // Reuse the canvases when the macro is rerun in the same session.
      char cname[100] ;
      sprintf( cname, "can_evts_%s", sigsb_str[ssbi] ) ;
      TCanvas* can_evts = (TCanvas*) gDirectory -> FindObject( cname ) ;
      if ( can_evts == 0x0 ) {
         if ( ssbi == 0 ) { can_evts = new TCanvas( cname, "SIG observables", 1100, 300 ) ; }
         else             { can_evts = new TCanvas( cname, "SB observables", 1100, 300 ) ; }
      }
      sprintf( cname, "can_frac_%s", sigsb_str[ssbi] ) ;
      TCanvas* can_frac = (TCanvas*) gDirectory -> FindObject( cname ) ;
      if ( can_frac == 0x0 ) {
         if ( ssbi == 0 ) { can_frac = new TCanvas( cname, "SIG observables, syst (%)", 1100, 300 ) ; }
         else             { can_frac = new TCanvas( cname, "SB observables, syst (%)", 1100, 300 ) ; }
      }

      // One pad per MET bin (1..4).
      can_evts -> Clear() ;
      can_evts -> Divide(4,1) ;
      can_frac -> Clear() ;
      can_frac -> Divide(4,1) ;

      for ( int ci=1; ci<=4; ci++ ) {

         char hname[1000] ;
         char hnamev[1000] ;
         char hnamenf[1000] ;

         // Nominal histogram (plus an unfilled clone drawn on top later).
         sprintf( hname, "h_syst_%s_%s_met%d_nom", syst_name, sigsb_str[ssbi], ci ) ;
         TH1F* hist_nom = (TH1F*) gDirectory -> FindObject( hname ) ;
         if ( hist_nom == 0x0 ) { printf("\n\n *** Can't find %s\n\n", hname ) ; return ; }
         sprintf( hnamenf, "%s_nf", hname ) ;
         TH1F* hist_nom_nf = (TH1F*) hist_nom -> Clone( hnamenf ) ;

         // -1 sigma variation and its fractional-deviation clone.
         sprintf( hname, "h_syst_%s_%s_met%d_m1s", syst_name, sigsb_str[ssbi], ci ) ;
         TH1F* hist_m1s = (TH1F*) gDirectory -> FindObject( hname ) ;
         if ( hist_m1s == 0x0 ) { printf("\n\n *** Can't find %s\n\n", hname ) ; return ; }
         sprintf( hnamev, "%s_var", hname ) ;
         TH1F* hist_m1s_var = (TH1F*) hist_m1s -> Clone( hnamev ) ;

         // +1 sigma variation and its fractional-deviation clone.
         sprintf( hname, "h_syst_%s_%s_met%d_p1s", syst_name, sigsb_str[ssbi], ci ) ;
         TH1F* hist_p1s = (TH1F*) gDirectory -> FindObject( hname ) ;
         if ( hist_p1s == 0x0 ) { printf("\n\n *** Can't find %s\n\n", hname ) ; return ; }
         sprintf( hnamev, "%s_var", hname ) ;
         TH1F* hist_p1s_var = (TH1F*) hist_p1s -> Clone( hnamev ) ;

         // Adjust titles: replace the ", nominal" / ", -1 sigma" suffix with
         // the signal mass (if given) or drop it.
         TString htitle ;

         htitle = hist_nom -> GetTitle() ;
         if ( sig_mass > 0 ) {
            char sigmassstr[1000] ;
            sprintf( sigmassstr, ", higgsino mass = %d", sig_mass ) ;
            htitle.ReplaceAll( ", nominal", sigmassstr ) ;
         } else {
            htitle.ReplaceAll( ", nominal", "" ) ;
         }
         hist_nom -> SetTitle( htitle ) ;

         htitle = hist_m1s_var -> GetTitle() ;
         if ( sig_mass > 0 ) {
            char sigmassstr[1000] ;
            sprintf( sigmassstr, ", higgsino mass = %d", sig_mass ) ;
            htitle.ReplaceAll( ", -1 sigma", sigmassstr ) ;
         } else {
            htitle.ReplaceAll( ", -1 sigma", "" ) ;
         }
         hist_m1s_var -> SetTitle( htitle ) ;

         hist_nom -> SetLineWidth( 2 ) ;
         hist_nom_nf -> SetLineWidth( 2 ) ;
         hist_m1s -> SetLineWidth( 2 ) ;
         hist_p1s -> SetLineWidth( 2 ) ;
         // hist_m1s -> SetLineColor( 4 ) ;
         // hist_p1s -> SetLineColor( 2 ) ;
         hist_m1s -> SetLineColor( 2 ) ;   // -1 sigma in red
         hist_p1s -> SetLineColor( 4 ) ;   // +1 sigma in blue
         hist_nom -> SetFillColor( 18 ) ;

         // y-axis maximum: fixed from the arguments, or auto (1.2 * largest
         // maximum of the three histograms) when the argument is negative.
         float max_evts(0.) ;
         if ( ssbi == 0 ) { max_evts = max_msig_evts ; } else { max_evts = max_msb_evts ; }
         float hmax(0.) ;
         if ( max_evts < 0 ) {
            if ( 1.2*(hist_nom->GetMaximum()) > hmax ) { hmax = 1.2*(hist_nom->GetMaximum()) ; }
            if ( 1.2*(hist_m1s->GetMaximum()) > hmax ) { hmax = 1.2*(hist_m1s->GetMaximum()) ; }
            if ( 1.2*(hist_p1s->GetMaximum()) > hmax ) { hmax = 1.2*(hist_p1s->GetMaximum()) ; }
         } else {
            hmax = max_evts ;
         }
         hist_nom -> SetMaximum( hmax ) ;

         // Fill the fractional-deviation histograms: (var - nom)/nom per bin,
         // zero where the nominal bin is empty.
         for ( int hbi=1; hbi <= hist_nom -> GetNbinsX(); hbi++ ) {
            float nom_val, p1s_val, m1s_val ;
            nom_val = hist_nom -> GetBinContent( hbi ) ;
            p1s_val = hist_p1s -> GetBinContent( hbi ) ;
            m1s_val = hist_m1s -> GetBinContent( hbi ) ;
            hist_p1s_var -> SetBinContent( hbi, 0. ) ;
            hist_m1s_var -> SetBinContent( hbi, 0. ) ;
            if ( nom_val > 0 ) {
               hist_p1s_var -> SetBinContent( hbi, (p1s_val - nom_val)/nom_val ) ;
               hist_m1s_var -> SetBinContent( hbi, (m1s_val - nom_val)/nom_val ) ;
               printf( " hbi=%d : p1s, nom, m1s : %.2f %.2f %.2f\n", hbi, p1s_val, nom_val, m1s_val ) ;
            }
         } // hbi

         hist_m1s_var -> SetMinimum( -0.3 ) ;
         hist_m1s_var -> SetMaximum( 0.3 ) ;
         // hist_m1s_var -> SetLineColor( 4 ) ;
         // hist_p1s_var -> SetLineColor( 2 ) ;
         // hist_m1s_var -> SetLineWidth( 2 ) ;
         // hist_p1s_var -> SetLineWidth( 2 ) ;
         hist_m1s_var -> SetLineColor( 2 ) ;
         hist_p1s_var -> SetLineColor( 4 ) ;
         hist_m1s_var -> SetLineWidth( 2 ) ;
         hist_p1s_var -> SetLineWidth( 2 ) ;
         // hist_m1s_var -> SetFillColor( 4 ) ;
         hist_m1s_var -> SetFillColor( 2 ) ;
         hist_m1s_var -> SetFillStyle( 3354 ) ;
         // hist_p1s_var -> SetFillColor( 2 ) ;
         hist_p1s_var -> SetFillColor( 4 ) ;
         hist_p1s_var -> SetFillStyle( 3345 ) ;

         hist_nom -> SetTitleOffset( 1.5, "y" ) ;
         hist_m1s_var -> SetTitleOffset( 1.5, "y" ) ;
         hist_nom -> SetTitleSize( 0.05, "y" ) ;
         hist_m1s_var -> SetTitleSize( 0.05, "y" ) ;
         hist_nom -> SetLabelSize( 0.07, "x" ) ;
         hist_m1s_var -> SetLabelSize( 0.07, "x" ) ;
         hist_nom -> SetLabelSize( 0.05, "y" ) ;
         hist_m1s_var -> SetLabelSize( 0.05, "y" ) ;
         hist_nom -> SetLabelOffset( 0.01, "x" ) ;
         hist_m1s_var -> SetLabelOffset( 0.01, "x" ) ;
         hist_nom -> SetLabelOffset( 0.01, "y" ) ;
         hist_m1s_var -> SetLabelOffset( 0.01, "y" ) ;
         hist_nom -> SetYTitle( "Events at theory Xsec" ) ;
         hist_m1s_var -> SetYTitle( "Systematic (var-nom)/nom" ) ;

         // Event-count pad: nominal (filled), variations, unfilled nominal
         // clone on top, then redraw the axis over the fills.
         can_evts -> cd( ci ) ;
         hist_nom -> Draw() ;
         hist_m1s -> Draw("same" ) ;
         hist_p1s -> Draw("same" ) ;
         hist_nom_nf -> Draw("same" ) ;
         hist_nom -> Draw("same axis" ) ;

         // Fractional-deviation pad, with a dashed line at zero.
         can_frac -> cd( ci ) ;
         hist_m1s_var -> Draw( ) ;
         hist_p1s_var -> Draw("same" ) ;
         line -> DrawLine( hist_m1s_var->GetBinLowEdge(1), 0., hist_m1s_var->GetBinLowEdge( hist_nom -> GetNbinsX() + 1 ), 0. ) ;

      } // ci

      char pdfname[10000] ;
      sprintf( pdfname, "outputfiles/syst-plots/%s-syst-%s-events-%s.pdf", pdfbasename.Data(), syst_name, sigsb_str[ssbi] ) ;
      can_evts -> SaveAs( pdfname ) ;
      sprintf( pdfname, "outputfiles/syst-plots/%s-syst-%s-frac-%s.pdf", pdfbasename.Data(), syst_name, sigsb_str[ssbi] ) ;
      can_frac -> SaveAs( pdfname ) ;

   } // ssbi.

} // draw_syst_hists
// Re-fit at the best-fit NBD parameters (mubest[0], kbest[0]) and compute the
// centrality variables: simulate a multiplicity distribution by convolving a
// negative binomial per Ncoll over the Glauber tree, find the centrality-bin
// cut points, and accumulate per-bin <Ncoll>, <Npart>, <B> and the Npart
// distributions.
//
// Relies on class members not visible here (mumin/mumax/kmin/kmax, mustep/
// kstep, mubest/kbest, dataname/histoname/Glaubername, method, N, centbin/
// centbin_, kpoint/kpoint_, NcollAver/NpartAver/BAver, Npartdis, findpoint,
// initmu/initk/fit) — semantics assumed from usage; confirm in the class.
void NBD::calcvar(){
    // Save the current scan ranges, pin the scan to the best-fit point,
    // re-run the fit there, then restore the original ranges.
    double mumin_temp = mumin;
    double mumax_temp = mumax;
    double kmin_temp = kmin;
    double kmax_temp = kmax;
    this->initmu(mubest[0],mubest[0],mustep);
    this->initk(kbest[0],kbest[0],kstep);
    this->fit();
    this->initmu(mumin_temp,mumax_temp,mustep);
    this->initk(kmin_temp,kmax_temp,kstep);

    // Observed multiplicity histogram and the Glauber MC tree.
    TFile *fdata = TFile::Open(dataname.GetName());
    TH1D *histo_obs = (TH1D*)fdata->Get(histoname.GetName());
    TFile *fGlauber = TFile::Open(Glaubername.GetName());
    int binnum = histo_obs->GetNbinsX();
    double Minx = histo_obs->GetXaxis()->GetXmin();
    double Maxx = histo_obs->GetXaxis()->GetXmax();
    // Simulated distribution uses the same binning as the observed one.
    TH1D *histo_exp = new TH1D("histo_exp","Simulated distribution;Multiplicity;# of events",binnum,Minx,Maxx);

    // Negative binomial P(x) with [1]=k, [2]=mu ([0] is a normalization).
    TF1 *NBD_fun = new TF1("NBD_fun","[0]*TMath::Gamma(x+[1])/(TMath::Gamma(x+1)*TMath::Gamma([1]))*TMath::Power([2]/[1],x)/TMath::Power([2]/[1]+1,x+[1])",0,100);
    NBD_fun->SetParameter(0,1);
    NBD_fun->SetParameter(1,kbest[0]);
    NBD_fun->SetParameter(2,mubest[0]);

    TTree *t = (TTree*) fGlauber ->Get("nt_Pb_Pb");
    Float_t Ncoll, Npart, B;
    Long_t Nevent;
    t->SetBranchAddress("Ncoll",&Ncoll);
    t->SetBranchAddress("Npart",&Npart);
    t->SetBranchAddress("B",&B);
    Nevent = (Long_t) t->GetEntries();
    Long_t Ev;
    Int_t Bino;
    Double_t Para, Bi_Para, Mult;

    // Scratch tree file (name keyed to the best-fit parameters); it is
    // removed again at the end of this method.
    TString treestr=Form("tree%.f.root",kbest[0]*mubest[0]*1000);
    TFile *treefile = new TFile(treestr,"Recreate");
    TTree *GlauEvent = new TTree("GlauEvent","Glauber Events");
    // "Mult" is filled from Para during the write loop.
    GlauEvent -> Branch("Mult",&Para,"Mult/D");
    GlauEvent -> Branch("Ncoll",&Ncoll,"Ncoll/F");
    GlauEvent -> Branch("Npart",&Npart,"Npart/F");
    GlauEvent -> Branch("B",&B,"B/F");

    // For each Glauber event, sample the NBD once per binary collision and
    // sum into the event multiplicity Para.
    for(Ev=0; Ev<Nevent; Ev++){
        //if(Ev%100000==0) cout<<"Have run "<<Ev<<" events"<<endl;
        t->GetEntry(Ev);
        Para = 0; //make sure that Para doesn't accumulate through loops
        for(Bino=0; Bino<Ncoll; Bino++){
            // Bi_Para = unr.SampleDiscr();
            Bi_Para = NBD_fun->GetRandom();
            Para += Bi_Para;
        }
        histo_exp->Fill(Para);
        GlauEvent->Fill();
    }

    // Re-point the branches for reading back; "Mult" now reads into Mult
    // (a Double_t), not Para.
    GlauEvent->SetBranchAddress("Ncoll",&Ncoll);
    GlauEvent->SetBranchAddress("Npart",&Npart);
    GlauEvent->SetBranchAddress("B",&B);
    GlauEvent->SetBranchAddress("Mult",&Mult);

    Int_t i;
    if(method[0]==0){
        /* double Glau_array[1000000]; Long_t count_Ev=0; for(Ev=0;Ev<Nevent;Ev++){ GlauEvent->GetEntry(Ev); if(Mult>=Minx&&Mult<=Maxx){ count_Ev++; Glau_array[count_Ev-1]=Mult; } } int low=0; int high=count_Ev-1; int k0[N+1]; for(i=0;i<N+1;i++){ k0[i]=count_Ev*(1-centbin[i]); kpoint[i]=findkth(Glau_array,low,high,k0[i]); count_Ev=0; for(Ev=0;Ev<Nevent;Ev++){ GlauEvent->GetEntry(Ev); if(Mult>=Minx&&Mult<=Maxx){ count_Ev++; Glau_array[count_Ev-1]=Mult; } } } */
        // Method 0: cut points from the given centrality fractions, for both
        // the simulated and the observed distribution.
        for(i=0;i<N+1;i++){
            kpoint[i] = findpoint(histo_exp,centbin[i]);
            kpoint_[i] = findpoint(histo_obs,centbin[i]);
        }
    }

    // Per-centrality-bin Npart distributions; hNpart[N] holds all events.
    TH1D* hNpart[N+1];
    for(i=0; i<N; i++){ //Initialization
        hNpart[i] = new TH1D(Form("Npart_%d-%d",i,i+1),Form("Npart distribution for %dth bin",i),4000,0,4000);
    }
    hNpart[N] = new TH1D(Form("Npart_0-%d",N),Form("Npart distribution for all bin"),4000,0,4000);

    std::vector<double> PartEvent(N);
    std::vector<double> PartEvent_(N+1);
    double TotalEvent=0;

    // Accumulate per-bin sums of Ncoll/Npart/B between consecutive cut points.
    for(Ev=0; Ev<Nevent; Ev++){
        //if(Ev%100000==0) cout<<"Have run "<<Ev<<" events"<<endl;
        GlauEvent->GetEntry(Ev);
        for(i=0; i<N; i++){
            if(Mult>kpoint[i]&&Mult<=kpoint[i+1]){
                NcollAver[i] += Ncoll;
                NpartAver[i] += Npart;
                BAver[i] += B;
                PartEvent[i]++;
                TotalEvent++;
                hNpart[i]->Fill(Npart);
            }
        }
        hNpart[N]->Fill(Npart);
        // For non-zero methods: count events above each cut point to derive
        // the centrality fractions afterwards.
        for(i=0; i<N+1; i++){
            if(method[0]!=0 && Mult>kpoint[i]) PartEvent_[i]++;
        }
    }

    if(method[0]!=0){
        // Centrality fractions from simulation (centbin) and from the
        // observed distribution above the same cut points (centbin_).
        for(i=0; i<N+1; i++){
            centbin[i]=(double)PartEvent_[i]/TotalEvent;
            centbin_[i]=(double)histo_obs->Integral(histo_obs->GetXaxis()->FindBin(kpoint[i]),histo_obs->GetXaxis()->GetLast())/histo_obs->Integral();
        }
    }

    // Turn the sums into per-bin averages and collect the Npart histograms.
    for(i=0; i<N; i++){
        if(PartEvent[i]){
            NcollAver[i]/=PartEvent[i];
            NpartAver[i]/=PartEvent[i];
            BAver[i]/=PartEvent[i];
            //hNpart[i]->Scale(1.0/PartEvent[i]);
            Npartdis->Add(hNpart[i]);
        }
    }
    //hNpart[N]->Scale(1.0/TotalEvent);
    Npartdis->Add(hNpart[N]);

    // Drop the scratch tree file.
    treefile->Close();
    remove(treestr);
}
/********************************************************************************
 * Copyright (C) 2014 GSI Helmholtzzentrum fuer Schwerionenforschung GmbH       *
 *                                                                              *
 * This software is distributed under the terms of the                          *
 * GNU Lesser General Public Licence (LGPL) version 3,                          *
 * copied verbatim in the file "LICENSE"                                        *
 ********************************************************************************/

// Reconstruction macro for the FairRoot Tutorial4 detector: reads the
// simulated events from ./data/testrun_[align_]<engine>.root, runs the ideal
// hit producer, a straight-line track fitter and the Millepede writer, and
// stores the result in ./data/testreco_[align_]<engine>.root.
//
//  mcEngine  : MC engine tag used in the file names (e.g. "TGeant3")
//  AlignDone : if true, use the "align_" variants of the input/output files
//
// Fix vs. the previous revision: parIo2->open(parFileList, "in") was called
// twice in a row; the duplicate call has been removed.
void run_reco( TString mcEngine="TGeant3", Bool_t AlignDone=true )
{
  // ----  Load libraries   -------------------------------------------------
  FairLogger *logger = FairLogger::GetLogger();
  // logger->SetLogFileName("MyLog.log");
  logger->SetLogToScreen(kTRUE);
  // logger->SetLogToFile(kTRUE);
  // logger->SetLogVerbosityLevel("HIGH");
  // logger->SetLogFileLevel("DEBUG4");
  logger->SetLogScreenLevel("INFO");

  // Verbosity level (0=quiet, 1=event level, 2=track level, 3=debug)
  Int_t iVerbose = 0; // just forget about it, for the moment

  // File-name fragment selecting the aligned/unaligned data set.
  TString Align= "";
  if (AlignDone) { Align= "align_"; }

  TString InDir = "./data/";

  // Input file (MC Events)
  TString inFile ="testrun_";
  inFile = InDir +inFile + Align + mcEngine + ".root";

  // Output file name
  TString outFile ="testreco_";
  outFile = InDir + outFile + Align + mcEngine + ".root";

  // Parameter file
  TString parFile ="testparams_";
  parFile = InDir + parFile + Align + mcEngine + ".root";

  // Millepede file name
  TString milleFile ="testmille_";
  milleFile = InDir + milleFile + Align + mcEngine;

  // ASCII parameter file list (digitization parameters).
  // NOTE: tutDetDigiFile is a stack object added by address; this is safe
  // only because parFileList is consumed before this function returns.
  TList *parFileList = new TList();
  TString workDir = gSystem->Getenv("VMCWORKDIR");
  TString paramDir = workDir + "/simulation/Tutorial4/parameters/";
  TString paramFile = paramDir + "example.par";
  TObjString tutDetDigiFile;
  tutDetDigiFile.SetString(paramFile);
  parFileList->Add(&tutDetDigiFile);

  // -----   Timer   --------------------------------------------------------
  TStopwatch timer;

  // -----   Reconstruction run   -------------------------------------------
  FairRunAna *fRun= new FairRunAna();
  FairFileSource *fFileSource = new FairFileSource(inFile);
  fRun->SetSource(fFileSource);
  fRun->SetSink(new FairRootFileSink(outFile));

  // Runtime database: ROOT parameter file as first input (and output),
  // ASCII parameter list as second input.
  FairRuntimeDb* rtdb = fRun->GetRuntimeDb();
  FairParRootFileIo* parInput1 = new FairParRootFileIo();
  FairParAsciiFileIo* parIo2 = new FairParAsciiFileIo();
  parInput1->open(parFile.Data());
  parIo2->open(parFileList, "in");   // was opened twice before; once suffices
  rtdb->setFirstInput(parInput1);
  rtdb->setSecondInput(parIo2);
  rtdb->setOutput(parInput1);
  rtdb->saveOutput();

  // -----   TorinoDetector hit  producers   --------------------------------
  FairTutorialDet4HitProducerIdealMisalign* hitProducer = new FairTutorialDet4HitProducerIdealMisalign();
  hitProducer->DoMisalignment(kFALSE);
  fRun->AddTask(hitProducer);

  FairTutorialDet4StraightLineFitter* fitter = new FairTutorialDet4StraightLineFitter();
  fitter->SetVersion(2);
  fRun->AddTask(fitter);

  FairTutorialDet4MilleWriter* writer = new FairTutorialDet4MilleWriter();
  // writer->SetWriteAscii(kTRUE);
  writer->SetVersion(2);
  writer->SetFileName(milleFile);
  fRun->AddTask(writer);

  fRun->Init();

  timer.Start();
  fRun->Run();

  // -----   Finish   -------------------------------------------------------
  cout << endl << endl;

  // Extract the maximal used memory an add is as Dart measurement
  // This line is filtered by CTest and the value send to CDash
  FairSystemInfo sysInfo;
  Float_t maxMemory=sysInfo.GetMaxMemory();
  cout << "<DartMeasurement name=\"MaxMemory\" type=\"numeric/double\">";
  cout << maxMemory;
  cout << "</DartMeasurement>" << endl;

  timer.Stop();
  Double_t rtime = timer.RealTime();
  Double_t ctime = timer.CpuTime();

  Float_t cpuUsage=ctime/rtime;
  cout << "<DartMeasurement name=\"CpuLoad\" type=\"numeric/double\">";
  cout << cpuUsage;
  cout << "</DartMeasurement>" << endl;

  cout << endl << endl;
  cout << "Output file is "    << outFile << endl;
  cout << "Parameter file is " << parFile << endl;
  cout << "Real time " << rtime << " s, CPU time " << ctime << "s" << endl << endl;
  cout << "Macro finished successfully." << endl;

  // ------------------------------------------------------------------------
}
//_____________________________________________________________________________ void FnormMacro( const char* filename="../LHC15g.MuMu.1.root", const char* associatedSimFileName="", const char* associatedSimFileName2="", const char* beamYear="PbPb2011",const int DebugLevel =0) { // //_____ FNorm // analysis.ComputeIntFnormFromCounters("",kFALSE); // //_____ AliAnalysisMuMu ana(filename,associatedSimFileName,associatedSimFileName2,beamYear); AliLog::SetGlobalDebugLevel(DebugLevel); if (!ana.OC() || !ana.CC()) { AliError("No mergeable/counter collection. Consider Upgrade()"); return ; } else { cout << " ================================================================ " << endl; cout << " Compute Mean Fnorm From Counters " << endl; cout << " ================================================================ " << endl; } // Get configuration settings TObjArray* eventTypeArray = ana.Config()->GetListElements(AliAnalysisMuMuConfig::kEventSelectionList,IsSimulation()); TObjArray* triggerMuonArray = ana.Config()->GetListElements(AliAnalysisMuMuConfig::kDimuonTriggerList,IsSimulation()); TObjArray* triggerMBArray = ana.Config()->GetListElements(AliAnalysisMuMuConfig::kMinbiasTriggerList,IsSimulation()); TObjArray* centralityArray = ana.Config()->GetListElements(AliAnalysisMuMuConfig::kCentralitySelectionList, IsSimulation()); // Iterator for loops TIter nextTriggerMuon(triggerMuonArray); TIter nextTriggerMB(triggerMBArray); TIter nextEventType(eventTypeArray); TIter nextCentrality(centralityArray); // Strings TObjString* striggerMuon; TObjString* striggerMB; TObjString* seventType; TObjString* scentrality; //Pointers on histo TH1*h(0x0); TH1*h1(0x0); TH1*h2(0x0); Double_t FNormOverStat(0.); Double_t FNormTotError(0.); Double_t FNormTotErrorInverse(0.); Double_t FNormTotErrorSys(0.); Double_t Norm(1.); Int_t n =0; //counter nextEventType.Reset(); // Loop on each envenType (see MuMuConfig) //============================================================================== while ( ( 
seventType = static_cast<TObjString*>(nextEventType())) ) { AliDebug(1,Form("EVENTTYPE %s",seventType->String().Data())); nextTriggerMuon.Reset(); // Loop on each Muon trigger (see MuMuConfig) //============================================================================== while ( ( striggerMuon = static_cast<TObjString*>(nextTriggerMuon())) ) { AliDebug(1,Form("-MUON TRIGGER %s",striggerMuon->String().Data())); nextTriggerMB.Reset(); // Loop on each MB trigger (not the ones in MuMuConfig but the ones set) //============================================================================== while ( ( striggerMB = static_cast<TObjString*>(nextTriggerMB())) ) { AliDebug(1,Form("-- MB PAIRCUT %s",striggerMB->String().Data())); nextCentrality.Reset(); // Loop on each centrality //============================================================================== while ( ( scentrality = static_cast<TObjString*>(nextCentrality()) ) ) { TString id(Form("/FNORM-%s/%s/%s/PbPb",striggerMuon->String().Data(),seventType->String().Data(),scentrality->String().Data())); // Path where are saved histos in the mergeable collection h = OC()->Histo(id.Data(),Form("hFNormIntVSrun_%s",striggerMB->String().Data())); if (!h) { AliDebug(1,Form("Can't get histo %s/hFNormIntVSrun_%s",id.Data(),striggerMB->String().Data())); continue; } h1 = OC()->Histo(id.Data(),Form("hFNormInt_%s",striggerMB->String().Data())); if (!h1) { AliDebug(1,Form("Can't get histo %s/hFNormInt_%s",id.Data(),striggerMB->String().Data())); continue; } h2 = OC()->Histo(id.Data(),Form("hFNormIntSys_%s",striggerMB->String().Data())); if (!h2) { AliDebug(1,Form("Can't get histo %s/hFNormIntSys_%s",id.Data(),striggerMB->String().Data())); continue; } cout << Form("Fnorm from %s/%s added",id.Data(),h1->GetName()) << endl; cout << Form("Fnorm from %s/%s added",id.Data(),h2->GetName()) << endl; // Normalise with respect to centrality if (scentrality->String().Contains("V0M_00.00_90.00")) { Norm = 1.; FNormOverStat = FNormOverStat + 
(Norm*h1->GetBinContent(1)) /(TMath::Power(Norm *h1->GetBinError(1),2.)); FNormTotError = FNormTotError + 1./(TMath::Power(Norm*h1->GetBinError(1),2.)); FNormTotErrorInverse = FNormTotErrorInverse + 1./(TMath::Power(Norm*h1->GetBinError(1),-2.)); FNormTotErrorSys = FNormTotErrorSys + 1./(TMath::Power(Norm*h2->GetBinContent(1),2.)); cout <<"--- Quantities from histogram : " << endl; cout <<" - Norm = " << Norm << endl; cout <<" - FNormHisto = " << h1->GetBinContent(1) << endl; cout <<" - FNormHistoError = " << h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << h2->GetBinContent(1) << endl; cout <<"--- Quantities (normalized) from histogram: " << endl; cout <<" - FNormHisto = " << Norm*h1->GetBinContent(1) << endl; cout <<" - FNormHistoError = " << Norm*h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << Norm*h2->GetBinContent(1) << endl; cout <<"--- After addition : " << endl; cout <<" - FNormOverStat = " << FNormOverStat << endl; cout <<" - FNormTotError = " << FNormTotError << endl; cout <<" - FNormTotErrorInverse = " << FNormTotErrorInverse << endl; cout <<" - FNormTotErrorSys = " << FNormTotErrorSys << endl; } else if (scentrality->String().Contains("V0M_10.00_50.00")) { Norm = (1./0.4)*0.445*0.9; FNormOverStat = FNormOverStat + (Norm*h1->GetBinContent(1)) /(TMath::Power(Norm *h1->GetBinError(1),2.)); FNormTotError = FNormTotError + 1./(TMath::Power(Norm*h1->GetBinError(1),2.)); FNormTotErrorInverse = FNormTotErrorInverse + 1./(TMath::Power(Norm*h1->GetBinError(1),-2.)); FNormTotErrorSys = FNormTotErrorSys + 1./(TMath::Power(Norm*h2->GetBinContent(1),2.)); cout <<"--- Quantities from histogram : " << endl; cout <<" - Norm = " << Norm << endl; cout <<" - FNormHisto = " << h1->GetBinContent(1) << endl; cout <<" - FNormHistoError = " << h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << h2->GetBinContent(1) << endl; cout <<"--- Quantities (normalized) from histogram: " << endl; cout <<" - FNormHisto = " << Norm*h1->GetBinContent(1) << 
endl; cout <<" - FNormHistoError = " << Norm*h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << Norm*h2->GetBinContent(1) << endl; cout <<"--- After addition : " << endl; cout <<" - FNormOverStat = " << FNormOverStat << endl; cout <<" - FNormTotError = " << FNormTotError << endl; cout <<" - FNormTotErrorInverse = " << FNormTotErrorInverse << endl; cout <<" - FNormTotErrorSys = " << FNormTotErrorSys << endl; } else if (scentrality->String().Contains("V0M_00.00_07.50")) { Norm = (1./0.075)*0.443*0.9; FNormOverStat = FNormOverStat + (Norm*h1->GetBinContent(1)) /(TMath::Power(Norm *h1->GetBinError(1),2.)); FNormTotError = FNormTotError + 1./(TMath::Power(Norm*h1->GetBinError(1),2.)); FNormTotErrorInverse = FNormTotErrorInverse + 1./(TMath::Power(Norm*h1->GetBinError(1),-2.)); FNormTotErrorSys = FNormTotErrorSys + 1./(TMath::Power(Norm*h2->GetBinContent(1),2.)); cout <<"--- Quantities from histogram : " << endl; cout <<" - Norm = " << Norm << endl; cout <<" - FNormHisto = " << h1->GetBinContent(1) << endl; cout <<" - FNormHistoError = " << h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << h2->GetBinContent(1) << endl; cout <<"--- Quantities (normalized) from histogram: " << endl; cout <<" - FNormHisto = " << Norm*h1->GetBinContent(1) << endl; cout <<" - FNormHistoError = " << Norm*h1->GetBinError(1) << endl; cout <<" - FNormHistoSys = " << Norm*h2->GetBinContent(1) << endl; cout <<"--- After addition : " << endl; cout <<" - FNormOverStat = " << FNormOverStat << endl; cout <<" - FNormTotError = " << FNormTotError << endl; cout <<" - FNormTotErrorInverse = " << FNormTotErrorInverse << endl; cout <<" - FNormTotErrorSys = " << FNormTotErrorSys << endl; } else { AliError("Check this method for centrality selection !"); return; } n++; } } } } cout << "Mean FNorm computed from " << n <<" results = " << FNormOverStat/FNormTotError << " +/- " << TMath::Sqrt(FNormTotErrorInverse) << " (stat) +/-" << TMath::Sqrt(FNormTotErrorSys) << " (sys) " <<endl; delete 
triggerMuonArray ; delete triggerMBArray ; delete eventTypeArray ; delete centralityArray ; }
//______________________________________________________________________________
ButtonWindow::ButtonWindow()
    : TGMainFrame(gClient->GetRoot(), 600, 500)
{
    // Constructor: builds the "CaLib Control Panel" main window.
    // Layout, top to bottom:
    //   - configuration group (calibration combo, module combo, run-set list)
    //   - a row of calibration control buttons
    //   - manual and automatic navigation groups
    // Widget signals are wired to ButtonWindow slots via Connect().

    SetWindowName("CaLib Control Panel");

    // ---------------------------------------------------------------------------------
    // calibration and set configuration group
    TGGroupFrame* config_frame = new TGGroupFrame(this, "Calibration and set configuration", kHorizontalFrame);
    config_frame->SetTitlePos(TGGroupFrame::kLeft);

    TGVerticalFrame* ver_frame_1 = new TGVerticalFrame(config_frame);

    // calibration selection
    fCBox_Calibration = new TGComboBox(ver_frame_1, "Choose calibration");
    fCBox_Calibration->Resize(260, 25);
    ver_frame_1->AddFrame(fCBox_Calibration, new TGLayoutHints(kLHintsLeft, 0, 5, 10, 0));

    // fill calibrations (read from the CaLib MySQL database)
    gCalibrations = TCMySQLManager::GetManager()->GetAllCalibrations();
    for (Int_t i = 0; i < gCalibrations->GetSize(); i++)
    {
        TObjString* s = (TObjString*) gCalibrations->At(i);
        fCBox_Calibration->AddEntry(s->GetString().Data(), i);
    }
    // selecting a calibration enables the module combo box
    fCBox_Calibration->Connect("Selected(Int_t)", "ButtonWindow", this, "EnableModuleSelection(Int_t)");

    // calibration module selection
    fCBox_Module = new TGComboBox(ver_frame_1, "Choose calibration module");
    fCBox_Module->Resize(260, 25);
    ver_frame_1->AddFrame(fCBox_Module, new TGLayoutHints(kLHintsLeft, 0, 5, 10, 0));

    // fill modules (gCaLibModules is a global list of TCCalib objects)
    for (Int_t i = 0; i < gCaLibModules->GetSize(); i++)
    {
        TCCalib* cmod = (TCCalib*) gCaLibModules->At(i);
        fCBox_Module->AddEntry(cmod->GetTitle(), i);
    }
    // selecting a module triggers reading of the available run sets
    fCBox_Module->Connect("Selected(Int_t)", "ButtonWindow", this, "ReadRunsets(Int_t)");

    config_frame->AddFrame(ver_frame_1, new TGLayoutHints(kLHintsFillX));

    TGVerticalFrame* ver_frame_2 = new TGVerticalFrame(config_frame);

    // runset selection (multiple selection allowed)
    fLB_RunSet = new TGListBox(ver_frame_2);
    fLB_RunSet->SetMultipleSelections(kTRUE);
    fLB_RunSet->Resize(120, 60);
    ver_frame_2->AddFrame(fLB_RunSet, new TGLayoutHints(kLHintsLeft | kLHintsExpandY | kLHintsExpandX, 5, 0, 10, 0));

    config_frame->AddFrame(ver_frame_2, new TGLayoutHints(kLHintsExpandX | kLHintsExpandY));
    AddFrame(config_frame, new TGLayoutHints(kLHintsExpandX | kLHintsExpandY, 5, 5, 5, 5));

    // ---------------------------------------------------------------------------------
    // control buttons
    TGGroupFrame* control_frame = new TGGroupFrame(this, "Calibration control", kHorizontalFrame);
    control_frame->SetTitlePos(TGGroupFrame::kLeft);

    fTB_Init = new TGTextButton(control_frame, "Start module");
    ResizeFrame(fTB_Init);
    fTB_Init->Connect("Clicked()", "ButtonWindow", this, "StartModule()");
    control_frame->AddFrame(fTB_Init, new TGLayoutHints(kLHintsExpandX, 0, 0, 10, 0));

    fTB_Write = new TGTextButton(control_frame, "Write to DB");
    ResizeFrame(fTB_Write);
    fTB_Write->Connect("Clicked()", "ButtonWindow", this, "DoWrite()");
    control_frame->AddFrame(fTB_Write, new TGLayoutHints(kLHintsExpandX, 0, 0, 10, 0));

    fTB_Print = new TGTextButton(control_frame, "Print values");
    ResizeFrame(fTB_Print);
    fTB_Print->Connect("Clicked()", "ButtonWindow", this, "Print()");
    control_frame->AddFrame(fTB_Print, new TGLayoutHints(kLHintsExpandX, 0, 0, 10, 0));

    fTB_PrintChanges = new TGTextButton(control_frame, "Print changes");
    ResizeFrame(fTB_PrintChanges);
    fTB_PrintChanges->Connect("Clicked()", "ButtonWindow", this, "PrintChanges()");
    control_frame->AddFrame(fTB_PrintChanges, new TGLayoutHints(kLHintsExpandX, 0, 0, 10, 0));

    fTB_Quit = new TGTextButton(control_frame, "Quit");
    ResizeFrame(fTB_Quit);
    fTB_Quit->Connect("Clicked()", "ButtonWindow", this, "Quit()");
    control_frame->AddFrame(fTB_Quit, new TGLayoutHints(kLHintsExpandX, 0, 0, 10, 0));

    AddFrame(control_frame, new TGLayoutHints(kLHintsExpandX, 5, 5, 0, 5));

    // ---------------------------------------------------------------------------------
    TGHorizontalFrame* nav_main_frame = new TGHorizontalFrame(this);

    // manual navigation
    TGGroupFrame* nav_man_frame = new TGGroupFrame(nav_main_frame, "Manual navigation", kHorizontalFrame);
    nav_man_frame->SetTitlePos(TGGroupFrame::kLeft);

    fTB_Prev = new TGTextButton(nav_man_frame, "Previous");
    ResizeFrame(fTB_Prev);
    fTB_Prev->SetToolTipText("Go to previous element", 200);
    fTB_Prev->Connect("Clicked()", "ButtonWindow", this, "DoPrev()");
    nav_man_frame->AddFrame(fTB_Prev, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    fTB_Next = new TGTextButton(nav_man_frame, " Next ");
    ResizeFrame(fTB_Next);
    fTB_Next->SetToolTipText("Go to next element", 200);
    fTB_Next->Connect("Clicked()", "ButtonWindow", this, "DoNext()");
    nav_man_frame->AddFrame(fTB_Next, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    fTB_Ignore = new TGTextButton(nav_man_frame, " Ignore ");
    ResizeFrame(fTB_Ignore);
    fTB_Ignore->SetToolTipText("Go to next element and ignore current one", 200);
    fTB_Ignore->Connect("Clicked()", "ButtonWindow", this, "DoIgnore()");
    nav_man_frame->AddFrame(fTB_Ignore, new TGLayoutHints(kLHintsLeft, 0, 20, 10, 0));

    fTB_Goto = new TGTextButton(nav_man_frame, "Go to");
    ResizeFrame(fTB_Goto);
    fTB_Goto->SetToolTipText("Go to specified element", 200);
    // NOTE(review): connected to "Released()" rather than "Clicked()" —
    // presumably intentional (read fNE_Elem only after release); confirm before changing.
    fTB_Goto->Connect("Released()", "ButtonWindow", this, "Goto()");
    nav_man_frame->AddFrame(fTB_Goto, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    // element number entry: integers limited to [0, 719]
    fNE_Elem = new TGNumberEntry(nav_man_frame, 0, 3, -1, TGNumberFormat::kNESInteger,
                                 TGNumberFormat::kNEAAnyNumber, TGNumberFormat::kNELLimitMinMax, 0, 719);
    ResizeFrame(fNE_Elem);
    nav_man_frame->AddFrame(fNE_Elem, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    nav_main_frame->AddFrame(nav_man_frame, new TGLayoutHints(kLHintsExpandX, 0, 0, 0, 0));

    // automatic navigation
    TGGroupFrame* nav_auto_frame = new TGGroupFrame(nav_main_frame, "Automatic navigation", kHorizontalFrame);
    nav_auto_frame->SetTitlePos(TGGroupFrame::kLeft);

    // delay between elements, two decimals, limited to [0.01, 5]
    fNE_Delay = new TGNumberEntry(nav_auto_frame, 0.1, 3, -1, TGNumberFormat::kNESRealTwo,
                                  TGNumberFormat::kNEAAnyNumber, TGNumberFormat::kNELLimitMinMax, 0.01, 5);
    ResizeFrame(fNE_Delay);
    nav_auto_frame->AddFrame(fNE_Delay, new TGLayoutHints(kLHintsLeft, 0, 5, 10, 0));

    fTB_DoAll = new TGTextButton(nav_auto_frame, "Start");
    ResizeFrame(fTB_DoAll);
    fTB_DoAll->SetToolTipText("Process automatically", 200);
    fTB_DoAll->Connect("Clicked()", "ButtonWindow", this, "DoAll()");
    nav_auto_frame->AddFrame(fTB_DoAll, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    fTB_Stop = new TGTextButton(nav_auto_frame, "Stop");
    ResizeFrame(fTB_Stop);
    fTB_Stop->SetToolTipText("Stop processing", 200);
    fTB_Stop->Connect("Clicked()", "ButtonWindow", this, "Stop()");
    nav_auto_frame->AddFrame(fTB_Stop, new TGLayoutHints(kLHintsLeft, 0, 0, 10, 0));

    nav_main_frame->AddFrame(nav_auto_frame, new TGLayoutHints(kLHintsLeft, 5, 0, 0, 0));

    AddFrame(nav_main_frame, new TGLayoutHints(kLHintsExpandX, 5, 5, 0, 5));

    // window configuration: route window-manager close through Quit()
    Connect("CloseWindow()", "ButtonWindow", this, "Quit()");
    DontCallClose();

    // Map all subwindows of main frame
    MapSubwindows();

    // Initialize the layout algorithm
    Resize(GetDefaultSize());

    // show window
    MapRaised();

    // move window to the bottom-right corner of the display
    Move(gClient->GetDisplayWidth() - GetDefaultWidth(),
         gClient->GetDisplayHeight() - GetDefaultHeight() - 30);
}
/// /// Process a directory recursively. /// void html_a_directory(TDirectory *f, TString path, TEnv *params) { TCanvas *c_h1 = 0; if (c_h1 == 0) { int x = params->GetValue("H1.XSize", 150); int y = params->GetValue("H1.YSize", 100); c_h1 = new TCanvas ("c_h1", "1d plots", x, y); } /// /// Check how to make gif plots /// char command[512]; sprintf(command, "which pstoimg &> /dev/null"); bool UsePstoimg = ! system(command); /// /// Generate the output directory /// gSystem->MakeDirectory (path); /// /// Get the html started /// ofstream html (path + "/index.html"); html << "<html><head><title>" << f->GetName() << "</title></head>" << endl; html << "<body>" << endl; html << "<h1>" << f->GetName() << "</h1>" << endl; cout << "Processing directory " << f->GetName() << endl; /// /// Now loop over all the keys in the directory /// f->cd(); TList *objlist = f->GetListOfKeys(); objlist->Sort(); // order alphabetically, instead of order in which they were written TIterator *itr = objlist->MakeIterator(); TKey *key; while ((key = static_cast<TKey*>(itr->Next())) != 0) { TObject *obj = key->ReadObj(); if (obj->IsA()->InheritsFrom("TDirectory")) { TDirectory *d = static_cast<TDirectory*>(obj); html << "<br><a href=\"" << d->GetName() << "/\">" << d->GetName() << "</a>" << endl; html_a_directory(d, path + "/" + d->GetName(), params); } else if (obj->IsA()->InheritsFrom("TObjString")) { TObjString *s = static_cast<TObjString*>(obj); html << "<p><h2>" << key->GetName() << "</h2>" << endl; //html << "<blockquote><pre>" << static_cast<char*>(s->GetString()) // << "</pre></blockquote></p>" // << endl; html << "<blockquote><pre>" << (s->GetString()).Data() << "</pre></blockquote></p>"<< endl; } // else if (obj->IsA()->InheritsFrom("CutFlowTable")) { // CutFlowTable *c = static_cast<CutFlowTable*> (obj); // // html << "<p><h2>" << key->GetName() << "</h2>" << endl; // // CFTPrinterHTML txt (html); // f->cd(); // c->PrintGlobal (txt, "All Events", ""); // html << "</p>" << endl; // } 
else if (obj->IsA()->InheritsFrom("TCanvas")) { TCanvas *cnv = static_cast<TCanvas*>(obj); cnv->Draw(); cnv->SaveAs(path + "/" + key->GetName() + ".eps"); if (UsePstoimg) { sprintf(command, "pstoimg -type=gif %s &> /dev/null",(path + "/" + key->GetName() + ".eps").Data()); if (system(command) != 0) { cout<<"Could not convert to gif: "<<path + "/" + key->GetName() + ".eps"<<endl; abort(); } } else cnv->SaveAs(path + "/" + key->GetName() + ".gif"); cnv->Close(); html << "<p><a href=\"" << key->GetName() << ".eps\">"; html << "<img src=\"" << key->GetName() << ".gif\">"; html << "</a> <br> " << key->GetName() << ".gif </p>" << endl; } else if (obj->IsA()->InheritsFrom("TH1") && !(obj->IsA()->InheritsFrom("TH2"))) { TH1 *h = static_cast<TH1*> (obj); c_h1->cd(); h->Draw(); c_h1->SaveAs(path + "/" + key->GetName() + ".eps"); if (UsePstoimg) { sprintf(command, "pstoimg -type=gif %s &> /dev/null",(path + "/" + key->GetName() + ".eps").Data()); if (system(command) != 0) { cout<<"Could not convert to gif: "<<path + "/" + key->GetName() + ".eps"<<endl; abort(); } } else c_h1->SaveAs(path + "/" + key->GetName() + ".gif"); html << "<p><a href=\"" << key->GetName() << ".eps\">"; html << "<img src=\"" << key->GetName() << ".gif\">"; html << "</a> <br> " << key->GetName() << ".gif </p>" << endl; } f->cd(); } /// /// Done! /// html << "</body></html>" << endl; html.close(); }
void Draweff(){ int sth=0, Gth=0; TFile *f = TFile::Open(outG); if(sth==0){TString dirname = "std";} else if(sth==1){TString dirname ="Gri055";} else {TString dirname ="Gri101";} gStyle->SetErrorX(0); TString name; TObjString* dataname = (TObjString*)f->Get(Form("dataname")); TObjString* histoname = (TObjString*)f->Get(Form("histoname")); if(Gth==0) name = "G0"; else if(Gth<nGlau) name = Form("Glau_%d",Gth); else name = Form("bin_%d",Gth-nGlau+1); TObjString* Glaubername = (TObjString*)f->Get(Form("%s/%s/Glaubername",dirname.Data(),name.Data())); TVectorD* xmin = (TVectorD*)f->Get(Form("%s/%s/xmin",dirname.Data(),name.Data())); TVectorD* xmax = (TVectorD*)f->Get(Form("%s/%s/xmax",dirname.Data(),name.Data())); TVectorD* mubest = (TVectorD*)f->Get(Form("%s/%s/mubest",dirname.Data(),name.Data())); TVectorD* kbest = (TVectorD*)f->Get(Form("%s/%s/kbest",dirname.Data(),name.Data())); TVectorD* Ndf = (TVectorD*)f->Get(Form("%s/%s/Ndf",dirname.Data(),name.Data())); TVectorD* chis = (TVectorD*)f->Get(Form("%s/%s/chis",dirname.Data(),name.Data())); TVectorD *kpoint = (TVectorD*)f->Get(Form("%s/%s/kpoint",dirname.Data(),name.Data())); TFile *fdata = TFile::Open(dataname->GetString()); TH1D *histo_obs = (TH1D*)fdata->Get(histoname->GetString()); histo_obs->Sumw2(); TFile *fGlauber = TFile::Open(Glaubername->GetString()); int binnum = histo_obs->GetNbinsX(); double Minx = histo_obs->GetXaxis()->GetXmin(); double Maxx = histo_obs->GetXaxis()->GetXmax(); double binsize = (Double_t)(Maxx-Minx)/binnum; int xbinmin=(int)(((*xmin)[0]-Minx)/binsize); int xbinmax=(int)(((*xmax)[0]-Minx)/binsize); TH1D *histo_exp = new TH1D("histo_exp","Simulated distribution;Multiplicity;Event Fraction",binnum,Minx,Maxx); histo_exp->Sumw2(); Int_t ibin; TH1D *histo_obs_norm = (TH1D*)histo_obs->Clone(); histo_obs_norm->Scale(1/histo_obs->Integral(xbinmin,xbinmax)); TF1 *NBD_fun = new 
TF1("NBD_fun","[0]*TMath::Gamma(x+[1])/(TMath::Gamma(x+1)*TMath::Gamma([1]))*TMath::Power([2]/[1],x)/TMath::Power([2]/[1]+1,x+[1])",0,100); NBD_fun->SetParameter(0,1); //[0]: Normalized constant NBD_fun->SetParameter(1,(*kbest)[0]); //[1]: k value NBD_fun->SetParameter(2,(*mubest)[0]); //[2]: mu value TTree *t = (TTree*) fGlauber->Get("nt_Pb_Pb"); Long_t Nevent; Nevent = (Long_t) t->GetEntries(); Long_t Ev; Int_t Bino; Double_t Para, Bi_Para, Mult; Float_t Ncoll; t->SetBranchAddress("Ncoll",&Ncoll); for(Ev=0; Ev<Nevent; Ev++){ if(Ev%100000==0) cout<<"Have run "<<Ev<<" events"<<endl; t->GetEntry(Ev); Para = 0; //make sure that Para doesn't accumulate through loops for(Bino=0; Bino<Ncoll; Bino++){ Bi_Para = NBD_fun->GetRandom(); Para += Bi_Para; } histo_exp->Fill(Para); } Double_t SumEvent, scale; SumEvent = histo_exp->Integral(xbinmin,xbinmax); scale = 1/SumEvent; TH1D *histo_exp_norm = (TH1D*) histo_exp->Clone(); histo_exp_norm->Scale(scale); TCanvas *c1 = new TCanvas(); gStyle->SetOptStat(kFALSE); double hfbin[]={0,1,2,3,4,6,8,10,13,16,20,25,30,40,55,70,90}; int nhfbin = 16; rehisto_obs_norm = (TH1D*)histo_obs_norm->Rebin(nhfbin,"rehisto_obs_norm",hfbin); normalizeByBinWidth(rehisto_obs_norm); rehisto_exp_norm = (TH1D*)histo_exp_norm->Rebin(nhfbin,"rehisto_exp_norm",hfbin); normalizeByBinWidth(rehisto_exp_norm); TH1D* ratio = (TH1D*)rehisto_obs_norm->Clone("ratio"); ratio->Divide(rehisto_exp_norm); ratio->SetMaximum(1.2); ratio->SetMinimum(0); ratio->GetXaxis()->SetTitle("HF #Sigma E_{T}"); ratio->GetYaxis()->SetTitle("ratio"); /*
void scanDirectory(const char *dirname) { TDirectoryIter iter(dirname); const char *filename = 0; TString ent; TString file; TString html; html.Form(gPreamble,dirname,dirname); TList dirList; TList fileList; while( (filename=iter.Next()) ) { if (filename[0]!='.') { ent.Form("%s/%s", dirname, filename); FileStat_t st; gSystem->GetPathInfo(ent.Data(), st); if (R_ISDIR(st.fMode)) { //fprintf(stderr,"Seeing directory %s\n",ent.Data()); scanDirectory(ent.Data()); dirList.Add(new TObjString(filename)); } else { size_t len = strlen(filename); if (len > 8 && strncmp(filename,"pt_",3)==0 && strncmp(filename+len-5,".root",5)==0) { //fprintf(stderr,"Seeing file %s\n",ent.Data()); file = filename; file[len-5]='\0'; fileList.Add(new TObjString(file)); } } } } dirList.Sort(); fileList.Sort(); TIter next(&dirList); TObjString *obj; html += "<table width=\"500\">\n"; html += gLine; html += gParentDir; while ( (obj = (TObjString*)next()) ) { html += TString::Format(gDirFmt,obj->GetName(),obj->GetName()); } html += gLine; if (!fileList.IsEmpty()) { next = &fileList; while ( (obj = (TObjString*)next()) ) { html += "<tr>"; html += TString::Format(gFiles,obj->GetName(),obj->GetName(),obj->GetName(),obj->GetName()); obj = (TObjString*)next(); if (obj) { html += TString::Format(gFiles,obj->GetName(),obj->GetName(),obj->GetName(),obj->GetName()); } else { html += "<td></td></tr>"; break; } } html += gLine; } html += "</table>\n"; dirList.Delete(); fileList.Delete(); html += "</body>\n"; html += "</html>\n"; ent.Form("%s/pt_index.html",dirname); FILE *output = fopen(ent.Data(),"w"); fprintf(output,"%s",html.Data()); fclose(output); }
// For each run listed (one per line) in `runListFilename`, read the CTP
// trigger-input configuration from the OCDB (optionally restricted to the
// comma-separated input names in `selectedInputs`), then group runs that
// share an identical input list and print one ready-to-paste
// SetTrigInputsMap("name:bit,...") line per group, followed by the runs
// belonging to it. `defaultStorage` is the OCDB default storage URI.
void TriggerInputsForMuonEventCuts ( TString runListFilename, TString selectedInputs="", TString defaultStorage = "raw://" )
{
  AliCDBManager::Instance()->SetDefaultStorage(defaultStorage.Data());

  TObjArray inputsList;
  inputsList.SetOwner(); // inputsList deletes its per-run THashLists
  TObjArray* selectedInputsList = selectedInputs.Tokenize(",");

  // Read input run list
  ifstream inFile(runListFilename.Data());
  TString srun = "";
  if ( inFile.is_open() ) {
    while ( ! inFile.eof() ) {
      srun.ReadLine(inFile,kFALSE);
      if ( ! srun.IsDigit() ) continue; // skip blank / non-numeric lines

      // For each run, read trigger inputs from OCDB
      Int_t runNumber = srun.Atoi();
      AliCDBManager::Instance()->SetRun(runNumber);

      // Get trigger class configuration
      AliCDBEntry* entry = AliCDBManager::Instance()->Get("GRP/CTP/Config");
      if ( ! entry ) continue;

      // Collect this run's inputs; the run number is stashed in the
      // list's unique ID for later use when printing the run groups.
      THashList* runInputs = new THashList();
      runInputs->SetOwner();
      runInputs->SetUniqueID((UInt_t)runNumber);
      AliTriggerConfiguration* trigConf = (AliTriggerConfiguration*)entry->GetObject();
      const TObjArray& trigInputsArray = trigConf->GetInputs();
      AliTriggerInput* trigInput = 0x0;
      TIter next(&trigInputsArray);
      while ( ( trigInput = static_cast<AliTriggerInput*>(next()) ) ) {
        // Apply the optional user filter on input names.
        if ( selectedInputsList->GetEntriesFast() > 0 && ! selectedInputsList->FindObject(trigInput->GetName()) ) continue;
        // The input bit number (log2 of the mask) is stored in the
        // TObjString's unique ID.
        Int_t inputId = (Int_t)TMath::Log2(trigInput->GetMask());
        TObjString* currInput = new TObjString(trigInput->GetName());
        currInput->SetUniqueID(inputId);
        runInputs->Add(currInput);
      }
      inputsList.Add(runInputs);
    }
    inFile.close();
  }
  delete selectedInputsList;

  // Loop on the trigger inputs
  // and group runs with an equal list of inputs
  Int_t nentries = inputsList.GetEntries();
  TArrayI checkMask(nentries);
  checkMask.Reset(1); // 1 = run not yet assigned to a group
  for ( Int_t irun=0; irun<nentries; irun++ ) {
    if ( checkMask[irun] == 0 ) continue;
    THashList* currList = static_cast<THashList*>(inputsList.At(irun));
    TString runRange = Form("Run range: %u", currList->GetUniqueID());
    for ( Int_t jrun=irun+1; jrun<nentries; jrun++ ) {
      if ( checkMask[jrun] == 0 ) continue;
      THashList* checkList = static_cast<THashList*>(inputsList.At(jrun));
      Bool_t isDifferent = kFALSE;
      for ( Int_t itrig=0; itrig<currList->GetEntries(); itrig++ ) {
        TObjString* currInput = static_cast<TObjString*>(currList->At(itrig));
        TObject* checkInput = checkList->FindObject(currInput->GetName());
        // Lists differ if a name is missing or mapped to another bit.
        if ( ! checkInput || checkInput->GetUniqueID() != currInput->GetUniqueID() ) {
          isDifferent = kTRUE;
          break;
        }
      } // loop on trigger inputs
      if ( isDifferent ) continue;
      // Same input list: fold this run into the current group.
      checkMask[jrun] = 0;
      runRange += Form(",%u", checkList->GetUniqueID());
    } // loop on runs

    // Print the configuration line for this group of runs.
    TString outString = "\nSetTrigInputsMap(\"";
    for ( Int_t itrig=0; itrig<currList->GetEntries(); itrig++ ) {
      TObjString* currInput = static_cast<TObjString*>(currList->At(itrig));
      outString += Form("%s:%u,",currInput->GetString().Data(), currInput->GetUniqueID());
    }
    outString.Append("\");\n");
    outString.ReplaceAll(",\"","\""); // drop the trailing comma before the closing quote
    outString += runRange;
    printf("%s\n", outString.Data());
  } // loop on runs
}
void MakeSnapshot(Int_t run, const char* defStorage, TMap* specStorages, const char* snapshotFileName) { AliCDBManager *cdb = AliCDBManager::Instance(); cdb->SetDefaultStorage(defStorage); cdb->SetRun(run); TIter iter(specStorages->GetTable()); TPair *pair = 0; while((pair = dynamic_cast<TPair*> (iter.Next()))){ TObjString* caltype = dynamic_cast<TObjString*> (pair->Key()); TObjString* specstor= dynamic_cast<TObjString*> (pair->Value()); if (caltype && specstor) //TString calType = caltype->GetString(); //TString specStor = specstor->GetString(); //cdb->SetSpecificStorage(calType.Data(),specStor.Data()); cdb->SetSpecificStorage(caltype->GetString().Data(),specstor->GetString().Data()); else //AliFatal("Error reading info for specific storage") Printf("Error reading info for specific storage"); } // ********************************** GRP ****************************************** cdb->Get("GRP/CTP/Config"); cdb->Get("GRP/Calib/LHCClockPhase"); cdb->Get("GRP/GRP/Data"); cdb->Get("GRP/Align/Data"); cdb->Get("GRP/Calib/MeanVertexSPD"); cdb->Get("GRP/Calib/MeanVertex"); cdb->Get("GRP/Calib/MeanVertexTPC"); cdb->Get("GRP/Calib/CosmicTriggers"); cdb->Get("GRP/CTP/Scalers"); cdb->Get("GRP/CTP/CTPtiming"); cdb->Get("GRP/CTP/TimeAlign"); cdb->Get("GRP/GRP/LHCData"); cdb->Get("GRP/Calib/RecoParam"); // ********************************** ALL ****************************************** TString detStr = ("ITS TPC TRD TOF PHOS HMPID EMCAL MUON ZDC PMD T0 VZERO"); //TString detStr = ("ITS MUON TPC"); TObjArray *arr = detStr.Tokenize(' '); for (Int_t iDet=0; iDet<arr->GetEntries(); iDet++) { TObjString *detOStr = dynamic_cast<TObjString*>(arr->At(iDet)); AliCDBManager::Instance()->GetAll(Form("%s/Calib/*",detOStr->GetString().Data())); AliCDBManager::Instance()->Get(Form("%s/Align/Data",detOStr->GetString().Data())); } // ******************************** TRIGGER **************************************** // Temporary fix - one has to define the correct policy in order // to load the 
trigger OCDB entries only for the detectors that // in the trigger or that are needed in order to put correct // information in ESD AliCDBManager::Instance()->GetAll("TRIGGER/*/*"); // ********************************** HLT ****************************************** // cdb->Get("HLT/ConfigHLT/esdLayout"); // cdb->Get("HLT/Calib/StreamerInfo"); TMap* entriesMap = const_cast<TMap*>(cdb->GetEntryCache()); Printf("\nentriesMap has %d entries!\n", entriesMap->GetEntries()); TList* entriesList = const_cast<TList*>(cdb->GetRetrievedIds()); Printf("\nentriesList has %d entries!\n", entriesList->GetEntries()); //TString filename(TString::Format("CDBsnapshot_Run%d.root",run)); TString filename(snapshotFileName); TFile *f = new TFile(filename.Data(),"recreate"); f->cd(); f->WriteObject(entriesMap,"entriesMap"); f->WriteObject(entriesList,"entriesList"); f->Close(); entriesMap->SetOwnerKeyValue(kFALSE,kFALSE); entriesList->SetOwner(kFALSE); }
void build(const char *filename,const char *lib = 0, const char *obj = 0) { if (obj!=0 && strlen(obj) ) { TString s = gSystem->GetMakeSharedLib(); TString r(" $ObjectFiles "); r.Append(obj); s.ReplaceAll(" $ObjectFiles",r); //gDebug = 5; gSystem->SetMakeSharedLib(s); } if (lib && strlen(lib)) { TString liblist(lib); TObjArray *libs = liblist.Tokenize(" "); TIter iter(libs); TObjString *objstr; TString s = gSystem->GetMakeSharedLib(); TString what("..nothing.."); if (s.Contains("$DepLibs")) { what = " $DepLibs"; } else { what = " $LinkedLibs"; } TString libstolink(" "); while ( (objstr=(TObjString*)iter.Next()) ) { gSystem->Load(objstr->String()); TString libfile( gSystem->GetLibraries(objstr->String(),"DSL",kFALSE)); libstolink.Append(libfile); libstolink.Append(" "); } libstolink.Append(what); s.ReplaceAll(what,libstolink); gSystem->SetMakeSharedLib(s); } #ifdef __CLING__ TString r; #ifdef ClingWorkAroundCallfuncAndInline r.Append(" -DClingWorkAroundCallfuncAndInline "); #endif #ifdef ClingWorkAroundCallfuncAndVirtual r.Append(" -DClingWorkAroundCallfuncAndVirtual "); #endif #ifdef ClingWorkAroundJITandInline r.Append(" -DClingWorkAroundJITandInline "); #endif #ifdef ClingWorkAroundCallfuncAndReturnByValue r.Append(" -DClingWorkAroundCallfuncAndReturnByValue "); #endif #ifdef ClingWorkAroundNoPrivateClassIO r.Append(" -DClingWorkAroundNoPrivateClassIO "); #endif if (r.Length()) { r.Append(" $IncludePath"); TString s = gSystem->GetMakeSharedLib(); s.ReplaceAll(" $IncludePath",r); gSystem->SetMakeSharedLib(s); } #endif #if defined(_WIN32) && !defined(__CYGWIN__) TString fname(filename); if (filename[0]=='/') { // full path name we need to turn it into a windows path fname = gSystem->GetFromPipe(TString::Format("cygpath -w %s",filename)); } fprintf(stderr,"from %s to %s\n",filename,fname.Data()); int result = gSystem->CompileMacro(fname,"kc"); #else int result = gSystem->CompileMacro(filename,"kc"); #endif if (!result) gApplication->Terminate(1); }
int main(int argc, char* argv[]){ if (argc != 6){ std::cout << "wrong number of arguments: usage ./topDataClass_doAnalysis <file name> <mod #> <row #> <col #> <ch #>" << std::endl; return 0; } //define application object theApp = new TApplication("App", &argc, argv); TString inputFileName = theApp->Argv()[1]; std::cout << "Input file name " << inputFileName << std::endl; //create target6 interface object topDataClass *data = new topDataClass(); //specify channel of interest int inMod = atoi(theApp->Argv()[2]); int inRow = atoi(theApp->Argv()[3]); int inCol = atoi(theApp->Argv()[4]); int inCh = atoi(theApp->Argv()[5]); data->setAnalysisChannel( inMod, inRow, inCol, inCh ); //specify timing marker channel data->setTimingMarkerChannel( 0, 0, 1, 2 ); //specify time window data->windowTime = windowTime; //open summary tree file data->openSummaryTree(inputFileName); //create output file TObjArray* strings = inputFileName.Tokenize("/"); TObjString* objstring = (TObjString*) strings->At(strings->GetLast()); TString inputFileNameBase(objstring->GetString()); TString outputFileName = "output_topDataClass_doAnalysis_doublePulseSampleDTFit_"; outputFileName += inMod; outputFileName += "_"; outputFileName += inRow; outputFileName += "_"; outputFileName += inCol; outputFileName += "_"; outputFileName += inCh; outputFileName += "_"; outputFileName += inputFileNameBase; //outputFileName += ".root"; std::cout << " outputFileName " << outputFileName << std::endl; outputFile = new TFile( outputFileName , "RECREATE"); //initialize histograms initializeGlobalHistograms(); //initialize tree branches data->setTreeBranches(); //load pulse info into arrays data->selectPulsesForArray(); //loop over selected events, apply corrections histogram pulse time distributions //monitor pulse time vs event # gPulseTimeVsEventNum = new TGraphErrors(); for(int entry = 0; entry < data->numUsed; entry++) { //skip events not in arrays if( entry >= maxNumEvt ) continue; double pulseTime = 
data->measurePulseTimeArrayEntry(entry,1); double pulseHeight = data->adc_0_A[entry]; int smpBinNumIn128Array = data->getSmpBinNumIn128Array(entry); double smpPos = data->getSmpPos(entry); //apply analysis cuts //if( smpBinNumIn128Array >= 127 ) // continue; //measure pulse time vs event # gPulseTimeVsEventNum->SetPoint( gPulseTimeVsEventNum->GetN() , data->eventNum_A[entry], pulseTime ); //cut on event # if( data->eventNum_A[entry] < 0 ) continue; //histogram selected pulse distributions hPulseHeightInitial->Fill( data->adc_0_A[entry] ); hPulseTimeInitial->Fill( pulseTime ); hPulseSampleInitial->Fill(data->smp_0_A[entry]); hPulseSmp128Initial->Fill(smpBinNumIn128Array); hPulseTimeVsSmp128Initial->Fill(smpBinNumIn128Array, pulseTime ); hPulseTimeVsSmp128PosInitial->Fill(smpBinNumIn128Array + smpPos, pulseTime ); hPulseTimeVsHeightInitial->Fill(pulseHeight, pulseTime ); hPulseTimeVsFTSWInitial->Fill(data->ftsw_A[entry], pulseTime ); hFTSWVsSmp128Initial->Fill(smpBinNumIn128Array, data->ftsw_A[entry]); } //loop over selected events, measure time difference between timing marker and selected channel pulses gTest = new TGraphErrors(); data->measurePulseMarkerTimeDifferenceDistribution(hPulseTimeMarkTimeDiffInitial,hPulseTimeMarkTimeDiffVsMarkSmpBinNumInitial ); //run fit analysis doDoublePulseFit(data); //write output file writeOutputFile(); //delete target6 data object delete data; return 1; }
const char* m0_vs_m12_nofloat(const char* textfile = 0, TH2D* inputHist = 0, const char* rootfile = "m0m12_nofloat.root", TString id1="m0",TString id2="m12", int nbinsX=21,int nbinsY=17, float minX=20,float maxX=860, float minY=92.5, float maxY=347.5) { // set combination style and remove existing canvas' CombinationGlob::Initialize(); initialize(); // get the harvested tree TTree* tree = harvesttree( textfile!=0 ? textfile : 0 ); if (tree==0) { cout << "Cannot open list file. Exit." << endl; return ""; } // store histograms to output file const char* outfile(0); if (textfile!=0) { TObjArray* arr = TString(textfile).Tokenize("/"); TObjString* objstring = (TObjString*)arr->At( arr->GetEntries()-1 ); outfile = Form("%s%s",objstring->GetString().Data(),".root"); delete arr; } else { outfile = rootfile; } cout << "Histograms being written to : " << outfile << endl; TFile* output = TFile::Open(outfile,"RECREATE"); output->cd(); TH2D* hist = nullptr; if (inputHist!=NULL){ TH2D *clonehclPmin2=(TH2D*)inputHist->Clone(); hist = DrawUtil::triwsmooth( tree, "p1:m12:m0", "hclPmin2" , "Observed CLsplusb", "p1>=0 && p1<=1", clonehclPmin2 );} else{ hist = DrawUtil::triwsmooth( tree, "p1:m12:m0", "hclPmin2" , "Observed CLsplusb", "p1>=0 && p1<=1", inputHist);} if (hist!=0) { hist->Smooth(); hist->Write(); delete hist; hist=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* sigp1 = nullptr; if (inputHist!=NULL){ TH2D *clonesigp1=(TH2D*)inputHist->Clone(); sigp1 = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(p1):m12:m0", "sigp1" , "One-sided significance of CLsplusb", "(p1>0 && p1<=1)", clonesigp1 );} else{ sigp1 = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(p1):m12:m0", "sigp1" , "One-sided significance of CLsplusb", "(p1>0 && p1<=1)", inputHist );} if (sigp1!=0) { sigp1->Smooth(); sigp1->Write(); delete sigp1; sigp1=0; } else { cout << "Cannot make smoothed significance histogram. Exit." 
<< endl; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// TH2D* p0exp = nullptr; if (inputHist!=NULL){ TH2D *clonep0exp=(TH2D*)inputHist->Clone(); p0exp = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(p0exp):m12:m0", "p0exp" , "One-sided significance p0 (exp)", "(p0exp>0 && p0exp<=1)", clonep0exp );} else{ p0exp = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(p0exp):m12:m0", "p0exp" , "One-sided significance p0 (exp)", "(p0exp>0 && p0exp<=1)", inputHist );} if (p0exp!=0) { p0exp->Smooth(); p0exp->Write(); delete p0exp; p0exp=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* p0expraw = nullptr; if (inputHist!=NULL){ TH2D *clonep0expraw=(TH2D*)inputHist->Clone(); p0expraw = DrawUtil::triwsmooth( tree, "p0exp:m12:m0", "p0expraw" , "One-sided significance p0 (exp)", "(p0exp>0 && p0exp<=1)", clonep0expraw );} else{ p0expraw = DrawUtil::triwsmooth( tree, "p0exp:m12:m0", "p0expraw" , "One-sided significance p0 (exp)", "(p0exp>0 && p0exp<=1)", inputHist );} if (p0expraw!=0) { p0expraw->Smooth(); p0expraw->Write(); delete p0expraw; p0expraw=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } // cls:clsexp:clsu1s:clsd1 TH2D* p1clsf = nullptr; if (inputHist!=NULL){ TH2D *clonep1clsf=(TH2D*)inputHist->Clone(); p1clsf = DrawUtil::triwsmooth( tree, "CLs:m12:m0", "sigp1clsf" , "Observed CLs", "p1>0 && p1<=1", clonep1clsf );} else{ p1clsf = DrawUtil::triwsmooth( tree, "CLs:m12:m0", "sigp1clsf" , "Observed CLs", "p1>0 && p1<=1", inputHist ); } if (p1clsf!=0) { p1clsf->Smooth(); p1clsf->Write(); delete p1clsf; p1clsf=0; } else { cout << "Cannot make smoothed significance histogram. Exit." 
<< endl; } TH2D* sigp1clsf = nullptr; if (inputHist!=NULL){ TH2D *clonesigp1clsf=(TH2D*)inputHist->Clone(); sigp1clsf = DrawUtil::triwsmooth( tree, "StatTools::GetSigma( CLs ):m12:m0", "sigp1clsf" , "One-sided significance of observed CLs", "p1>0 && p1<=1",clonesigp1clsf );} else{ sigp1clsf = DrawUtil::triwsmooth( tree, "StatTools::GetSigma( CLs ):m12:m0", "sigp1clsf" , "One-sided significance of observed CLs", "p1>0 && p1<=1", inputHist );} if (sigp1clsf!=0) { sigp1clsf->Smooth(); sigp1clsf->Write(); delete sigp1clsf; sigp1clsf=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* sigp1expclsf = nullptr; if (inputHist!=NULL){ TH2D *clonesigp1expclsf=(TH2D*)inputHist->Clone(); sigp1expclsf = DrawUtil::triwsmooth( tree, "StatTools::GetSigma( CLsexp ):m12:m0", "sigp1expclsf" , "One-sided significance of expected CLs", "p1>0 && p1<=1", clonesigp1expclsf );} else{ sigp1expclsf = DrawUtil::triwsmooth( tree, "StatTools::GetSigma( CLsexp ):m12:m0", "sigp1expclsf" , "One-sided significance of expected CLs", "p1>0 && p1<=1", inputHist );} if (sigp1expclsf!=0) { sigp1expclsf->Smooth(); sigp1expclsf->Write(); delete sigp1expclsf; sigp1expclsf=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* sigclsu1s = nullptr; if (inputHist!=NULL){ TH2D *clonesigclsu1s=(TH2D*)inputHist->Clone(); sigclsu1s = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(clsu1s):m12:m0", "sigclsu1s" , "One-sided significance of expected CLs (+1 sigma)", "clsu1s>0", clonesigclsu1s );} else{ sigclsu1s = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(clsu1s):m12:m0", "sigclsu1s" , "One-sided significance of expected CLs (+1 sigma)", "clsu1s>0", inputHist );} if (sigclsu1s!=0) { sigclsu1s->Smooth(); sigclsu1s->Write(); delete sigclsu1s; sigclsu1s=0; } else { cout << "Cannot make smoothed significance histogram. Exit." 
<< endl; } TH2D* sigclsd1s = nullptr; if (inputHist!=NULL){ TH2D *clonesigclsd1s=(TH2D*)inputHist->Clone(); sigclsd1s = DrawUtil::triwsmooth( tree , "StatTools::GetSigma(clsd1s):m12:m0", "sigclsd1s" , "One-sided significance of expected CLs (-1 sigma)", "clsd1s>0",clonesigclsd1s );} else{ sigclsd1s = DrawUtil::triwsmooth( tree, "StatTools::GetSigma(clsd1s):m12:m0", "sigclsd1s" , "One-sided significance of expected CLs (-1 sigma)", "clsd1s>0", inputHist );} if (sigclsd1s!=0) { sigclsd1s->Smooth(); sigclsd1s->Write(); delete sigclsd1s; sigclsd1s=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } ///////////////////////////////////////////////////// upper limit * cross section plots TH2D* UpperLimit = nullptr; if (inputHist!=NULL){ TH2D *cloneupperlimit=(TH2D*)inputHist->Clone(); UpperLimit = DrawUtil::triwsmooth( tree, "upperLimit:m12:m0", "upperLimit" , "upperlimit","1", cloneupperlimit);} else{ UpperLimit = DrawUtil::triwsmooth( tree, "upperLimit:m12:m0", "upperLimit" , "upperlimit","1", inputHist);} if (UpperLimit!=0) { UpperLimit->Smooth(); UpperLimit->Write(); delete UpperLimit; UpperLimit=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* ExpectedUpperLimit = nullptr; if (inputHist!=NULL){ TH2D *cloneexpectedUpperlimit=(TH2D*)inputHist->Clone(); ExpectedUpperLimit = DrawUtil::triwsmooth( tree, "expectedUpperLimit:m12:m0", "expectedUpperLimit" , "expectedUpperlimit","1", cloneexpectedUpperlimit);} else{ ExpectedUpperLimit = DrawUtil::triwsmooth( tree, "expectedUpperLimit:m12:m0", "expectedUpperLimit" , "expectedUpperlimit","1", inputHist);} if (ExpectedUpperLimit!=0) { ExpectedUpperLimit->Smooth(); ExpectedUpperLimit->Write(); delete ExpectedUpperLimit; ExpectedUpperLimit=0; } else { cout << "Cannot make smoothed significance histogram. Exit." 
<< endl; } TH2D* xsec = nullptr; if (inputHist!=NULL){ TH2D *clonexsec=(TH2D*)inputHist->Clone(); xsec = DrawUtil::triwsmooth( tree, "xsec:m12:m0", "xsec" , "xsec","1", clonexsec);} else{ xsec = DrawUtil::triwsmooth( tree, "xsec:m12:m0", "xsec" , "xsec","1", inputHist);} if (xsec!=0) { xsec->Smooth(); xsec->Write(); delete xsec; xsec=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } TH2D* excludedXsec = nullptr; if (inputHist!=NULL){ TH2D *cloneexcludedXsec=(TH2D*)inputHist->Clone(); excludedXsec = DrawUtil::triwsmooth( tree, "excludedXsec:m12:m0", "excludedXsec" , "excludedXsec","1", cloneexcludedXsec);} else{ excludedXsec = DrawUtil::triwsmooth( tree, "excludedXsec:m12:m0", "excludedXsec" , "excludedXsec","1", inputHist);} if (excludedXsec!=0) { excludedXsec->Smooth(); excludedXsec->Write(); delete excludedXsec; excludedXsec=0; } else { cout << "Cannot make smoothed significance histogram. Exit." << endl; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// output->Close(); //if (output!=0) { delete output; output=0; } cout << "Done." << endl; return outfile; }
void plotLimit(TString outputDir="./", TString inputs="", TString inputXSec="", bool strengthLimit=true, bool blind=false, double energy=7, double luminosity=5.035, TString legendName="ee and #mu#mu channels") { //style options gStyle->SetCanvasBorderMode(0); gStyle->SetCanvasColor(kWhite); gStyle->SetFrameBorderMode(0); gStyle->SetFrameBorderSize(1); gStyle->SetFrameFillColor(0); gStyle->SetFrameFillStyle(0); gStyle->SetFrameLineColor(1); gStyle->SetFrameLineStyle(1); gStyle->SetFrameLineWidth(1); gStyle->SetOptFit(1); gStyle->SetFitFormat("5.4g"); gStyle->SetFuncColor(2); gStyle->SetOptFile(0); //gStyle->SetOptStat(0); gStyle->SetOptStat("mr"); gStyle->SetStatColor(kWhite); gStyle->SetStatFont(42); gStyle->SetStatFontSize(0.04); gStyle->SetStatTextColor(1); gStyle->SetStatFormat("6.4g"); gStyle->SetStatBorderSize(1); gStyle->SetStatH(0.1); gStyle->SetStatW(0.2); gStyle->SetPadTopMargin(0.05); gStyle->SetPadBottomMargin(0.13); gStyle->SetPadLeftMargin(0.16); gStyle->SetPadRightMargin(0.02); gStyle->SetOptTitle(0); gStyle->SetTitleFont(42); gStyle->SetTitleColor(1); gStyle->SetTitleTextColor(1); gStyle->SetTitleFillColor(10); gStyle->SetTitleFontSize(0.05); gStyle->SetAxisColor(1, "XYZ"); gStyle->SetStripDecimals(kTRUE); gStyle->SetTickLength(0.03, "XYZ"); gStyle->SetNdivisions(510, "XYZ"); gStyle->SetPadTickX(1); // To get tick marks on the opposite side of the frame gStyle->SetPadTickY(1); gStyle->SetEndErrorSize(2); gStyle->SetErrorX(0.); gStyle->SetMarkerStyle(20); gROOT->ForceStyle(); gStyle->SetPadTopMargin (0.06); gStyle->SetPadBottomMargin(0.12); gStyle->SetPadRightMargin (0.06); gStyle->SetPadLeftMargin (0.14); gStyle->SetTitleSize(0.04, "XYZ"); gStyle->SetTitleXOffset(1.1); gStyle->SetTitleYOffset(1.45); gStyle->SetPalette(1); gStyle->SetNdivisions(505); string suffix = string(outputDir.Data()); double cprime=1.0; double brnew=0.0; double XSecScaleFactor = 1.0; if(suffix.find("_cp")!=string::npos){ sscanf(suffix.c_str()+suffix.find("_cp"), 
"_cp%lf_brn%lf", &cprime, &brnew); XSecScaleFactor = pow(cprime,2) * (1-brnew); } //get xsec * br from summary file getXSecXBR(inputXSec); //get the limits from the tree TFile* file = TFile::Open(inputs); printf("Looping on %s\n",inputs.Data()); if(!file) return; if(file->IsZombie()) return; TTree* tree = (TTree*)file->Get("limit"); tree->GetBranch("mh" )->SetAddress(&Tmh ); tree->GetBranch("limit" )->SetAddress(&Tlimit ); tree->GetBranch("limitErr" )->SetAddress(&TlimitErr); tree->GetBranch("quantileExpected")->SetAddress(&TquantExp); int N = tree->GetEntriesFast() / 6 ;// 6Limits per mass point (observed, meand , +-1sigma, +-2sigma) double* MassAxis = new double[N]; double* ObsLimit = new double[N]; fillLimitArray(tree,-1 ,ObsLimit,MassAxis); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ObsLimit); double* ExpLimitm2 = new double[N]; fillLimitArray(tree,0.025,ExpLimitm2); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ExpLimitm2); double* ExpLimitm1 = new double[N]; fillLimitArray(tree,0.160,ExpLimitm1); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ExpLimitm1); double* ExpLimit = new double[N]; fillLimitArray(tree,0.500,ExpLimit ); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ExpLimit); double* ExpLimitp1 = new double[N]; fillLimitArray(tree,0.840,ExpLimitp1); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ExpLimitp1); double* ExpLimitp2 = new double[N]; fillLimitArray(tree,0.975,ExpLimitp2); if(!strengthLimit)scaleLimitsByXSecXBR(N, MassAxis, ExpLimitp2); file->Close(); //make TH Cross-sections double* ThXSec = new double[N]; for(unsigned int i=0;i<N;i++){ThXSec[i] = xsecXbr[MassAxis[i]];} //scale TH cross-section and limits according to scale factor //this only apply to NarrowResonnance case for(unsigned int i=0;i<N;i++){ if(strengthLimit){ ObsLimit[i] /= XSecScaleFactor; ExpLimitm2[i]/= XSecScaleFactor; ExpLimitm1[i]/= XSecScaleFactor; ExpLimit [i]/= XSecScaleFactor; ExpLimitp1[i]/= XSecScaleFactor; ExpLimitp2[i]/= 
XSecScaleFactor; } ThXSec[i] *= XSecScaleFactor; } //limits in terms of signal strength TCanvas* c = new TCanvas("c", "c",600,600); TH1F* framework = new TH1F("Graph","Graph",1,15,70); framework->SetStats(false); framework->SetTitle(""); framework->GetXaxis()->SetTitle("A boson mass [GeV]"); framework->GetYaxis()->SetTitleOffset(1.70); if(strengthLimit){ framework->GetYaxis()->SetTitle("#mu = #sigma_{95%} / #sigma_{th}"); framework->GetYaxis()->SetRangeUser(5E-2,5E1); c->SetLogy(true); }else{ framework->GetYaxis()->SetTitle("#sigma_{95%} (fb)"); framework->GetYaxis()->SetRangeUser(1E-1,1E3); c->SetLogy(true); } framework->Draw(); TGraph* TGObsLimit = new TGraph(N,MassAxis,ObsLimit); TGObsLimit->SetLineWidth(2); TGraph* TGExpLimit = new TGraph(N,MassAxis,ExpLimit); TGExpLimit->SetLineWidth(2); TGExpLimit->SetLineStyle(2); TCutG* TGExpLimit1S = GetErrorBand("1S", N, MassAxis, ExpLimitm1, ExpLimitp1); TCutG* TGExpLimit2S = GetErrorBand("2S", N, MassAxis, ExpLimitm2, ExpLimitp2); TGExpLimit2S->SetFillColor(5); TGraph* THXSec = new TGraph(N,MassAxis,ThXSec); THXSec->SetLineWidth(2); THXSec->SetLineStyle(1); THXSec->SetLineColor(4); TGExpLimit->SetLineColor(2); TGExpLimit->SetLineStyle(1); TGObsLimit->SetLineWidth(2); TGObsLimit->SetMarkerStyle(20); TGExpLimit2S->Draw("fc same"); TGExpLimit1S->Draw("fc same"); if(!blind) TGObsLimit->Draw("same CP"); TGExpLimit->Draw("same C"); if(strengthLimit){ TLine* SMLine = new TLine(framework->GetXaxis()->GetXmin(),1.0,framework->GetXaxis()->GetXmax(),1.0); SMLine->SetLineWidth(2); SMLine->SetLineStyle(1); SMLine->SetLineColor(4); SMLine->Draw("same C"); }else{ THXSec->Draw("same C"); } TPaveText *pave = new TPaveText(0.1,0.96,0.99,0.99,"NDC"); char LumiLabel[1024]; if(energy<9){ sprintf(LumiLabel,"CMS preliminary, #sqrt{s}=%.0f TeV, #scale[0.5]{#int} L=%6.1ffb^{-1} - %20s",energy, luminosity,legendName.Data()); }else{ sprintf(LumiLabel,"CMS preliminary, #sqrt{s}=%.0f TeV #scale[0.5]{#int} L=%6.1ffb^{-1}, #sqrt{s}=%.0f TeV 
#scale[0.5]{#int} L=%6.1ffb^{-1}",7.0,5.0,8.0,19.7); } pave->SetBorderSize(0); pave->SetFillStyle(0); pave->SetTextFont(42); TObjArray* tokens = (TString(LumiLabel)).Tokenize("\\\\"); int nt = tokens->GetEntries(); for(int it=0; it<nt; ++it){ TObjString * t = (TObjString *)tokens->At(it); pave->AddText(t->GetString()); } pave->Draw("same"); TLegend* LEG = new TLegend(0.55,0.75,0.85,0.95); LEG->SetHeader("Signal XSec x 1000"); LEG->SetFillColor(0); LEG->SetFillStyle(0); LEG->SetTextFont(42); LEG->SetBorderSize(0); LEG->AddEntry(THXSec , "TH prediction" ,"L"); LEG->AddEntry(TGExpLimit , "median expected" ,"L"); LEG->AddEntry(TGExpLimit1S , "expected #pm 1#sigma" ,"F"); LEG->AddEntry(TGExpLimit2S , "expected #pm 2#sigma" ,"F"); if(!blind) LEG->AddEntry(TGObsLimit , "observed" ,"LP"); LEG->Draw(); c->RedrawAxis(); c->SaveAs(outputDir+"Limit.png"); c->SaveAs(outputDir+"Limit.C"); c->SaveAs(outputDir+"Limit.pdf"); //save a summary of the limits FILE* pFileSum = fopen((outputDir+"LimitSummary").Data(),"w"); for(int i=0;i<N;i++){ fprintf(pFileSum, "$%8.6E$ & $%8.6E$ & $[%8.6E,%8.6E]$ & $[%8.6E,%8.6E]$ & $%8.6E$ & Th=$%8.6E$\\\\\\hline\n",MassAxis[i], ExpLimit[i], ExpLimitm1[i], ExpLimitp1[i], ExpLimitm2[i], ExpLimitp2[i], ObsLimit[i], ThXSec[i]); if(int(MassAxis[i])%50!=0)continue; printf("%f ",ObsLimit[i]); }printf("\n"); fclose(pFileSum); pFileSum = fopen((outputDir+"LimitRange").Data(),"w"); fprintf(pFileSum, "EXPECTED LIMIT --> "); printLimits(pFileSum,TGExpLimit, MassAxis[0], MassAxis[N-1]); if(!blind) fprintf(pFileSum, "OBSERVED LIMIT --> "); printLimits(pFileSum,TGObsLimit, MassAxis[0], MassAxis[N-1]); fprintf(pFileSum, "Exp Limits for Model are: "); for(int i=0;i<N;i++){if(int(MassAxis[i])%50!=0)continue; fprintf(pFileSum, "%f+-%f ",ExpLimit[i], (ExpLimitp1[i]-ExpLimitm1[i]))/2.0;}fprintf(pFileSum,"\n"); if(!blind) { fprintf(pFileSum, "Obs Limits for Model are: "); for(int i=0;i<N;i++){if(int(MassAxis[i])%50!=0)continue; fprintf(pFileSum, "%f 
",ObsLimit[i]);}fprintf(pFileSum,"\n"); } fclose(pFileSum); }
KVList* KV_CCIN2P3_GE::GetListOfJobs()
{
   // Create and fill list with KVBatchJob objects describing current jobs
   // Delete list after use
   //
   // Two passes over Grid Engine command output:
   //   1. "qstat -r"       -> job id, status, submit/start date+time, job name
   //   2. "qstat -j <id>"  -> CPU/memory usage and the resource requests
   //
   // NOTE(review): all parsing assumes the exact column layout of this GE
   // version's qstat output (field indices 0,4,5,6 of the status line, etc.);
   // a different qstat version/locale may break it — verify when upgrading.
   KVList* list_of_jobs = new KVList;
   // use qstat -r to get list of job ids and jobnames
   TString reply = gSystem->GetFromPipe("qstat -r");
   TObjArray* lines = reply.Tokenize("\n");
   Int_t nlines = lines->GetEntries();
   for (Int_t line_number = 0; line_number < nlines; line_number++) {
      TString thisLine = ((TObjString*)(*lines)[line_number])->String();
      if (thisLine.Contains("Full jobname:")) {
         // previous line contains job-id and status
         TString lastLine = ((TObjString*)(*lines)[line_number - 1])->String();
         TObjArray* bits = lastLine.Tokenize(" ");
         Int_t jobid = ((TObjString*)(*bits)[0])->String().Atoi();
         TString status = ((TObjString*)(*bits)[4])->String();
         // date & time jobs started (running job) or submitted (queued job)
         TString sdate = ((TObjString*)(*bits)[5])->String();// mm/dd/yyyy
         TString stime = ((TObjString*)(*bits)[6])->String();// hh:mm:ss
         Int_t dd, MM, yyyy, hh, mm, ss;
         sscanf(sdate.Data(), "%d/%d/%d", &MM, &dd, &yyyy);
         sscanf(stime.Data(), "%d:%d:%d", &hh, &mm, &ss);
         KVDatime submitted(yyyy, MM, dd, hh, mm, ss);
         delete bits;
         // current line: "Full jobname: <name>" -> token index 2 is the name
         bits = thisLine.Tokenize(": ");
         TString jobname = ((TObjString*)(*bits)[2])->String();
         delete bits;
         // one KVGEBatchJob per qstat entry; the returned list owns them
         KVGEBatchJob* job = new KVGEBatchJob();
         job->SetName(jobname);
         job->SetJobID(jobid);
         job->SetStatus(status);
         job->SetSubmitted(submitted);
         list_of_jobs->Add(job);
      }
   }
   delete lines;
   // no jobs found: skip the second (per-job) qstat pass entirely
   if (!list_of_jobs->GetEntries()) return list_of_jobs;
   // use qstat -j [jobid] to get cpu and memory used and also the resource requests
   TIter next_job(list_of_jobs);
   KVGEBatchJob* job;
   while ((job = (KVGEBatchJob*)next_job())) {
      // for running jobs, read in from [jobname].status file
      // the number of events read/to read, disk used
      if (!strcmp(job->GetStatus(), "r")) job->UpdateDiskUsedEventsRead();
      reply = gSystem->GetFromPipe(Form("qstat -j %d", job->GetJobID()));
      lines = reply.Tokenize("\n");
      nlines = lines->GetEntries();
      for (Int_t line_number = 0; line_number < nlines; line_number++) {
         TString thisLine = ((TObjString*)(*lines)[line_number])->String();
         if (thisLine.BeginsWith("usage")) {
            // e.g. "usage    1: cpu=hh:mm:ss, mem=..., io=..., vmem=xxxM, ..."
            TObjArray* bits = thisLine.Tokenize("=,");
            TString stime = ((TObjString*)(*bits)[1])->String();// hh:mm:ss or d:hh:mm:ss
            Int_t dd, hh, mm, ss;
            TObjArray* tmp = stime.Tokenize(":");
            dd = 0; // day field only present in the 4-token form
            if (tmp->GetEntries() == 4) sscanf(stime.Data(), "%d:%2d:%2d:%2d", &dd, &hh, &mm, &ss);
            else sscanf(stime.Data(), "%2d:%2d:%2d", &hh, &mm, &ss);
            delete tmp;
            // total CPU time in seconds
            job->SetCPUusage((dd * 24 + hh) * 3600 + mm * 60 + ss);
            TString smem = ((TObjString*)(*bits)[7])->String();// xxx.xxxxM
            job->SetMemUsed(smem);
            delete bits;
         } else if (thisLine.BeginsWith("hard resource_list:")) {
            TObjArray* bits = thisLine.Tokenize(": ");
            TString res = ((TObjString*)(*bits)[2])->String();//os=sl5,xrootd=1,irods=1,s_vmem=1024M,s_fsize=50M,s_cpu=36000
            // normalise GE "s_" request names to the short forms used elsewhere
            res.ReplaceAll("s_vmem", "vmem");
            res.ReplaceAll("s_fsize", "fsize");
            res.ReplaceAll("s_cpu", "ct");
            job->SetResources(res);
            // split the comma-separated requests and store the known limits
            TObjArray* bbits = res.Tokenize(",");
            TIter next_res(bbits);
            TObjString* ss;
            while ((ss = (TObjString*)next_res())) {
               TString g = ss->String();
               if (g.BeginsWith("ct=")) {
                  g.Remove(0, 3);
                  job->SetCPUmax(g.Atoi());
               } else if (g.BeginsWith("vmem=")) {
                  g.Remove(0, 5);
                  job->SetMemMax(g);
               } else if (g.BeginsWith("fsize=")) {
                  g.Remove(0, 6);
                  job->SetDiskMax(g);
               }
            }
            delete bits;
            delete bbits;
         }
      }
      delete lines;
   }
   return list_of_jobs;
}
void MultiHistoOverlap(TString namesandlabels, Int_t nOfFiles, const TString& outDir="./"){ gROOT->Reset(); gROOT->ProcessLine(".L tdrstyle.C"); gROOT->ProcessLine("setTDRStyle()"); // gSystem->Load("libRooFit"); // using namespace RooFit; // preamble TPaveText *cmsprel = new TPaveText(0.19, 0.95, 0.95, 0.99, "NDC"); cmsprel->SetTextSize(0.03); cmsprel->SetTextFont(42); cmsprel->SetFillColor(0); cmsprel->SetBorderSize(0); cmsprel->SetMargin(0.01); cmsprel->SetTextAlign(12); // align left TString text = "CMS Preliminary 2011"; cmsprel->AddText(0.0, 0.5,text); TString text2 = "#sqrt{s} = 7 TeV |#eta_{#mu}|<2.4"; cmsprel->AddText(0.8, 0.5, text2); TList* FileList = new TList(); TList* LabelList = new TList(); TObjArray *nameandlabelpairs = namesandlabels.Tokenize(","); for (Int_t i = 0; i < nameandlabelpairs->GetEntries(); ++i) { TObjArray *aFileLegPair = TString(nameandlabelpairs->At(i)->GetName()).Tokenize("="); if(aFileLegPair->GetEntries() == 2) { FileList->Add( TFile::Open(aFileLegPair->At(0)->GetName()) ); LabelList->Add( aFileLegPair->At(1) ); } else { std::cout << "Please give file name and legend entry in the following form:\n" << " filename1=legendentry1,filename2=legendentry2\n"; } } Int_t NOfFiles = FileList->GetSize(); if ( NOfFiles!=nOfFiles ){ std::cout<<"&MSG-e: NOfFiles = "<<nOfFiles<<std::endl; return; } std::vector<TString> LegLabels; LegLabels.reserve(nOfFiles); for(Int_t j=0; j < nOfFiles; j++) { TObjString* legend = (TObjString*)LabelList->At(j); LegLabels.push_back(legend->String()); std::cout<<"LegLabels["<<j<<"]"<<LegLabels[j]<<std::endl; } TLegend *leg=0; TCanvas* c0 = new TCanvas("c0", "c0",50, 20, 800,600); TCanvas* c1 = new TCanvas("c1", "c1",50, 20, 800,600); TCanvas* c2 = new TCanvas("c2", "c2",50, 20, 800,600); TCanvas* c3 = new TCanvas("c3", "c3",50, 20, 800,600); TCanvas* c4 = new TCanvas("c4", "c4",50, 20, 800,600); TCanvas* c5 = new TCanvas("c5", "c5",50, 20, 1200,800); TCanvas* c6 = new TCanvas("c6", "c6",50, 20, 1200,800); 
TCanvas* c0s = new TCanvas("c0s", "c0s",50, 20, 800,600); TCanvas* c1s = new TCanvas("c1s", "c1s",50, 20, 800,600); TCanvas* c2s = new TCanvas("c2s", "c2s",50, 20, 800,600); TCanvas* c3s = new TCanvas("c3s", "c3s",50, 20, 800,600); TCanvas* cFit = new TCanvas("cFit", "cFit",50, 20, 1600, 800); //----------------- CANVAS C0 --------------// c0->SetFillColor(0); c0->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Mass VS muon phi plus ------------------------------- TH1D *histoMassVsPhiPlus[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsPhiPlus[j] = (TH1D*)fin->Get("MassVsPhiPlus/allHistos/meanHisto"))){ histoMassVsPhiPlus[j]->SetLineStyle(linestylelist[j]); histoMassVsPhiPlus[j]->SetMarkerColor(colorlist[j]); histoMassVsPhiPlus[j]->SetLineColor(colorlist[j]); histoMassVsPhiPlus[j]->SetMarkerStyle(markerstylelist[j]); // histoMassVsPhiPlus[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoMassVsPhiPlus[j]->GetXaxis()->SetTitle("positive muon #phi (rad)"); histoMassVsPhiPlus[j]->GetYaxis()->SetTitle("M_{#mu#mu} (GeV)"); // histoMassVsPhiPlus[j]->GetYaxis()->SetRangeUser(88.5,93.5); histoMassVsPhiPlus[j]->GetYaxis()->SetRangeUser(90.0,91.5); histoMassVsPhiPlus[j]->GetXaxis()->SetRangeUser(-3.14,3.14); histoMassVsPhiPlus[j]->Draw(); } else { histoMassVsPhiPlus[j]->Draw("SAME"); } leg->AddEntry(histoMassVsPhiPlus[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c0->SaveAs(outDir+"MassVsPhiPlus.png"); //----------------- CANVAS C1 --------------// c1->SetFillColor(0); c1->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Mass VS muon eta plus ------------------------------- TH1D *histoMassVsEtaPlus[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsEtaPlus[j] = (TH1D*)fin->Get("MassVsEtaPlus/allHistos/meanHisto"))){ 
histoMassVsEtaPlus[j]->SetLineStyle(linestylelist[j]); histoMassVsEtaPlus[j]->SetMarkerColor(colorlist[j]); histoMassVsEtaPlus[j]->SetLineColor(colorlist[j]); histoMassVsEtaPlus[j]->SetMarkerStyle(markerstylelist[j]); // histoMassVsEtaPlus[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoMassVsEtaPlus[j]->GetXaxis()->SetTitle("positive muon #eta"); histoMassVsEtaPlus[j]->GetYaxis()->SetTitle("M_{#mu#mu} (GeV)"); // histoMassVsEtaPlus[j]->GetYaxis()->SetRangeUser(88.5,93.5); histoMassVsEtaPlus[j]->GetYaxis()->SetRangeUser(90.0,91.5); histoMassVsEtaPlus[j]->GetXaxis()->SetRangeUser(-2.41,2.41); histoMassVsEtaPlus[j]->Draw(); } else { histoMassVsEtaPlus[j]->Draw("SAME"); } leg->AddEntry(histoMassVsEtaPlus[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c1->SaveAs(outDir+"MassVsEtaPlus.png"); //----------------- CANVAS C2 --------------// c2->SetFillColor(0); c2->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Mass VS muon eta plus - eta minus ------------------------------- TH1D *histoMassVsEtaPlusMinusDiff[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsEtaPlusMinusDiff[j] = (TH1D*)fin->Get("MassVsEtaPlusMinusDiff/allHistos/meanHisto"))){ histoMassVsEtaPlusMinusDiff[j]->SetLineStyle(linestylelist[j]); histoMassVsEtaPlusMinusDiff[j]->SetMarkerColor(colorlist[j]); histoMassVsEtaPlusMinusDiff[j]->SetLineColor(colorlist[j]); histoMassVsEtaPlusMinusDiff[j]->SetMarkerStyle(markerstylelist[j]); // histoMassVsEtaPlusMinusDiff[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoMassVsEtaPlusMinusDiff[j]->GetXaxis()->SetTitle("#eta pos. muon #eta neg. 
muon"); histoMassVsEtaPlusMinusDiff[j]->GetYaxis()->SetTitle("M_{#mu#mu} (GeV)"); // histoMassVsEtaPlusMinusDiff[j]->GetYaxis()->SetRangeUser(88.0,96.0); histoMassVsEtaPlusMinusDiff[j]->GetYaxis()->SetRangeUser(90.0,91.5); histoMassVsEtaPlusMinusDiff[j]->GetXaxis()->SetRangeUser(-3,3); histoMassVsEtaPlusMinusDiff[j]->Draw(); } else { histoMassVsEtaPlusMinusDiff[j]->Draw("SAME"); } leg->AddEntry(histoMassVsEtaPlusMinusDiff[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c2->SaveAs(outDir+"MassVsEtaPlusMinusDiff.png"); //----------------- CANVAS C3 --------------// c3->SetFillColor(0); c3->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Mass VS muon phi minus ------------------------------- TH1D *histoMassVsPhiMinus[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsPhiMinus[j] = (TH1D*)fin->Get("MassVsPhiMinus/allHistos/meanHisto"))){ histoMassVsPhiMinus[j]->SetLineStyle(linestylelist[j]); histoMassVsPhiMinus[j]->SetMarkerColor(colorlist[j]); histoMassVsPhiMinus[j]->SetLineColor(colorlist[j]); histoMassVsPhiMinus[j]->SetMarkerStyle(markerstylelist[j]); // histoMassVsPhiMinus[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoMassVsPhiMinus[j]->GetXaxis()->SetTitle("negative muon #phi (rad)"); histoMassVsPhiMinus[j]->GetYaxis()->SetTitle("M_{#mu#mu} (GeV)"); // histoMassVsPhiMinus[j]->GetYaxis()->SetRangeUser(88.5,93.5); histoMassVsPhiMinus[j]->GetYaxis()->SetRangeUser(90.0,91.5); histoMassVsPhiMinus[j]->GetXaxis()->SetRangeUser(-3.14,3.14); histoMassVsPhiMinus[j]->Draw(); } else { histoMassVsPhiMinus[j]->Draw("SAME"); } leg->AddEntry(histoMassVsPhiMinus[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c3->SaveAs(outDir+"MassVsPhiMinus.png"); //----------------- CANVAS C4 --------------// c4->SetFillColor(0); c4->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); 
leg->SetTextFont(42); // Mass VS muon eta minus ------------------------------- TH1D *histoMassVsEtaMinus[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsEtaMinus[j] = (TH1D*)fin->Get("MassVsEtaMinus/allHistos/meanHisto"))){ histoMassVsEtaMinus[j]->SetLineStyle(linestylelist[j]); histoMassVsEtaMinus[j]->SetMarkerColor(colorlist[j]); histoMassVsEtaMinus[j]->SetLineColor(colorlist[j]); histoMassVsEtaMinus[j]->SetMarkerStyle(markerstylelist[j]); // histoMassVsEtaMinus[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoMassVsEtaMinus[j]->GetXaxis()->SetTitle("negative muon #eta"); histoMassVsEtaMinus[j]->GetYaxis()->SetTitle("M_{#mu#mu} (GeV)"); // histoMassVsEtaMinus[j]->GetYaxis()->SetRangeUser(88.5,93.5); histoMassVsEtaMinus[j]->GetYaxis()->SetRangeUser(90.0,91.5); histoMassVsEtaMinus[j]->GetXaxis()->SetRangeUser(-2.41,2.41); histoMassVsEtaMinus[j]->Draw(); } else { histoMassVsEtaMinus[j]->Draw("SAME"); } leg->AddEntry(histoMassVsEtaMinus[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c4->SaveAs(outDir+"MassVsEtaMinus.png"); //----------------- CANVAS C5 --------------// c5->SetFillColor(0); c5->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Mass VS muon phi plus ------------------------------- TH2D *histoMassVsEtaPhiPlus[nOfFiles]; TStyle *newStyle; newStyle->SetPalette(1); // newStyle->SetOptTitle(1); Double_t zMin(82.); Double_t zMax(96.); for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoMassVsEtaPhiPlus[j] = (TH2D*)fin->Get("MassVsEtaPhiPlus/allHistos/meanHisto"))){ if ( j == 0 ) { histoMassVsEtaPhiPlus[j]->SetTitle(LegLabels[j]); histoMassVsEtaPhiPlus[j]->GetXaxis()->SetTitle("positive muon #phi (rad)"); histoMassVsEtaPhiPlus[j]->GetYaxis()->SetTitle("positive muon #eta"); zMin = histoMassVsEtaPhiPlus[j]->GetMinimum(); zMax = histoMassVsEtaPhiPlus[j]->GetMaximum(); 
histoMassVsEtaPhiPlus[j]->Draw("COLZ"); c5->SaveAs(outDir+"MassVsEtaPhiPlus_file0.png"); } else { histoMassVsEtaPhiPlus[j]->SetTitle(LegLabels[j]); histoMassVsEtaPhiPlus[j]->SetMinimum(zMin); histoMassVsEtaPhiPlus[j]->SetMaximum(zMax); histoMassVsEtaPhiPlus[j]->Draw("COLZ"); c5->SaveAs(outDir+"MassVsEtaPhiPlus_file"+(TString)Form("%d",(Int_t)j)+".png"); } } } //cmsprel->Draw("same"); // //----------------- CANVAS C6 --------------// // c6->SetFillColor(0); // c6->cd(); // leg = new TLegend(0.50,0.25,0.90,0.40); // leg->SetBorderSize(1); // leg->SetFillColor(0); // leg->SetTextFont(42); // // Mass VS muon phi minus ------------------------------- // TH2D *histoMassVsEtaPhiMinus[nOfFiles]; // for(Int_t j=0; j < nOfFiles; j++) { // TFile *fin = (TFile*)FileList->At(j); // if (( histoMassVsEtaPhiMinus[j] = (TH2D*)fin->Get("MassVsEtaPhiMinus/allHistos/meanHisto"))){ // if ( j == 0 ) { // histoMassVsEtaPhiMinus[j]->GetXaxis()->SetTitle("negative muon #phi (rad)"); // histoMassVsEtaPhiMinus[j]->GetYaxis()->SetTitle("negative muon #eta"); // zMin = histoMassVsEtaPhiMinus[j]->GetMinimum(); // zMax = histoMassVsEtaPhiMinus[j]->GetMaximum(); // histoMassVsEtaPhiMinus[j]->Draw(); // } else { // histoMassVsEtaPhiMinus[j]->SetMinimum(zMin); // histoMassVsEtaPhiMinus[j]->SetMaximum(zMax); // histoMassVsEtaPhiMinus[j]->Draw("SAME"); // } // leg->AddEntry(histoMassVsEtaPhiMinus[j],LegLabels[j],"PL"); // } // } // //cmsprel->Draw("same"); // leg->Draw("same"); // c6->SaveAs(outDir+"MassVsEtaPhiMinus.png"); // newStyle->SetOptTitle(0); const Color_t colorlist_resol[7]={kBlack,kGreen,kBlue,kMagenta,kCyan,kTeal,kRed}; const Int_t linestylelist_resol[7]={1,1,1,1,1,1,1}; const Int_t stylelist_resol[7]={1,1,1,1,1,1,1}; const Style_t markerstylelist_resol[7]={kOpenCircle,kOpenTriangleUp,kOpenTriangleUp,kOpenCircle,kOpenTriangleUp,kOpenCircle,kOpenTriangleUp}; // //----------------- CANVAS C0S --------------// // c0s->SetFillColor(0); // c0s->cd(); // leg = new TLegend(0.50,0.25,0.90,0.40); 
// leg->SetBorderSize(1); // leg->SetFillColor(0); // leg->SetTextFont(42); // // Sigma VS muon phi plus ------------------------------- // TH1D *histoSigmaVsPhiPlus[nOfFiles]; // for(Int_t j=0; j < nOfFiles; j++) { // TFile *fin = (TFile*)FileList->At(j); // if (( histoSigmaVsPhiPlus[j] = (TH1D*)fin->Get("MassVsPhiPlus/allHistos/sigmaHisto"))){ // histoSigmaVsPhiPlus[j]->SetLineStyle(linestylelist_resol[j]); // histoSigmaVsPhiPlus[j]->SetMarkerColor(colorlist_resol[j]); // histoSigmaVsPhiPlus[j]->SetLineColor(colorlist_resol[j]); // histoSigmaVsPhiPlus[j]->SetMarkerStyle(markerstylelist_resol[j]); // // histoSigmaVsPhiPlus[j]->SetMarkerSize(0.75); // if ( j == 0 ) { // histoSigmaVsPhiPlus[j]->GetXaxis()->SetTitle("positive muon #phi (rad)"); // histoSigmaVsPhiPlus[j]->GetYaxis()->SetTitle("#sigma(M_{#mu#mu}) (GeV)"); // // histoSigmaVsPhiPlus[j]->GetYaxis()->SetRangeUser(88.5,93.5); // histoSigmaVsPhiPlus[j]->GetYaxis()->SetRangeUser(0.,3.); // histoSigmaVsPhiPlus[j]->GetXaxis()->SetRangeUser(-3.14,3.14); // histoSigmaVsPhiPlus[j]->Draw(); // } else { // histoSigmaVsPhiPlus[j]->Draw("SAME"); // } // leg->AddEntry(histoSigmaVsPhiPlus[j],LegLabels[j],"PL"); // } // } // //cmsprel->Draw("same"); // leg->Draw("same"); // c0s->SaveAs(outDir+"SigmaVsPhiPlus.png"); //----------------- CANVAS C1S --------------// c1s->SetFillColor(0); c1s->cd(); leg = new TLegend(0.50,0.25,0.90,0.40); leg->SetBorderSize(1); leg->SetFillColor(0); leg->SetTextFont(42); // Sigma VS muon eta plus ------------------------------- TH1D *histoSigmaVsEtaPlus[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoSigmaVsEtaPlus[j] = (TH1D*)fin->Get("MassVsEtaPlus/allHistos/sigmaHisto"))){ histoSigmaVsEtaPlus[j]->SetLineStyle(linestylelist_resol[j]); histoSigmaVsEtaPlus[j]->SetMarkerColor(colorlist_resol[j]); histoSigmaVsEtaPlus[j]->SetLineColor(colorlist_resol[j]); histoSigmaVsEtaPlus[j]->SetMarkerStyle(markerstylelist_resol[j]); // 
histoSigmaVsEtaPlus[j]->SetMarkerSize(0.75); if ( j == 0 ) { histoSigmaVsEtaPlus[j]->GetXaxis()->SetTitle("positive muon #eta"); histoSigmaVsEtaPlus[j]->GetYaxis()->SetTitle("#sigma(M_{#mu#mu}) (GeV)"); // histoSigmaVsEtaPlus[j]->GetYaxis()->SetRangeUser(88.5,93.5); histoSigmaVsEtaPlus[j]->GetYaxis()->SetRangeUser(0.,3.); histoSigmaVsEtaPlus[j]->GetXaxis()->SetRangeUser(-2.41,2.41); histoSigmaVsEtaPlus[j]->Draw(); } else { histoSigmaVsEtaPlus[j]->Draw("SAME"); } leg->AddEntry(histoSigmaVsEtaPlus[j],LegLabels[j],"PL"); } } //cmsprel->Draw("same"); leg->Draw("same"); c1s->SaveAs(outDir+"SigmaVsEtaPlus.png"); // //----------------- CANVAS C2S --------------// // c2s->SetFillColor(0); // c2s->cd(); // leg = new TLegend(0.50,0.25,0.90,0.40); // leg->SetBorderSize(1); // leg->SetFillColor(0); // leg->SetTextFont(42); // // Sigma VS muon eta plus - eta minus ------------------------------- // TH1D *histoSigmaVsEtaPlusMinusDiff[nOfFiles]; // for(Int_t j=0; j < nOfFiles; j++) { // TFile *fin = (TFile*)FileList->At(j); // if (( histoSigmaVsEtaPlusMinusDiff[j] = (TH1D*)fin->Get("MassVsEtaPlusMinusDiff/allHistos/sigmaHisto"))){ // histoSigmaVsEtaPlusMinusDiff[j]->SetLineStyle(linestylelist_resol[j]); // histoSigmaVsEtaPlusMinusDiff[j]->SetMarkerColor(colorlist_resol[j]); // histoSigmaVsEtaPlusMinusDiff[j]->SetLineColor(colorlist_resol[j]); // histoSigmaVsEtaPlusMinusDiff[j]->SetMarkerStyle(markerstylelist_resol[j]); // // histoSigmaVsEtaPlusMinusDiff[j]->SetMarkerSize(0.75); // if ( j == 0 ) { // histoSigmaVsEtaPlusMinusDiff[j]->GetXaxis()->SetTitle("#eta pos. muon - #eta neg. 
muon"); // histoSigmaVsEtaPlusMinusDiff[j]->GetYaxis()->SetTitle("#sigma(M_{#mu#mu}) (GeV)"); // // histoSigmaVsEtaPlusMinusDiff[j]->GetYaxis()->SetRangeUser(88.0,96.0); // histoSigmaVsEtaPlusMinusDiff[j]->GetYaxis()->SetRangeUser(0.,3.); // //histoSigmaVsEtaPlusMinusDiff[j]->GetYaxis()->SetRangeUser(90.60,90.75); // histoSigmaVsEtaPlusMinusDiff[j]->GetXaxis()->SetRangeUser(-3.2,3.2); // histoSigmaVsEtaPlusMinusDiff[j]->Draw(); // } else { // histoSigmaVsEtaPlusMinusDiff[j]->Draw("SAME"); // } // leg->AddEntry(histoSigmaVsEtaPlusMinusDiff[j],LegLabels[j],"PL"); // } // } // //cmsprel->Draw("same"); // leg->Draw("same"); // c2s->SaveAs(outDir+"SigmaVsEtaPlusMinusDiff.png"); // //----------------- CANVAS C3S --------------// // c3s->SetFillColor(0); // c3s->cd(); // leg = new TLegend(0.35,0.15,0.55,0.35); // leg->SetBorderSize(1); // leg->SetFillColor(0); // leg->SetTextFont(42); // // Sigma VS muon pT ------------------------------- // TH1D *histoSigmaVsPt[nOfFiles]; // for(Int_t j=0; j < nOfFiles; j++) { // TFile *fin = (TFile*)FileList->At(j); // if (( histoSigmaVsPt[j] = (TH1D*)fin->Get("MassVsPt/allHistos/sigmaHisto"))){ // histoSigmaVsPt[j]->SetLineStyle(linestylelist_resol[j]); // histoSigmaVsPt[j]->SetMarkerColor(colorlist_resol[j]); // histoSigmaVsPt[j]->SetLineColor(colorlist_resol[j]); // histoSigmaVsPt[j]->SetMarkerStyle(markerstylelist_resol[j]); // // histoSigmaVsPt[j]->SetMarkerSize(0.75); // if ( j == 0 ) { // histoSigmaVsPt[j]->GetXaxis()->SetTitle("muon p_T (GeV)"); // histoSigmaVsPt[j]->GetYaxis()->SetTitle("#sigma(M_{#mu#mu}) (GeV)"); // // histoSigmaVsPt[j]->GetYaxis()->SetRangeUser(88.0,96.0); // histoSigmaVsPt[j]->GetYaxis()->SetRangeUser(0.,3.); // //histoSigmaVsPt[j]->GetYaxis()->SetRangeUser(90.60,90.75); // histoSigmaVsPt[j]->GetXaxis()->SetRangeUser(15.,105.); // histoSigmaVsPt[j]->Draw(); // } else { // histoSigmaVsPt[j]->Draw("SAME"); // } // leg->AddEntry(histoSigmaVsPt[j],LegLabels[j],"PL"); // } // } // //cmsprel->Draw("same"); // 
leg->Draw("same"); // c3s->SaveAs(outDir+"SigmaVsPt.png"); //----------------- CANVAS CFIT --------------// cFit->SetFillColor(0); cFit->cd(); Float_t nN = TMath::Sqrt(nOfFiles); Int_t nX = (Int_t)nN; if ( nN-nX > 0.5 ) nX++; Int_t nY = (Int_t)(nOfFiles/nX); std::cout << nX << " ," << nY << std::endl; cFit->Divide(nOfFiles,1); // Mass VS muon phi plus ------------------------------- TFile *ZFitFile = new TFile("ZFitFile.root","RECREATE"); RooPlot *histoLineShape[nOfFiles]; for(Int_t j=0; j < nOfFiles; j++) { TFile *fin = (TFile*)FileList->At(j); if (( histoLineShape[j] = (RooPlot*)fin->Get("hRecBestResAllEvents_Mass_frame"))){ std::cout<<"Writing fit histogrem file n. "<<j<<std::endl; histoLineShape[j]->Write(); cFit->cd(j+1); histoLineShape[j]->SetTitle(LegLabels[j]); histoLineShape[j]->Draw(); histoLineShape[j]->GetXaxis()->SetTitle("M_{#mu#mu} (GeV)"); // TPaveText *cmsprel2 = new TPaveText(0.19, 0.95, 0.95, 0.99, "NDC"); // cmsprel2->SetTextSize(0.03); // cmsprel2->SetTextFont(42); // cmsprel2->SetFillColor(0); // cmsprel2->SetBorderSize(0); // cmsprel2->SetMargin(0.01); // cmsprel2->SetTextAlign(12); // align left // cmsprel2->AddText(0.666666, 0.5, LegLabels[j]); } } ZFitFile->Close(); // cmsprel2->Draw("same"); cFit->SaveAs("ZFitFile.root"); return; };
void hPYphocalc(){ gStyle->SetOptStat(kFALSE); const int maxNpart = 100; int sth=0, Gth=0; TFile *f = TFile::Open(outG); if(sth==0){TString dirname = "std";} else if(sth==1){TString dirname ="Gri055";} else {TString dirname ="Gri101";} TObjString* dataname = (TObjString*)f->Get(Form("dataname")); TObjString* histoname = (TObjString*)f->Get(Form("histoname")); TFile *fdata = TFile::Open(dataname->GetName()); TString name; if(Gth==0) name = "G0"; else if(Gth<nGlau) name = Form("Glau_%d",Gth); else name = Form("bin_%d",Gth-nGlau+1); TObjString* Glaubername = (TObjString*)f->Get(Form("%s/%s/Glaubername",dirname.Data(),name.Data())); TVectorD* k0 = (TVectorD*)f->Get(Form("%s/%s/k0",dirname.Data(),name.Data())); TVectorD* theta0 = (TVectorD*)f->Get(Form("%s/%s/theta0",dirname.Data(),name.Data())); TVectorD* xmin = (TVectorD*)f->Get(Form("%s/%s/xmin",dirname.Data(),name.Data())); TVectorD* xmax = (TVectorD*)f->Get(Form("%s/%s/xmax",dirname.Data(),name.Data())); TVectorD* thetabest = (TVectorD*)f->Get(Form("%s/%s/thetabest",dirname.Data(),name.Data())); TVectorD* kbest = (TVectorD*)f->Get(Form("%s/%s/kbest",dirname.Data(),name.Data())); TVectorD* kpoint = (TVectorD*)f->Get(Form("%s/%s/kpoint",dirname.Data(),name.Data())); TVectorD* NcollAver = (TVectorD*)f->Get(Form("%s/%s/NcollAver",dirname.Data(),name.Data())); TVectorD* centbin = (TVectorD*)f->Get(Form("%s/%s/centbin",dirname.Data(),name.Data())); TFile *fGlauber = TFile::Open(Glaubername->GetName()); //(*k0)[0]=1.39; (*kbest)[0]=0.425; //(*theta0)[0]=3.41; (*thetabest)[0]=1.30; TF1 *gammafun[maxNpart]; TF1 *gammafunevt[maxNpart]; TF1 *gammafunnucl[maxNpart]; TF1 *gammafunnuclNcoll[maxNpart]; double kevt = (*k0)[0]-(*kbest)[0]; for(int iNpart=0;iNpart<maxNpart;iNpart++){ gammafun[iNpart] = new TF1("gammafun","TMath::GammaDist(x,[0],0,[1])",0,200); gammafunevt[iNpart] = new TF1("gammafunevt","TMath::GammaDist(x,[0],0,[1])",0,200); gammafunnucl[iNpart] = new TF1("gammafunnucl","TMath::GammaDist(x,[0],0,[1])",0,200); 
gammafunnuclNcoll[iNpart] = new TF1("gammafunnuclNcoll","TMath::GammaDist(x,[0],0,[1])",0,200); double k_=(*k0)[0]+(*kbest)[0]*(iNpart-2); double theta_=(*theta0)[0]+(*thetabest)[0]*TMath::Log(iNpart-1); gammafun[iNpart]->SetParameter(0,k_); //[1]: k value gammafun[iNpart]->SetParameter(1,theta_); //[2]: theta value gammafunevt[iNpart]->SetParameter(0,kevt); gammafunevt[iNpart]->SetParameter(1,theta_); gammafunnucl[iNpart]->SetParameter(0,(*kbest)[0]); gammafunnucl[iNpart]->SetParameter(1,theta_); gammafunnuclNcoll[iNpart]->SetParameter(0,(*kbest)[0]*(iNpart-1)); gammafunnuclNcoll[iNpart]->SetParameter(1,theta_); if(iNpart==2){ gammafunnuclNcoll[iNpart]->SetNpx(1e4); gammafunnuclNcoll[iNpart]->SetRange(1e-11,200); } } TTree *t = (TTree*)fGlauber->Get("nt_p_Pb"); Float_t Ncoll, Npart, B; Long_t Nevent; t->SetBranchAddress("Ncoll",&Ncoll); t->SetBranchAddress("Npart",&Npart); t->SetBranchAddress("B",&B); Nevent = (Long_t) t->GetEntries(); Long_t Ev; Int_t Bino; Double_t Para, Para_nucl, Para_p, Para_evt, Bi_Para_nucl, Bi_Para_evt; double yUCM[8]={}; double yPCM[8]={}; double yVCM[8]={}; double yUCM_[200]={}; double yPCM_[200]={}; double yVCM_[200]={}; double C=1e-4; double PNcoll[maxNpart]={}; TH1D *histo_obs = (TH1D*)fdata->Get(histoname->GetName()); TH1D *histo_obs_norm = (TH1D*)histo_obs->Clone(); histo_obs_norm->Scale(1/histo_obs->Integral()); TH1D* hUCM = new TH1D("hUCM","hUCM",200,0,200); TH1D* hPCM = new TH1D("hPCM","hPCM",200,0,200); TH1D* hVCM = new TH1D("hVCM","hVCM",200,0,200); TH2D* NcollvsET = new TH2D("NcollvsET","NcollvsET",100,0,100,2000,0,400); for(Ev=0;Ev<Nevent;Ev++){ t->GetEntry(Ev); PNcoll[(int)Ncoll]++; } for(int i=0;i<maxNpart;i++){ PNcoll[i]/=Nevent; cout<<PNcoll[i]<<"\t"; } cout<<endl; for(Ev=0;Ev<Nevent;Ev++){ if(Ev%100000==0) cout<<"\t"<<"Have run "<<Ev<<" events"<<endl; t->GetEntry(Ev); Para = gammafun[(int)Npart]->GetRandom(); Para_nucl = gammafunnuclNcoll[(int)Npart]->GetRandom(); Para_p = gammafunnuclNcoll[(int)Npart]->GetRandom(); 
Para_evt = 0; for(int i=0;i<N-1;i++) if(Para>=(*kpoint)[i] && Para<(*kpoint)[i+1]) int ibin = i; for(int Bino=0;Bino<Ncoll;Bino++){ Bi_Para_evt = gammafunevt[(int)Npart]->GetRandom(); Para_evt += Bi_Para_evt; } double PNcollET = gammafun[(int)Npart]->Eval(Para); // double k = gammafun[(int)Npart]->GetParameter(0); double theta=(*theta0)[0]+(*thetabest)[0]*TMath::Log(Npart-1); double YNcollUCM = C*Ncoll; double YNcollPCM = C/1.0/(*kbest)[0]/theta*(Para_nucl); double YNcollVCM = C/2.0*(Para_nucl/(*kbest)[0]/theta+Ncoll); yUCM[ibin] += PNcoll[(int)Ncoll]*PNcollET*YNcollUCM; yPCM[ibin] += PNcoll[(int)Ncoll]*PNcollET*YNcollPCM; yVCM[ibin] += PNcoll[(int)Ncoll]*PNcollET*YNcollVCM; yUCM_[(int)Para] += PNcoll[(int)Ncoll]*PNcollET*YNcollUCM; yPCM_[(int)Para] += PNcoll[(int)Ncoll]*PNcollET*YNcollPCM; yVCM_[(int)Para] += PNcoll[(int)Ncoll]*PNcollET*YNcollVCM; NcollvsET->Fill(Ncoll,Para); } for(int ibin=1;ibin<hUCM->GetNbinsX();ibin++){ hUCM->SetBinContent(ibin,yUCM_[ibin-1]); hPCM->SetBinContent(ibin,yPCM_[ibin-1]); hVCM->SetBinContent(ibin,yVCM_[ibin-1]); } TCanvas *c1 = new TCanvas(); TCanvas *c2 = new TCanvas(); c1->SetLogy(); c2->SetLogx(); c2->SetLogy(); c2->SetLogz(); c1->cd(); TH1D* hFrame = new TH1D("","",200,0,200); hFrame->SetTitle(""); hFrame->GetXaxis()->SetTitle("HF #Sigma E_{T} |#eta|>4"); hFrame->GetYaxis()->SetTitle("Yield no units"); hFrame->GetXaxis()->SetRangeUser(0,150); hFrame->GetYaxis()->SetRangeUser(1e-6,1); hFrame->Draw(); histo_obs_norm->SetMarkerStyle(20); histo_obs_norm->SetMarkerSize(1.0); histo_obs_norm->SetMarkerColor(1); histo_obs_norm->Draw("Psame"); hUCM->SetMarkerStyle(24); hUCM->SetMarkerSize(1.0); hUCM->SetMarkerColor(2); hPCM->SetMarkerStyle(29); hPCM->SetMarkerSize(1.0); hPCM->SetMarkerColor(4); hVCM->SetMarkerStyle(34); hVCM->SetMarkerSize(1.0); hVCM->SetMarkerColor(5); hUCM->Draw("Psame"); hPCM->Draw("Psame"); hVCM->Draw("Psame"); TLegend *leg = new TLegend(0.1,0.2,0.5,0.45); leg->SetFillColor(0); leg->SetFillStyle(0); 
leg->SetBorderSize(0); leg->SetTextFont(42); leg->SetTextSize(0.03); leg->AddEntry(histo_obs_norm,"minimum bias events","lp"); leg->AddEntry(hUCM,"hard scattering events(UCM)","lp"); leg->AddEntry(hPCM,"hard scattering events(PCM)","lp"); leg->AddEntry(hVCM,"hard scattering events(VCM)","lp"); leg->Draw("same"); c1->Print("paperfig3_CMS.png"); c2->cd(); gStyle->SetOptStat("nemr"); NcollvsET->GetXaxis()->SetTitle("Ncoll"); NcollvsET->GetYaxis()->SetTitle("HF #Sigma E_{T} |#eta|>4"); NcollvsET->Draw("colz"); c2->Print("NcollvsET2D.png"); ofstream fstr("result_CMS.dat"); fstr<<"i"<<"\t"<<"centbin"<<"\t"<<"kpoint"<<"\t"<<"NcollAver"<<"\t"<<"UCM"<<"\t"<<"PCM"<<"\t"<<"VCM"<<"\t"<<"pho1"<<"\t"<<"pho2"<<"\t"<<"MB"<<endl; for(int i=0;i<N-1;i++){ fstr<<i<<"\t"<<(*centbin)[i]*100<<"% to "<<(*centbin)[i+1]*100<<"% \t"<<(*kpoint)[i]<<" to "<<(*kpoint)[i+1]<<"\t"<<(*NcollAver)[i]<<"\t"<<yUCM[i]<<"\t"<<yPCM[i]<<"\t"<<yVCM[i]<<"\t"<<yPCM[i]/yUCM[i]<<"\t"<<yVCM[i]/yUCM[i]<<"\t"<<"undetermined"<<endl; } }
int convertToInt(const TObjArray& run_ls_event, int idx) { TObjString* entry = dynamic_cast<TObjString*>(run_ls_event.At(idx)); assert(entry); return atoi(entry->GetString().Data()); }
/**
 * 1. Data sample: pp200 W->e nu with pile-up corresponding to 1 MHz min. bias
 *    events, 50 K events y2011, 10 K events y2012.
 *
 * 2. Proof of principle: no pile-up for both PPV and KFV
 *
 *    a. Reconstructed primary track multiplicity versus corresponding MC
 *       "reconstructable" (i.e. in STAR acceptance, no. of TPC MC hits >= 15)
 *       track multiplicity.
 *
 *    b. Corrected reconstructed primary track multiplicity (i.e. multiplied
 *       by QA/100.) versus corresponding MC "reconstructable" (i.e. in STAR
 *       acceptance, no. of TPC MC hits >= 15) track multiplicity.
 *
 *    c. Efficiency of primary vertex reconstruction versus MC
 *       "reconstructable" track multiplicity.
 *
 * 3. With pile-up: repeat the above (a-c) with the old ranking scheme for
 *
 *    I.   Any reconstructed primary vertex which is matched with the MC
 *         trigger vertex (MC = 1)
 *
 *    II.  The best (in the sense of ranking) reconstructed primary vertex
 *         which is matched with the MC trigger vertex (MC = 1)
 *
 *    III. The best (in the sense of ranking) reconstructed primary vertex
 *         which is not matched with the MC trigger vertex (MC != 1)
 *
 * 4. With pile-up:
repeat above (a-c) with new ranking scheme for cases I-III */
//
// Loops over a MuDst chain, matches reconstructed primary vertices to MC
// vertices via idTruth, and fills efficiency histograms for three cases:
// "Any" (any vertex matched to MC trigger vertex Id == 1), "Good" (the
// highest-rank vertex matched to MC == 1) and "Bad" (the highest-rank vertex
// not matched to MC == 1). Optionally fills the VertexG/VertexB ntuples for
// ranking studies. Results are written to `outFile`.
//
// @param nevent     maximum number of events to process (clamped to chain size)
// @param file       file or .lis list handed to StMuDstMaker
// @param outFile    name of the output ROOT file
// @param fillNtuple when true, also fill per-vertex ntuples (second loop)
void MuMcPrVKFV2012(Long64_t nevent, const char *file, const std::string& outFile, bool fillNtuple)
{
#ifdef __TMVA__
  // NOTE(review): `outFile` is a const reference; the next two statements
  // mutate it and will not compile when __TMVA__ is defined — confirm this
  // path is ever built, or copy into a local string first.
  boost::replace_last(outFile, ".root", "");
  outFile += ".TMVArank.root";
  // create a set of variables and declare them to the reader
  // - the variable names must correspond in name and type to
  //   those given in the weight file(s) that you use
  TString separator(":");
  TString Vnames(vnames);
  TObjArray *array = Vnames.Tokenize(separator);
  std::vector<std::string> inputVars;
  TIter next(array);
  TObjString *objs;
  while ((objs = (TObjString *) next())) {
    std::cout << objs->GetString() << std::endl;
  }
  inputVars.push_back("beam");
  inputVars.push_back("postx");
  inputVars.push_back("prompt");
  inputVars.push_back("cross");
  inputVars.push_back("tof");
  inputVars.push_back("notof");
  inputVars.push_back("EEMC");
  inputVars.push_back("noEEMC");
  inputVars.push_back("chi2");
  std::vector<double> *inputVec = new std::vector<double>( inputVars.size() );
  IClassifierReader *classReader = new ReadBDT( inputVars );
#endif /* __TMVA__ */
  TFile *fOut = TFile::Open(outFile.c_str(), "recreate");
  data_t data;
  // Book histograms
  // Variable-width multiplicity binning: unit bins up to 50, then
  // progressively coarser bins.
  const int nMcRecMult = 75;
  TArrayD xMult(nMcRecMult + 1);
  xMult[0] = -0.5;
  for (int i = 1; i <= nMcRecMult; i++) {
    if (xMult[i - 1] < 50) xMult[i] = xMult[i - 1] + 1;        // 1 - 50
    else if (xMult[i - 1] < 100) xMult[i] = xMult[i - 1] + 2;  // 51 - 75
    else if (xMult[i - 1] < 200) xMult[i] = xMult[i - 1] + 10; // 76 - 85
    else xMult[i] = xMult[i - 1] + 100;                        // 86 -100
  }
  // Denominator histogram for all efficiencies (filled once per usable event).
  TH1D *McRecMulT = new TH1D("McRecMulT", "Reconstructable multiplicity for trigger Mc Vertex", nMcRecMult, xMult.GetArray());
  struct Name_t {
    const Char_t *Name;   // histogram name suffix
    const Char_t *Title;  // histogram title fragment
  };
  const Name_t HCases[3] = {
    {"Any", "Any vertex matched with MC == 1"},
    {"Good", "The best rank vertex with MC == 1"},
    {"Bad", "The best rank vertex with MC != 1"}
  };
  const Name_t Plots[4] = {
    {"Mult" , "the reconstructed (uncorrected) track multiplicity versus Reconstructable multiplicity"},
    {"MultQA" , "the reconstructed (corrected for QA) track multiplicity versus Reconstructable multiplicity"},
    {"McRecMul", "Reconstructable multiplicity"},
    {"YvsX" , "Bad versus Good value"}
  };
  /* h p */
  TH1 *hists[3][4];
  // NOTE(review): the booking loop below only fills p = 0..2; hists[h][3]
  // ("YvsX") stays uninitialized and must not be dereferenced.
  for (int h = 0; h < 3; h++) {
    for (int p = 0; p < 4; p++) {
      TString Name(Plots[p].Name);
      Name += HCases[h].Name;
      TString Title(Plots[p].Title);
      Title += " for ";
      Title += HCases[h].Title;
      Title += " vertex";
      if (p < 2) hists[h][p] = new TH2D(Name, Title, nMcRecMult, xMult.GetArray(), nMcRecMult, xMult.GetArray());
      else if (p == 2) hists[h][p] = new TH1D(Name, Title, nMcRecMult, xMult.GetArray());
    }
  }
  TNtuple *VertexG = new TNtuple("VertexG", "good vertex & global params info", vnames);
  TNtuple *VertexB = new TNtuple("VertexB", "bad vertex & global params info", vnames);
  // ----------------------------------------------
  StMuDstMaker *maker = new StMuDstMaker(0, 0, "", file, "st:MuDst.root", 1e9); // set up maker in read mode
  // 0,0      this mean read mode
  // dir      read all files in this directory
  // file     bla.lis real all file in this list, if (file!="") dir is ignored
  // filter   apply filter to filenames, multiple filters are separated by ':'
  // 10       maximum number of file to read
  maker->SetStatus("*", 0);
  std::vector<std::string> activeBranchNames = {
    "MuEvent", "PrimaryVertices", "StStMuMcVertex", "StStMuMcTrack"
  };
  // Set Active braches
  for (const auto& branchName : activeBranchNames)
    maker->SetStatus(branchName.c_str(), 1);
  TChain *tree = maker->chain();
  Long64_t nentries = tree->GetEntries();
  nevent = TMath::Min(nevent, nentries);
  std::cout << nentries << " events in chain " << nevent << " will be read." << std::endl;
  tree->SetCacheSize(-1);        //by setting the read cache to -1 we set it to the AutoFlush value when writing
  tree->SetCacheLearnEntries(1); //one entry is sufficient to learn
  tree->SetCacheEntryRange(0, nevent);
  for (Long64_t ev = 0; ev < nevent; ev++) {
    if (maker->Make()) break;
    StMuDst *muDst = maker->muDst();     // get a pointer to the StMuDst class, the class that points to all the data
    StMuEvent *muEvent = muDst->event(); // get a pointer to the class holding event-wise information
    int referenceMultiplicity = muEvent->refMult(); // get the reference multiplicity
    TClonesArray *PrimaryVertices = muDst->primaryVertices();
    int nPrimaryVertices = PrimaryVertices->GetEntriesFast();
    TClonesArray *MuMcVertices = muDst->mcArray(0);
    int nMuMcVertices = MuMcVertices->GetEntriesFast();
    TClonesArray *MuMcTracks = muDst->mcArray(1);
    int nMuMcTracks = MuMcTracks->GetEntriesFast();
    // Progress report roughly every 10% of the requested events.
    if ( nevent >= 10 && ev % int(nevent*0.1) == 0 ) {
      std::cout << "Event #" << ev << "\tRun\t" << muEvent->runId()
                << "\tId: " << muEvent->eventId()
                << " refMult= " << referenceMultiplicity
                << "\tPrimaryVertices " << nPrimaryVertices
                << "\t" << " " << nMuMcVertices
                << "\t" << " " << nMuMcTracks
                << std::endl;
    }
    // const Double_t field = muEvent->magneticField()*kilogauss;
    if (! nMuMcVertices || ! nMuMcTracks || nPrimaryVertices <= 0) {
      std::cout << "Ev. " << ev << " has no MC information ==> skip it" << std::endl;
      std::cout << "OR no reconstructed verticies found" << std::endl;
      continue;
    }
    // Count number of MC tracks at a vertex with TPC reconstructable tracks
    std::multimap<int, int> Mc2McHitTracks;
    for (int m = 0; m < nMuMcTracks; m++) {
      StMuMcTrack *McTrack = (StMuMcTrack *) MuMcTracks->UncheckedAt(m);
      if (McTrack->No_tpc_hit() < 15) continue;
      Mc2McHitTracks.insert(std::pair<int, int>(McTrack->IdVx(), McTrack->Id()));
    }
    // This is the "reconstructable" track multiplicity
    int nMcTracksWithHits = Mc2McHitTracks.count(1);
    // Let's skip events in which we do not expect to reconstruct any tracks
    // (and thus vertex) from the primary vertex
    if (nMcTracksWithHits <= 0) continue;
    // This is our denominator histogram for efficiencies
    McRecMulT->Fill(nMcTracksWithHits);
    // ============= Build map between Rc and Mc vertices
    std::map<StMuPrimaryVertex *, StMuMcVertex *> reco2McVertices;
    TArrayF vertexRanks(nPrimaryVertices);
    int mcMatchedVertexIndex = -1;  // any vertex with MC==1 and highest reconstructed multiplicity
    int vertexMaxMultiplicity = -1;
    // First loop over all verticies in this event. There is at least one
    // must be available
    for (int recoVertexIndex = 0; recoVertexIndex < nPrimaryVertices; recoVertexIndex++) {
      vertexRanks[recoVertexIndex] = -1e10;  // sentinel rank for rejected vertices
      StMuPrimaryVertex *recoVertex = (StMuPrimaryVertex *) PrimaryVertices->UncheckedAt(recoVertexIndex);
      if ( !AcceptVX(recoVertex) ) continue;
      // Check Mc
      if (recoVertex->idTruth() < 0 || recoVertex->idTruth() > nMuMcVertices) {
        std::cout << "ERROR: Illegal idTruth " << recoVertex->idTruth() << " The track is ignored" << std::endl;
        continue;
      }
      StMuMcVertex *mcVertex = (StMuMcVertex *) MuMcVertices->UncheckedAt(recoVertex->idTruth() - 1);
      if (mcVertex->Id() != recoVertex->idTruth()) {
        std::cout << "ERROR: Mismatched idTruth " << recoVertex->idTruth() << " and mcVertex Id " << mcVertex->Id()
                  << " The vertex is ignored" << std::endl;
        continue;
      }
      reco2McVertices[recoVertex] = mcVertex;
      vertexRanks[recoVertexIndex] = recoVertex->ranking();
      if (recoVertex->idTruth() == 1 && recoVertex->noTracks() > vertexMaxMultiplicity) {
        mcMatchedVertexIndex = recoVertexIndex;
        vertexMaxMultiplicity = recoVertex->noTracks();
      }
      FillData(data, recoVertex);
#ifdef __TMVA__
      // Replace the native ranking with the BDT response for this vertex.
      Float_t *dataArray = &data.beam;
      for (size_t j = 0; j < inputVec->size(); j++) (*inputVec)[j] = dataArray[j];
      vertexRanks[recoVertexIndex] = classReader->GetMvaValue( *inputVec );
#endif
    }
    // If we reconstructed a vertex which matches the MC one we fill the
    // numerator of the "Any" efficiency histogram
    if (mcMatchedVertexIndex != -1) {
      StMuPrimaryVertex *recoVertexMatchedMc = (StMuPrimaryVertex*) PrimaryVertices->UncheckedAt(mcMatchedVertexIndex);
      double nTracks = recoVertexMatchedMc->noTracks();
      double nTracksQA = nTracks * recoVertexMatchedMc->qaTruth() / 100.;
      hists[0][0]->Fill(nMcTracksWithHits, nTracks);
      hists[0][1]->Fill(nMcTracksWithHits, nTracksQA);
      hists[0][2]->Fill(nMcTracksWithHits);
    }
    // Now deal with the highest rank vertex
    int maxRankVertexIndex = TMath::LocMax(nPrimaryVertices, vertexRanks.GetArray());
    StMuPrimaryVertex *recoVertexMaxRank = (StMuPrimaryVertex*) PrimaryVertices->UncheckedAt(maxRankVertexIndex);
    StMuMcVertex *mcVertex = reco2McVertices[recoVertexMaxRank];
    double nTracks = recoVertexMaxRank->noTracks();
    double nTracksQA = nTracks * recoVertexMaxRank->qaTruth() / 100.;
    // Fill numerator for "good" and "bad" efficiencies
    int h = ( mcVertex && mcVertex->Id() == 1) ? 1 : 2;
    hists[h][0]->Fill(nMcTracksWithHits, nTracks);
    hists[h][1]->Fill(nMcTracksWithHits, nTracksQA);
    hists[h][2]->Fill(nMcTracksWithHits);
    // Proceed with filling ntuple only if requested by the user
    if ( !fillNtuple ) continue;
    // Second loop over all verticies in this event
    for (int recoVertexIndex = 0; recoVertexIndex < nPrimaryVertices; recoVertexIndex++) {
      StMuPrimaryVertex *recoVertex = (StMuPrimaryVertex *) PrimaryVertices->UncheckedAt(recoVertexIndex);
      if ( !AcceptVX(recoVertex) ) continue;
      StMuMcVertex *mcVertex = reco2McVertices[recoVertex];
      if ( !mcVertex ) {
        std::cout << "No Match from RC to MC" << std::endl;
        continue;
      }
      if (vtxeval::gDebugFlag) {
        std::cout << Form("Vx[%3i]", recoVertexIndex) << *recoVertex << " " << *mcVertex;
        int nMcTracksWithHitsatL = Mc2McHitTracks.count(recoVertex->idTruth());
        std::cout << Form("Number of McTkHit %4i rank %8.3f", nMcTracksWithHitsatL, vertexRanks[recoVertexIndex]);
      }
      // Report the geant name of the parent track, if any (debug only).
      int IdPar = mcVertex->IdParTrk();
      if (IdPar > 0 && IdPar <= nMuMcTracks) {
        StMuMcTrack *mcTrack = (StMuMcTrack *) MuMcTracks->UncheckedAt(IdPar - 1);
        if (mcTrack && vtxeval::gDebugFlag) std::cout << " " << mcTrack->GeName();
      }
      FillData(data, recoVertex);
      double nTracks = recoVertex->noTracks();
      if (mcVertex->Id() == 1 && nTracks == vertexMaxMultiplicity) {// good
        VertexG->Fill(&data.beam);
      }
      else { // bad
        VertexB->Fill(&data.beam);
      }
    }
    // Interactive mode: let the user step through events; batch mode: silence debug.
    if ( !gROOT->IsBatch() ) {
      if (vtxeval::ask_user()) return;
    }
    else {
      vtxeval::gDebugFlag = false;
    }
  }
  fOut->Write();
}
void MakeLHCDataEntry(char* storageUri="local://$ALICE_ROOT/../AliRoot/OCDB", Int_t firstRun=0, Int_t lastRun=999999999) { AliCDBManager *cdb = AliCDBManager::Instance(); cdb->SetDefaultStorage(storageUri); // Get time start from the simulated LHCData file Double_t timeStart = 0.0; Double_t timeEnd = 1.0e+10; TString fileName(gSystem->ExpandPathName("$ALICE_ROOT/../AliRoot/GRP/ShuttleInput/testShuttle_GRP_run_number_testShuttle_data.txt")); Printf("Getting the file %s", fileName.Data()); const Int_t fgknLHCDP = 9; // number of dcs dps from LHC data const char* fgkLHCDataPoints[fgknLHCDP] = { "LHC_Beam_Energy", "LHC_MachineMode", "LHC_BeamMode", "LHC_Beams_Particle_Type", "BPTX_Phase_Shift_B1", "BPTX_Phase_Shift_B2", "LHC_Particle_Type_B1", "LHC_Particle_Type_B2", "LHC_Data_Quality_Flag" }; AliGRPObject *grpobj = new AliGRPObject(); // grpobj->SetBeamEnergyIsSqrtSHalfGeV(); // new format // //Getting the LHC Data from DCS FXS // AliLHCReader lhcReader; // Processing data to be put in AliGRPObject // Energy Printf("*************Energy "); TObjArray* energyArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[0]); if (energyArray){ Float_t energy = ProcessEnergy(energyArray,timeStart); if (energy != -1.) 
{ grpobj->SetBeamEnergy(energy); grpobj->SetBeamEnergyIsSqrtSHalfGeV(kTRUE); } delete energyArray; } else { AliError("Energy not found in LHC Data file!!!"); } Double_t timeBeamModeEnd = timeEnd; // max validity for Beam Mode Double_t timeMachineModeEnd = timeEnd; // max validity for Machine Mode Double_t timeBeamEnd = timeEnd; // max validity for Beam Type Double_t timeBeamTypeEnd[2] = {timeEnd, timeEnd}; // max validity for Beam Type1,2 Double_t timeBeamModeStart = -1; // min validity for Beam Mode Double_t timeMachineModeStart = -1; // min validity for Machine Mode Double_t timeBeamStart = -1; // min validity for Beam Type Double_t timeBeamTypeStart[2] = {-1,-1}; // min validity for Beam Type1,2 Int_t indexBeamMode = -1; // index of measurement used to set Beam Mode Int_t indexMachineMode = -1; // index of measurement used to set Machine Mode Int_t indexBeam = -1; // index of measurement used to set Beam Type Int_t indexBeamType[2] = {-1, -1}; // index of measurement used to set Beam Type1,2 Bool_t foundBeamModeStart = kFALSE; // flag to be set in case an entry for the Beam Mode is found before (or at) SOR Bool_t foundMachineModeStart = kFALSE; // flag to be set in case an entry for the Machine Mode is found before (or at) SOR Bool_t foundBeamStart = kFALSE; // flag to be set in case an entry for the Beam Type is found before (or at) SOR Bool_t foundBeamTypeStart[2] = {kFALSE, kFALSE}; // flag to be set in case an entry for the Beam Type1,2 is found before (or at) SOR Bool_t flagBeamMode = kFALSE; //flag set true if a changed occurred in BeamMode Bool_t flagMachineMode = kFALSE; //flag set true if a changed occurred in MachineMode Bool_t flagBeam = kFALSE; //flag set true if a changed occurred in BeamType Bool_t flagBeamType[2] = {kFALSE, kFALSE}; //flag set true if a changed occurred in BeamType1,2 Double_t arrayTimes[5]={2.E9, 2.E9, 2.E9, 2.E9, 2.E9}; // array to keep track of the times of the possible changes of the LHC DPs; each entry set to Wed May 18 2033, 
03:33:20 GMT (ALICE should not be running anymore...) // arrayTimes elements order correspond to the one used in the array of the strings fgkLHCDataPoints, i.e.: // arrayTimes[0] --> MachineMode // arrayTimes[1] --> BeamMode // arrayTimes[2] --> BeamType (when written together) // arrayTimes[3] --> BeamType1 (when written separate) // arrayTimes[4] --> BeamType2 (when written separate) // BeamMode Printf("*************BeamMode (LHCState) "); TObjArray* beamModeArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[2]); Int_t nBeamMode = -1; if (beamModeArray){ nBeamMode = beamModeArray->GetEntries(); if (nBeamMode==0){ Printf("Found zero entries for the Beam Mode, leaving it empty"); } else{ for (Int_t iBeamMode = 0; iBeamMode<nBeamMode; iBeamMode++){ AliDCSArray* beamMode = (AliDCSArray*)beamModeArray->At(iBeamMode); if (beamMode){ if (beamMode->GetTimeStamp()<=timeStart && beamMode->GetTimeStamp()>=timeBeamModeStart){// taking always the very last entry: of two measurements have the same timestamp, the last one is taken timeBeamModeStart = beamMode->GetTimeStamp(); indexBeamMode = iBeamMode; foundBeamModeStart = kTRUE; } else { break; } } } if (!foundBeamModeStart){ Printf("No value for the Beam Mode found before start of run, the Beam Mode will remain empty"); } else { AliDCSArray* beamMode = (AliDCSArray*)beamModeArray->At(indexBeamMode); TObjString* beamModeString = beamMode->GetStringArray(0); Printf(Form("LHC State (corresponding to BeamMode) = %s (set at %f)",(beamModeString->String()).Data(),beamMode->GetTimeStamp())); grpobj->SetLHCState(beamModeString->String()); if (indexBeamMode < nBeamMode-1){ AliDCSArray* beamMode1 = (AliDCSArray*)beamModeArray->At(indexBeamMode+1); if (beamMode1){ if (beamMode1->GetTimeStamp()<=timeStart){ Printf("ERROR: you did not choose the correct value! 
there is still something before (or at) SOR, but later than this!"); } else if (beamMode1->GetTimeStamp()>timeStart && beamMode1->GetTimeStamp()<=timeEnd){ timeBeamModeEnd = beamMode1->GetTimeStamp(); TObjString* beamModeString1 = beamMode1->GetStringArray(0); TString bmString0 = beamModeString->String(); TString bmString1 = beamModeString1->String(); if (bmString0.CompareTo(bmString1.Data(),TString::kIgnoreCase) == -1){ Printf("WARNING: The beam mode changed from %s to %s during the run at timestamp %f! Setting it to %s and keeping track of the time of the change to set MaxTimeLHCValidity afterward",bmString0.Data(), bmString1.Data(), timeBeamModeEnd, bmString0.Data()); flagBeamMode = kTRUE; arrayTimes[1]=timeBeamModeEnd; } } } else { Printf("Invalid pointer for the first entry for Beam Mode after the first valid one, not considering anything after what has already been found"); } } } } delete beamModeArray; } else{ Printf("ERROR: Beam mode array not found in LHC Data file!!!"); } // MachineMode Printf("*************MachineMode "); TObjArray* machineModeArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[1]); Int_t nMachineMode = -1; if (machineModeArray){ nMachineMode = machineModeArray->GetEntries(); if (nMachineMode==0){ Printf("No Machine Mode found, leaving it empty"); } else{ for (Int_t iMachineMode = 0; iMachineMode<nMachineMode; iMachineMode++){ AliDCSArray* machineMode = (AliDCSArray*)machineModeArray->At(iMachineMode); if (machineMode){ if (machineMode->GetTimeStamp()<=timeStart && machineMode->GetTimeStamp()>=timeMachineModeStart){// taking always the very last entry: of two measurements have the same timestamp, the last one is taken timeMachineModeStart = machineMode->GetTimeStamp(); indexMachineMode = iMachineMode; foundMachineModeStart = kTRUE; } else{ break; } } } if (!foundMachineModeStart){ Printf("No value for the Machine Mode found before start of run, the Machine Mode will remain empty"); } else { AliDCSArray* machineMode = 
(AliDCSArray*)machineModeArray->At(indexMachineMode); TObjString* machineModeString = machineMode->GetStringArray(0); Printf(Form("MachineMode = %s (set at %f)",(machineModeString->String()).Data(),machineMode->GetTimeStamp())); grpobj->SetMachineMode(machineModeString->String()); if (indexMachineMode < nMachineMode-1){ AliDCSArray* machineMode1 = (AliDCSArray*)machineModeArray->At(indexMachineMode+1); if (machineMode1){ if (machineMode1->GetTimeStamp()>timeStart && machineMode1->GetTimeStamp()<=timeEnd){ timeMachineModeEnd = machineMode1->GetTimeStamp(); TObjString* machineModeString1 = machineMode1->GetStringArray(0); TString mmString0 = machineModeString->String(); TString mmString1 = machineModeString1->String(); if (mmString0.CompareTo(mmString1.Data(),TString::kIgnoreCase) == -1){ Printf("WARNING: The machine mode changed from %s to %s during the run at timestamp %f! Setting it to %s and keeping track of the time of the change to set MaxTimeLHCValidity afterward",mmString0.Data(),mmString1.Data(),timeMachineModeEnd,mmString0.Data()); flagMachineMode = kTRUE; arrayTimes[0]=timeMachineModeEnd; } } } else { Printf("Invalid pointer for the first entry for Machine Mode after the first valid one, not considering anything after what has already been found"); } } } } delete machineModeArray; } else{ Printf("ERROR: Machine mode array not found in LHC Data file!!!"); } // BeamType1 and BeamType2 - both put in the same string Printf("*************BeamType "); TObjArray* beamArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[3]); if (beamArray){ Int_t nBeam = beamArray->GetEntries(); if (nBeam==0){ Printf("No Beam Type found, leaving it empty"); } else{ for (Int_t iBeam = 0; iBeam<nBeam; iBeam++){ AliDCSArray* beam = (AliDCSArray*)beamArray->At(iBeam); if (beam){ if (beam->GetTimeStamp()<=timeStart && beam->GetTimeStamp()>=timeBeamStart){// taking always the very last entry: of two measurements have the same timestamp, the last one is taken timeBeamStart 
= beam->GetTimeStamp(); indexBeam = iBeam; foundBeamStart = kTRUE; } else{ break; } } } if (!foundBeamStart){ Printf("No value for the Beam Type found before start of run, the (common) Beam Type will remain empty"); } else { AliDCSArray* beam = (AliDCSArray*)beamArray->At(indexBeam); TObjString* beamString = beam->GetStringArray(0); TString beamType = beamString->String(); Printf(Form("Beam Type = %s",beamType.Data())); if (beamType.CompareTo("PROTON",TString::kIgnoreCase) == 0){ Printf("Setting beam type to p-p"); grpobj->SetBeamType("p-p"); } else { // if there is no PROTON beam, we suppose it is Pb, and we put A-A Printf("Setting beam type to A-A"); grpobj->SetBeamType("A-A"); } /* else if (beamType.CompareTo("LEAD82",TString::kIgnoreCase) == 0){ Printf("Setting beam type to Pb-Pb"); grpobj->SetBeamType("Pb-Pb"); } else{ Printf("ERROR: Beam Type not known, leaving it empty"); } */ if (indexBeam < nBeam-1){ AliDCSArray* beam1 = (AliDCSArray*)beamArray->At(indexBeam+1); if (beam1){ if (beam1->GetTimeStamp()>timeStart && beam1->GetTimeStamp()<=timeEnd){ timeBeamEnd = beam1->GetTimeStamp(); TObjString* beamString1 = beam1->GetStringArray(0); TString beamType1 = beamString1->String(); if (beamType.CompareTo(beamType1.Data(),TString::kIgnoreCase) == -1){ Printf("WARNING: The Beam Type changed from %s to %s during the run at timestamp %f! 
Setting it to %s and keeping track of the time of the change to set MaxTimeLHCValidity afterward",beamType.Data(),(beamString1->String()).Data(),timeBeamEnd,beamType.Data()); flagBeam = kTRUE; arrayTimes[2] = timeBeamEnd; } } } else { Printf("Invalid pointer for the first entry for Beam Type after the first valid one, not considering anything after what has already been found"); } } } } delete beamArray; } else{ Printf("ERROR: Beam Type array not found in LHC Data file!!!"); } // BeamType1 and BeamType2 - in separete string Printf("*************BeamType, 1 and 2 "); Int_t indexBeamTypeString = 6; // index of the string with the alias of BeanType1 in the array fgkLHCDataPoints TString combinedBeamType = "-"; // combined beam type, built from beam type 1 and beam type 2 TString combinedBeamTypeFromLHC = "-"; // combined beam type, built from beam type 1 and beam type 2 AS SENT FROM LHC for (Int_t ibeamType = 0; ibeamType<2; ibeamType++){ beamArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[indexBeamTypeString+ibeamType]); if (beamArray){ Int_t nBeam = beamArray->GetEntries(); if (nBeam==0){ Printf(Form("No Beam Type %s found, leaving it empty",fgkLHCDataPoints[indexBeamTypeString+ibeamType])); } else{ for (Int_t iBeam = 0; iBeam<nBeam; iBeam++){ AliDCSArray* beam = (AliDCSArray*)beamArray->At(iBeam); if (beam){ if (beam->GetTimeStamp()<=timeStart && beam->GetTimeStamp()>=timeBeamTypeStart[ibeamType]){// taking always the very last entry: of two measurements have the same timestamp, the last one is taken timeBeamTypeStart[ibeamType] = beam->GetTimeStamp(); indexBeamType[ibeamType] = iBeam; foundBeamTypeStart[ibeamType] = kTRUE; } else{ break; } } } if (!foundBeamTypeStart[ibeamType]){ Printf(Form("No value for the Beam Type %s found before start of run, the Beam Type %d will remain empty", fgkLHCDataPoints[indexBeamTypeString+ibeamType], ibeamType)); } else { AliDCSArray* beam = (AliDCSArray*)beamArray->At(indexBeam); TObjString* beamString = 
beam->GetStringArray(0); TString beamType = beamString->String(); Printf(Form("Beam Type (for %s) = %s", fgkLHCDataPoints[indexBeamTypeString+ibeamType], beamType.Data())); TString singleBeam = ParseBeamTypeString(beamType,ibeamType); Printf(Form("Single Beam Type for beam %d set to %s", ibeamType, singleBeam.Data())); grpobj->SetSingleBeamType(ibeamType, singleBeam); if (beamType.CompareTo("PROTON",TString::kIgnoreCase) == 0){ Printf(Form("Setting beam %d for combined beam type to p", ibeamType)); if (ibeamType == 0) combinedBeamType.Prepend("p"); else combinedBeamType.Append("p"); } else { // if there is no PROTON beam, we suppose it is Pb, and we put A-A Printf(Form("Setting beam %d for combined beam type to A",ibeamType)); if (ibeamType == 0) combinedBeamType.Prepend("A"); else combinedBeamType.Append("A"); } if (ibeamType == 0) combinedBeamTypeFromLHC.Prepend(beamType); else combinedBeamTypeFromLHC.Append(beamType); /* else if (beamType.CompareTo("LEAD82",TString::kIgnoreCase) == 0){ Printf("Setting beam type to Pb-Pb"); grpobj->SetSingleBeamType(ibeamType, "Pb-Pb"); } else{ Printf("ERROR: Beam Type not known, leaving it empty"); } */ if (indexBeamType[ibeamType] < nBeam-1){ AliDCSArray* beam1 = (AliDCSArray*)beamArray->At(indexBeam+1); if (beam1){ if (beam1->GetTimeStamp()>timeStart && beam1->GetTimeStamp()<=timeEnd){ timeBeamTypeEnd[ibeamType] = beam1->GetTimeStamp(); TObjString* beamString1 = beam1->GetStringArray(0); TString beamType1 = beamString1->String(); if (beamType.CompareTo(beamType1.Data(),TString::kIgnoreCase) == -1){ Printf("WARNING: The Beam Type for %s changed from %s to %s during the run at timestamp %f! 
Setting it to %s and keeping track of the time of the change to set MaxTimeLHCValidity afterward",fgkLHCDataPoints[indexBeamTypeString+ibeamType],beamType.Data(),(beamString1->String()).Data(),timeBeamEnd,beamType.Data()); flagBeamType[ibeamType] = kTRUE; arrayTimes[3+ibeamType] = timeBeamTypeEnd[ibeamType]; } } } else { Printf(Form("Invalid pointer for the first entry for Beam Type %s after the first valid one, not considering anything after what has already been found",fgkLHCDataPoints[indexBeamTypeString+ibeamType])); } } } } delete beamArray; } else{ AliError(Form("Beam Type %s array not found in LHC Data file!!!",fgkLHCDataPoints[indexBeamTypeString+ibeamType])); } } Printf(Form("Setting combined beam type to %s",combinedBeamType.Data())); grpobj->SetBeamType(combinedBeamType); Printf(Form("Setting combined beam type form LHC to %s",combinedBeamTypeFromLHC.Data())); grpobj->SetBeamTypeFromLHC(combinedBeamTypeFromLHC); // Setting minTimeLHCValidity if (flagBeamMode == kTRUE || flagMachineMode == kTRUE || flagBeam == kTRUE || flagBeamType[0] == kTRUE || flagBeamType[1] == kTRUE){ Double_t minTimeLHCValidity= TMath::MinElement(5,arrayTimes); Printf("WARNING: Setting MaxTimeLHCValidity to %f",minTimeLHCValidity); grpobj->SetMaxTimeLHCValidity(minTimeLHCValidity); } /* // Old way to determine the Maximum Time during which the LHC info is valid if (timeBeamModeEnd!=0 || timeMachineModeEnd!=0 || timeBeamEnd !=0){ Double_t minTimeLHCValidity; if (flagBeamMode == kFALSE && flagMachineMode == kFALSE && flagBeam == kTRUE){ // flagBeam only true --> it is the only one that changed minTimeLHCValidity = timeBeamEnd; } else if (flagBeamMode == kFALSE && flagMachineMode == kTRUE && flagBeam == kFALSE){ // flagMachineMode only true minTimeLHCValidity = timeMachineModeEnd; } else if (flagBeamMode == kTRUE && flagMachineMode == kFALSE && flagBeam == kFALSE){ // flagBeamMode only true minTimeLHCValidity = timeBeamModeEnd; } else if (flagBeamMode == kFALSE && flagMachineMode == 
kTRUE && flagBeam == kTRUE){ // flagBeam and flagMachineMode only true minTimeLHCValidity= TMath::Min(timeBeamEnd,timeMachineModeEnd); } else if (flagBeamMode == kTRUE && flagMachineMode == kFALSE && flagBeam == kTRUE){ // flagBeam and flagBeamMode only true minTimeLHCValidity= TMath::Min(timeBeamEnd,timeBeamModeEnd); } else if (flagBeamMode == kTRUE && flagMachineMode == kTRUE && flagBeam == kFALSE){ // flagMachineMode and flagBeamMode only true minTimeLHCValidity= TMath::Min(timeMachineModeEnd,timeBeamModeEnd); } else { Double_t arrayTimes[3] = {timeBeamModeEnd,timeMachineModeEnd,timeBeamEnd};// flagMachineMode and flagBeamMode and flagBeam minTimeLHCValidity= TMath::MinElement(3,arrayTimes); } Printf("WARNING: Setting MaxTimeLHCValidity to %f",minTimeLHCValidity)); grpobj->SetMaxTimeLHCValidity(minTimeLHCValidity); } */ // Data Quality Flag --> storing start and end values of periods within the run during which the value was found to be FALSE Printf("*************Data Quality Flag "); TObjArray* dataQualityArray = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[8]); Int_t nDataQuality = -1; Double_t timeDataQualityStart = -1; // min validity for Data Quality Flag Int_t indexDataQuality = -1; // index of first measurement used to set Data Quality Flag Bool_t foundDataQualityStart = kFALSE; // flag to be set in case an entry for the Data Quality Flag is found before (or at) SOR if (dataQualityArray){ nDataQuality = dataQualityArray->GetEntries(); if (nDataQuality==0){ Printf("No Data Quality Flag found, leaving it empty"); } else{ for (Int_t iDataQuality = 0; iDataQuality<nDataQuality; iDataQuality++){ AliDCSArray* dataQuality = (AliDCSArray*)dataQualityArray->At(iDataQuality); if (dataQuality){ if (dataQuality->GetTimeStamp()<=timeStart && dataQuality->GetTimeStamp()>=timeDataQualityStart){// taking always the very last entry: if two measurements have the same timestamp, the last one is taken timeDataQualityStart = dataQuality->GetTimeStamp(); 
indexDataQuality = iDataQuality; foundDataQualityStart = kTRUE; } else{ // we suppose here that if the first measurement is not before SOR, then none will be (they MUST be in chronological order!!!) break; } } } if (!foundDataQualityStart){ // The Data Quality Flag should be found and TRUE at the start of the run. For the time being, if it is not found, don't do anything, but it means there is a problem.. Printf("No value for the Data Quality Flag found before start of run, the Data Quality Flag will remain empty"); } else { // counting how many FALSE values there are Bool_t foundEndOfFalse = kFALSE; Int_t nFalse = 0; for (Int_t iDataQuality = indexDataQuality; iDataQuality < nDataQuality; iDataQuality ++){ AliDCSArray* dataQuality = (AliDCSArray*)dataQualityArray->At(iDataQuality); Printf("dataQuality->GetTimeStamp() = %f, timeDataQualityStart = %f, timeEnd = %f", dataQuality->GetTimeStamp(), timeDataQualityStart, timeEnd ); if (dataQuality->GetTimeStamp()>=timeDataQualityStart && dataQuality->GetTimeStamp()<=timeEnd){ // considering only values between the first valid and the end of the run Bool_t dataQualityFlag = dataQuality->GetBool(0); Printf("DataQuality = %d (set at %f)",(Int_t)dataQualityFlag,dataQuality->GetTimeStamp()); if (dataQualityFlag != kTRUE){ if (iDataQuality == indexDataQuality) { // the first Data Quality value should be TRUE, but ignoring the problem now... Printf("ERROR: The first value for the Data Quality MUST be TRUE! 
Ignoring for now..."); } nFalse++; } } } Printf(Form("Found %d FALSE values for the Data Quality Flag",nFalse)); Double_t falses[nFalse*2]; // dimensioning this to the maximum possible, as if each false value was followed by a true one --> the false periods correspond to the number of falses Int_t iDataQuality = indexDataQuality; if (nFalse > 0){ Int_t iFalse = 0; // filling the info about the periods when the flag was set to FALSE // starting, like for the other DPS, from the measurement closest to SOR (the index of which is iDataQuality) while (iDataQuality < nDataQuality){ Printf("iDataQuality = %d",iDataQuality); AliDCSArray* dataQuality = (AliDCSArray*)dataQualityArray->At(iDataQuality); if (dataQuality->GetTimeStamp()>=timeDataQualityStart && dataQuality->GetTimeStamp()<=timeEnd){ // considering only values between the first valid and the end of the run Bool_t dataQualityFlag = dataQuality->GetBool(0); Printf("DataQuality = %d (set at %f)",(Int_t)dataQualityFlag,dataQuality->GetTimeStamp()); if (dataQualityFlag == kTRUE){ // found TRUE value, continuing iDataQuality++; continue; } else{ /* // the check was already done before if (iDataQuality == indexDataQuality) { // the first Data Quality value should be TRUE, but ignoring the problem now... Printf("ERROR: The first value for the Data Quality MUST be TRUE! 
Ignoring for now..."); } */ falses[iFalse*2] = dataQuality->GetTimeStamp(); foundEndOfFalse = kFALSE; Int_t iDataQualityNext = iDataQuality+1; while (iDataQualityNext < nDataQuality){ AliDCSArray* dataQualityNext = (AliDCSArray*)dataQualityArray->At(iDataQualityNext); if (dataQualityNext->GetTimeStamp()>timeDataQualityStart && dataQualityNext->GetTimeStamp()<=timeEnd && dataQualityNext->GetTimeStamp() > dataQuality->GetTimeStamp()){ // considering only values between the first valid and the end of the run, and subsequent to the current value Bool_t dataQualityFlagNext = dataQualityNext->GetBool(0); Printf("DataQualityNext = %d (set at %f)",(Int_t)dataQualityFlagNext,dataQualityNext->GetTimeStamp()); if (dataQualityFlagNext == kTRUE){ // found TRUE value, first FALSE period completed foundEndOfFalse = kTRUE; falses[iFalse*2+1] = dataQualityNext->GetTimeStamp(); iFalse++; break; } iDataQualityNext++; } } if (!foundEndOfFalse) { Printf("Please, note that the last FALSE value lasted until the end of the run"); falses[iFalse*2+1] = timeEnd; iFalse++; break; } iDataQuality = iDataQualityNext+1; } } } grpobj->SetNFalseDataQualityFlag(iFalse); grpobj->SetFalseDataQualityFlagPeriods(falses); } } } delete dataQualityArray; } else{ Printf("ERROR: Data Quality Flag array not found in LHC Data file!!!"); } // Processing data to go to AliLHCData object AliLHCData* dt = new AliLHCData(fileName.Data(),timeStart,timeEnd); // storing AliLHCData in OCDB if (dt){ Printf(Form("Filled %d records to AliLHCData object",dt->GetData().GetEntriesFast())); AliCDBMetaData md; md.SetResponsible("Ruben Shahoyan"); md.SetComment("LHC data from the GRP preprocessor."); Bool_t result = kTRUE; AliCDBId id("GRP/GRP/LHCData", 0, AliCDBRunRange::Infinity()); result = cdb->Put(dt, id, &md); delete dt; if (!result){ Printf("Problems in storing LHC Data - but not going into Error"); } } // processing LHC Phase TObjArray *beam1phase = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[4]); 
TObjArray *beam2phase = lhcReader.ReadSingleLHCDP(fileName.Data(),fgkLHCDataPoints[5]); if (beam1phase == 0x0 || beam2phase == 0x0){ Printf(Form("Problems in retrieving LHC Clock data from LHC file")); return 4; } AliLHCClockPhase *phaseObj = ProcessLHCClockPhase(beam1phase,beam2phase,timeEnd); delete beam1phase; delete beam2phase; if (phaseObj){ Printf(Form("LHC Phase found")); AliCDBMetaData mdPhase; mdPhase.SetResponsible("Cvetan Cheshkov"); mdPhase.SetComment("LHC Clock Phase"); Bool_t result = kTRUE; AliCDBId id("GRP/Calib/LHCClockPhase", 0, AliCDBRunRange::Infinity()); result = cdb->Put(phaseObj, id, &mdPhase); delete phaseObj; if (!result) return 3; } else return 4; return 0; }
/**********************************************************************************
 * Project   : TMVA - a ROOT-integrated toolkit for multivariate data analysis   *
 * Package   : TMVA                                                              *
 * Root Macro: TMVAClassification                                                *
 *                                                                               *
 * Trains and tests TMVA classifiers on the "VertexG" (signal) and "VertexB"     *
 * (background) trees found in the current ROOT directory (gDirectory).          *
 *                                                                               *
 * The methods to be used can be switched on and off by means of the booleans    *
 * below, or via the prompt command, for example:                                *
 *                                                                               *
 *    root -l ./TMVAClassification.C\(\"Fisher,Likelihood\"\)                    *
 *                                                                               *
 * (note that the backslashes are mandatory)                                     *
 * If no method is given, a default set of classifiers is used.                  *
 *                                                                               *
 * The output file "TMVA.root" can be analysed with the use of dedicated         *
 * macros (simply say: root -l <macro.C>), which can be conveniently             *
 * invoked through a GUI that will appear at the end of the run of this macro.   *
 * Launch the GUI via the command:                                               *
 *                                                                               *
 *    root -l ./TMVAGui.C                                                        *
 **********************************************************************************/

/// Train, test and evaluate a configurable set of TMVA classifiers.
/// @param myMethodList comma-separated list of method names to enable;
///                     when empty, the default selection below is used.
/// Requires: "VertexG"/"VertexB" TTrees attached to gDirectory, and a
/// file-scope `vnames` colon-separated variable list (defined elsewhere
/// in this file). Writes the training output to "TMVA.root".
void TMVAClassification( TString myMethodList = "" )
{
   // Input trees must already be attached to gDirectory (e.g. an open TFile).
   TTree *signal = (TTree *)gDirectory->Get("VertexG");
   if (! signal) { std::cout << "No signal TTree" << std::endl; return; }
   TTree *background = (TTree *)gDirectory->Get("VertexB");
   if (! background) { std::cout << "No background TTree" << std::endl; return; }

   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string, int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Mutidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetics Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and bayesian regulator
   Use["CFMlpANN"]        = 0; // Deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine
   Use["SVM"]             = 1;
   //
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   Use["BDTF"]            = 0; // allow usage of fisher discriminant for node splitting
   Use["myBDTD"]          = 1; // mine
   //
   // --- Friedman's RuleFit method, ie, an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassification" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string, int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (size_t i = 0; i < mlist.size(); i++) {
         std::string regMethod(mlist[i]);
         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string, int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------
   // --- Here the preparation phase begins

   // Create a ROOT output file where TMVA will store ntuples, histograms, etc.
   TString outfileName( "TMVA.root" );
   TFile *outputFile = TFile::Open( outfileName, "RECREATE" );

   // Create the factory object. Later you can choose the methods
   // whose performance you'd like to investigate. The factory is
   // the only TMVA object you have to interact with
   //
   // The first argument is the base of the name of all the
   // weightfiles in the directory weight/
   //
   // The second argument is the output file for the training results
   // All TMVA output can be suppressed by removing the "!" (not) in
   // front of the "Silent" argument in the option string
   TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
      "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );

   // If you wish to modify default settings
   // (please check "src/Config.h" to see all available global options)
   //    (TMVA::gConfig().GetVariablePlotting()).fTimesRMS = 8.0;
   //    (TMVA::gConfig().GetIONames()).fWeightFileDir = "myWeightDirectory";

   // load the signal and background event samples from ROOT trees
   std::cout << " starts ... " << std::endl;

   // global event weights per tree (see below for setting event-wise weights)
   double signalWeight     = 1.0;
   double backgroundWeight = 1.0;
   std::cout << " signalWeight = " << signalWeight << " backWeight = " << backgroundWeight << std::endl;
   factory->AddSignalTree( signal, signalWeight );
   factory->AddBackgroundTree( background, backgroundWeight );

   // Register every training variable listed in the file-scope `vnames`
   // string (colon-separated), skipping the BEMC selector tokens.
   TString separator(":");
   TString Vnames(vnames);
   TObjArray *array = Vnames.Tokenize(separator);
   TIter next(array);
   TObjString *objs;
   while ((objs = (TObjString *) next())) {
      TString name(objs->GetString());
      if (name == "BEMC") continue;
      if (name == "noBEMC") continue;
      factory->AddVariable(name, 'F');
   }
   delete array; // FIX: TString::Tokenize returns a new TObjArray owned by the caller

   // This would set individual event weights (the variables defined in the
   // expression need to exist in the original TTree)
   //    for signal    : factory->SetSignalWeightExpression    ("weight1*weight2");
   //    for background: factory->SetBackgroundWeightExpression("weight1*weight2");
   // commented JB : 04/26 ??
   //factory->SetBackgroundWeightExpression("weight");

   // Apply additional cuts on the signal and background samples (can be different)
   TCut mycuts = "";
   TCut mycutb = "";

   // Tell the factory how to use the training and testing events
   //
   // If no numbers of events are given, half of the events in the tree are used
   // for training, and the other half for testing:
   //    factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
   // To also specify the number of testing events, use:
   //factory->PrepareTrainingAndTestTree( mycuts,mycutb,"NSigTrain=9000:NBkgTrain=50000:NSigTest=9000:NBkgTest=50000:SplitMode=Random:!V" );
   factory->PrepareTrainingAndTestTree( mycuts, mycutb,
      "nTrain_Signal=4900:nTrain_Background=49000:nTest_Signal=4900:nTest_Background=49000:SplitMode=Random:!V"); // for KFVertex
   //  factory->PrepareTrainingAndTestTree( mycuts, mycutb,"nTrain_Signal=20000:nTrain_Background=40000:nTest_Signal=20000:nTest_Background=40000:SplitMode=Random:!V"); // for PPV

   // ---- Book MVA methods
   //
   // Please lookup the various method configuration options in the corresponding cxx files, eg:
   // src/MethoCuts.cxx, etc, or here: http://tmva.sourceforge.net/optionRef.html
   // it is possible to preset ranges in the option string in which the cut optimisation should be done:
   // "...:CutRangeMin[2]=-1:CutRangeMax[2]=1"...", where [2] is the third input variable

   // Cut optimisation
   if (Use["Cuts"])
      factory->BookMethod( TMVA::Types::kCuts, "Cuts",
         "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
   if (Use["CutsD"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsD",
         "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );
   if (Use["CutsPCA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsPCA",
         "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );
   if (Use["CutsGA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
         "H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );
   if (Use["CutsSA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
         "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );

   // Likelihood ("naive Bayes estimator")
   if (Use["Likelihood"])
      factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
         "H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );
   // Decorrelated likelihood
   if (Use["LikelihoodD"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
         "!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );
   // PCA-transformed likelihood
   if (Use["LikelihoodPCA"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
         "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );
   // Use a kernel density estimator to approximate the PDFs
   if (Use["LikelihoodKDE"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodKDE",
         "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Adaptive:KDEFineFactor=0.3:KDEborder=None:NAvEvtPerBin=50" );
   // Use a variable-dependent mix of splines and kernel density estimator
   if (Use["LikelihoodMIX"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodMIX",
         "!H:!V:!TransformOutput:PDFInterpolSig[0]=KDE:PDFInterpolBkg[0]=KDE:PDFInterpolSig[1]=KDE:PDFInterpolBkg[1]=KDE:PDFInterpolSig[2]=Spline2:PDFInterpolBkg[2]=Spline2:PDFInterpolSig[3]=Spline2:PDFInterpolBkg[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );

   // Test the multi-dimensional probability density estimator
   // here are the options strings for the MinMax and RMS methods, respectively:
   //      "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3" );
   //      "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3" );
   if (Use["PDERS"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
         "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600" );
   if (Use["PDERSD"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSD",
         "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=Decorrelate" );
   if (Use["PDERSPCA"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSPCA",
         "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=PCA" );

   // Multi-dimensional likelihood estimator using self-adapting phase-space binning
   if (Use["PDEFoam"])
      factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
         "!H:!V:SigBgSeparate=F:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );
   if (Use["PDEFoamBoost"])
      factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoamBoost",
         "!H:!V:Boost_Num=30:Boost_Transform=linear:SigBgSeparate=F:MaxDepth=4:UseYesNoCell=T:DTLogic=MisClassificationError:FillFoamWithOrigWeights=F:TailCut=0:nActiveCells=500:nBin=20:Nmin=400:Kernel=None:Compress=T" );

   // K-Nearest Neighbour classifier (KNN)
   if (Use["KNN"])
      factory->BookMethod( TMVA::Types::kKNN, "KNN",
         "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );

   // H-Matrix (chi2-squared) method
   if (Use["HMatrix"])
      factory->BookMethod( TMVA::Types::kHMatrix, "HMatrix", "!H:!V:VarTransform=None" );

   // Linear discriminant (same as Fisher discriminant)
   if (Use["LD"])
      factory->BookMethod( TMVA::Types::kLD, "LD",
         "H:!V:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

   // Fisher discriminant (same as LD)
   if (Use["Fisher"])
      factory->BookMethod( TMVA::Types::kFisher, "Fisher",
         "H:!V:Fisher:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );
   // Fisher with Gauss-transformed input variables
   if (Use["FisherG"])
      factory->BookMethod( TMVA::Types::kFisher, "FisherG", "H:!V:VarTransform=Gauss" );
   // Composite classifier: ensemble (tree) of boosted Fisher classifiers
   if (Use["BoostedFisher"])
      factory->BookMethod( TMVA::Types::kFisher, "BoostedFisher",
         "H:!V:Boost_Num=20:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.2:!Boost_DetailedMonitoring" );

   // Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit (or GA or SA)
   if (Use["FDA_MC"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1" );
   if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=300:Cycles=3:Steps=20:Trim=True:SaveBestGen=1" );
   if (Use["FDA_SA"]) // can also use Simulated Annealing (SA) algorithm (see Cuts_SA options])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_SA",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
   if (Use["FDA_MT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
   if (Use["FDA_GAMT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
   if (Use["FDA_MCMT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );

   // TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
   if (Use["MLP"])
      factory->BookMethod( TMVA::Types::kMLP, "MLP",
         "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );
   if (Use["MLPBFGS"])
      factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS",
         "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );
   if (Use["MLPBNN"])
      factory->BookMethod( TMVA::Types::kMLP, "MLPBNN",
         "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with bayesian regulators

   // CF(Clermont-Ferrand)ANN
   if (Use["CFMlpANN"])
      factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN",
         "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...
   // Tmlp(Root)ANN
   if (Use["TMlpANN"])
      factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN",
         "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...

   // Support Vector Machine
   if (Use["SVM"])
      factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );

   // Boosted Decision Trees
   if (Use["BDTG"]) // Gradient Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDTG",
         "!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:NNodesMax=5" );
   if (Use["BDT"]) // Adaptive Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDT",
         "!H:!V:NTrees=850:nEventsMin=150:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
   if (Use["BDTB"]) // Bagging
      factory->BookMethod( TMVA::Types::kBDT, "BDTB",
         "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );
   if (Use["BDTD"]) // Decorrelation + Adaptive Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDTD",
         "!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
   if (Use["myBDTD"]) // Decorrelation + Adaptive Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDTDTEST",
         "!H:!V:NTrees=1000:nEventsMin=400:MaxDepth=6:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );
   if (Use["BDTF"]) // Allow Using Fisher discriminant in node splitting for (strong) linearly correlated variables
      factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
         "!H:!V:NTrees=50:nEventsMin=150:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );

   // RuleFit -- TMVA implementation of Friedman's method
   if (Use["RuleFit"])
      factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
         "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );

   // For an example of the category classifier usage, see: TMVAClassificationCategory
   // TMVA::IMethod* category = factory->BookMethod( TMVA::Types::kCategory,"Category","" );

   // --------------------------------------------------------------------------------------------------
   // ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events
#if 0
   factory->OptimizeAllMethods("SigEffAt001", "Scan");
   factory->OptimizeAllMethods("ROCIntegral", "GA");
#endif

   // --------------------------------------------------------------------------------------------------
   // ---- Now you can tell the factory to train, test, and evaluate the MVAs

   // Train MVAs using the set of training events
   factory->TrainAllMethods();
   // ---- Evaluate all MVAs using the set of test events
   factory->TestAllMethods();
   // ----- Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // --------------------------------------------------------------
   // Save the output
   outputFile->Close();
   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVAClassification is done!" << std::endl;

   delete factory;
}
void CopySubdir(const char * oldfile, const char * newfile){ TDirectory *dirtracking; bool ok_tracking=false; TDirectory *dirsimhit; bool ok_simhit=false; TDirectory *dirrechits; bool ok_rechits=false; TDirectory *dirdigis; bool ok_digis=false; TDirectory *dirTP; bool ok_TP=false; TDirectory *dirtrackingrechits; bool ok_trackingrechits=false; TFile *oldf = TFile::Open(oldfile); if (oldf->cd("DQMData/Run 1/Tracking")) { oldf->cd("DQMData/Run 1/Tracking"); dirtracking=gDirectory; ok_tracking=true; } if (oldf->cd("DQMData/Run 1/TrackerHitsV")) { oldf->cd("DQMData/Run 1/TrackerHitsV"); dirsimhit=gDirectory; ok_simhit=true; } if (oldf->cd("DQMData/Run 1/TrackerRecHitsV")) { oldf->cd("DQMData/Run 1/TrackerRecHitsV"); dirrechits=gDirectory; ok_rechits=true; } if (oldf->cd("DQMData/Run 1/TrackerDigisV")) { oldf->cd("DQMData/Run 1/TrackerDigisV"); dirdigis=gDirectory; ok_digis=true; } if (oldf->cd("DQMData/Run 1/TrackingMCTruthV")) { oldf->cd("DQMData/Run 1/TrackingMCTruthV"); dirTP=gDirectory; ok_TP=true; } if (oldf->cd("DQMData/Run 1/RecoTrackV")) { oldf->cd("DQMData/Run 1/RecoTrackV"); dirtrackingrechits=gDirectory; ok_trackingrechits=true; } TFile *newf =new TFile(newfile,"RECREATE"); TDirectory *dirnew=newf->mkdir("DQMData"); dirnew=dirnew->mkdir("Run 1"); dirnew->cd(); if (ok_tracking) CopyDir(dirtracking); if (ok_simhit) CopyDir(dirsimhit); if (ok_rechits) CopyDir(dirrechits); if (ok_digis) CopyDir(dirdigis); if (ok_TP) CopyDir(dirTP); if (ok_trackingrechits) CopyDir(dirtrackingrechits); TList* new_list = oldf->GetListOfKeys() ; newf->cd(); TIter newkey_iter( new_list) ; TKey* new_key ; TObject* new_obj ; while ( new_key = (TKey*) newkey_iter() ) { new_obj = new_key->ReadObj() ; if (strcmp(new_obj->IsA()->GetName(),"TObjString")==0) { TObjString * cversion = (TObjString*) new_obj; if(cversion->GetString().Contains("CMSSW")){ cversion->Write(); break; } } } }
void Draweff() { int sth=1, Gth=0; TFile *f = TFile::Open(outG); if(sth==0) { TString dirname = "std"; } else if(sth==1) { TString dirname ="Gri055"; } else { TString dirname ="Gri101"; } gStyle->SetErrorX(0); TString name; TObjString* dataname = (TObjString*)f->Get(Form("dataname")); TObjString* histoname = (TObjString*)f->Get(Form("histoname")); if(Gth==0) name = "G0"; else if(Gth<nGlau) name = Form("Glau_%d",Gth); else name = Form("bin_%d",Gth-nGlau+1); TObjString* Glaubername = (TObjString*)f->Get(Form("%s/%s/Glaubername",dirname.Data(),name.Data())); TVectorD* xmin = (TVectorD*)f->Get(Form("%s/%s/xmin",dirname.Data(),name.Data())); TVectorD* xmax = (TVectorD*)f->Get(Form("%s/%s/xmax",dirname.Data(),name.Data())); TVectorD* mubest = (TVectorD*)f->Get(Form("%s/%s/mubest",dirname.Data(),name.Data())); TVectorD* kbest = (TVectorD*)f->Get(Form("%s/%s/kbest",dirname.Data(),name.Data())); TVectorD* Ndf = (TVectorD*)f->Get(Form("%s/%s/Ndf",dirname.Data(),name.Data())); TVectorD* chis = (TVectorD*)f->Get(Form("%s/%s/chis",dirname.Data(),name.Data())); TVectorD *kpoint = (TVectorD*)f->Get(Form("%s/%s/kpoint",dirname.Data(),name.Data())); TFile *fdata = TFile::Open(dataname->GetString()); TH1D *histo_obs = (TH1D*)fdata->Get(histoname->GetString()); histo_obs->Sumw2(); TFile *fGlauber = TFile::Open(Glaubername->GetString()); int binnum = histo_obs->GetNbinsX(); double Minx = histo_obs->GetXaxis()->GetXmin(); double Maxx = histo_obs->GetXaxis()->GetXmax(); double binsize = (Double_t)(Maxx-Minx)/binnum; int xbinmin=(int)(((*xmin)[0]-Minx)/binsize); int xbinmax=(int)(((*xmax)[0]-Minx)/binsize); TH1D *histo_exp = new TH1D("histo_exp","Simulated distribution;Multiplicity;Event Fraction",binnum,Minx,Maxx); histo_exp->Sumw2(); Int_t ibin; TH1D *histo_obs_norm = (TH1D*)histo_obs->Clone(); histo_obs_norm->Scale(1/histo_obs->Integral(xbinmin,xbinmax)); TF1 *NBD_fun = new 
TF1("NBD_fun","[0]*TMath::Gamma(x+[1])/(TMath::Gamma(x+1)*TMath::Gamma([1]))*TMath::Power([2]/[1],x)/TMath::Power([2]/[1]+1,x+[1])",0,100); NBD_fun->SetParameter(0,1); //[0]: Normalized constant NBD_fun->SetParameter(1,(*kbest)[0]); //[1]: k value NBD_fun->SetParameter(2,(*mubest)[0]); //[2]: mu value TTree *t = (TTree*) fGlauber->Get("nt_p_Pb"); Long_t Nevent; Nevent = (Long_t) t->GetEntries(); Long_t Ev; Int_t Bino; Double_t Para, Bi_Para, Mult; Float_t Ncoll; t->SetBranchAddress("Ncoll",&Ncoll); for(Ev=0; Ev<Nevent; Ev++) { if(Ev%100000==0) cout<<"Have run "<<Ev<<" events"<<endl; t->GetEntry(Ev); Para = 0; //make sure that Para doesn't accumulate through loops for(Bino=0; Bino<Ncoll; Bino++) { Bi_Para = NBD_fun->GetRandom(); Para += Bi_Para; } histo_exp->Fill(Para); } Double_t SumEvent, scale; SumEvent = histo_exp->Integral(xbinmin,xbinmax); scale = 1/SumEvent; TH1D *histo_exp_norm = (TH1D*) histo_exp->Clone(); histo_exp_norm->Scale(scale); TCanvas *c1 = new TCanvas(); gStyle->SetOptStat(kFALSE); double hfbin[]= {0,1,2,3,4,6,8,10,13,16,20,25,30,40,55,70,90}; int nhfbin = 16; rehisto_obs_norm = (TH1D*)histo_obs_norm->Rebin(nhfbin,"rehisto_obs_norm",hfbin); normalizeByBinWidth(rehisto_obs_norm); rehisto_exp_norm = (TH1D*)histo_exp_norm->Rebin(nhfbin,"rehisto_exp_norm",hfbin); normalizeByBinWidth(rehisto_exp_norm); TH1D* ratio = (TH1D*)rehisto_obs_norm->Clone("ratio"); ratio->Divide(rehisto_exp_norm); ratio->SetMaximum(1.2); ratio->SetMinimum(0); ratio->GetXaxis()->SetTitle("HF #Sigma E_{T} |#eta|>4"); ratio->GetYaxis()->SetTitle("ratio"); TFile *fDSeff = TFile::Open("/afs/cern.ch/work/q/qixu/private/RpA/GlobalEvent/CentrDep/pPbHijing_EffCorr_forNBD.root"); TFile *ftreff = TFile::Open("/afs/cern.ch/user/q/qixu/CMSSW_6_2_5/src/Centrality/Correction/pPbHist_Hijing_TrandEs.root"); TH1D* hbef = (TH1D*)ftreff->Get("hHFEnergy4"); TH1D* rehbef = (TH1D*)hbef->Rebin(nhfbin,"rehHFEnergy4",hfbin); TH1D* haft = (TH1D*)ftreff->Get("hHFEnergy4_tr"); TH1D* rehaft = 
(TH1D*)haft->Rebin(nhfbin,"rehHFEnergy4_tr",hfbin); TGraphAsymmErrors *gtreff = new TGraphAsymmErrors(); gtreff->BayesDivide(rehaft,rehbef); TGraphAsymmErrors *geff = (TGraphAsymmErrors*)fDSeff->Get("regEffHF4"); for(int i=0; i<geff->GetN(); i++) { geff->SetPointEXlow(i,0); geff->SetPointEXhigh(i,0); gtreff->SetPointEXlow(i,0); gtreff->SetPointEXhigh(i,0); } ratio->SetTitle(""); ratio->SetLineColor(1); ratio->SetMarkerStyle(24); ratio->SetMarkerColor(1); ratio->SetMarkerSize(1.5); ratio->Draw("P"); geff->SetMarkerStyle(33); geff->SetMarkerColor(2); geff->SetMarkerSize(1.5); geff->Draw("Psame"); gtreff->SetMarkerStyle(21); gtreff->SetMarkerColor(4); gtreff->SetMarkerSize(1.3); gtreff->Draw("Psame"); TLegend *leg = new TLegend(0.60, 0.2, 0.78, 0.4); leg->SetFillColor(10); leg->SetFillStyle(0); leg->SetBorderSize(0.035); leg->SetTextFont(42); leg->SetTextSize(0.045); leg->AddEntry(ratio,"data/fit","p"); leg->AddEntry(geff,"DS efficiency","p"); leg->AddEntry(gtreff,"Event selection efficiency","p"); leg->Draw("same"); TLine *l = new TLine(0,1,90,1); l->SetLineStyle(2); l->Draw("same"); c1->SaveAs(Form("%sratiovseff.png",dirname.Data())); }
void NBD::fit(){ TFile *fdata = TFile::Open(dataname.GetName()); TH1D *histo_obs = (TH1D*)fdata->Get(histoname.GetName()); TFile *fGlauber = TFile::Open(Glaubername.GetName()); TH1D *histo_exp = (TH1D*)histo_obs->Clone(); TF1 *NBD_fun = new TF1("NBD_fun","[0]*TMath::Gamma(x+[1])/(TMath::Gamma(x+1)*TMath::Gamma([1]))*TMath::Power([2]/[1],x)/TMath::Power([2]/[1]+1,x+[1])",0,100); UInt_t iniseed = gRandom->GetSeed(); // reproduce the "random" numbers std::vector<double> muvector, kvector, chisvector, ndfvector; double mu,k; for(mu=mumin;mu<=mumax;mu+=mustep){ for(k=kmin;k<=kmax;k+=kstep){ // if(npar%50==0) cout<<"Have run "<<npar<<" parameter sets"<<endl; NBD_fun->SetParameter(0,1); //[0]: Normalized constant NBD_fun->SetParameter(1,k); //[1]: k value NBD_fun->SetParameter(2,mu); //[2]: mu value TTree *t = (TTree*)fGlauber->Get("nt_Pb_Pb"); Float_t Ncoll, Npart, B; Long_t Nevent; t->SetBranchAddress("Ncoll",&Ncoll); t->SetBranchAddress("Npart",&Npart); t->SetBranchAddress("B",&B); Nevent = (Long_t) t->GetEntries(); Long_t Ev; Int_t Bino; Double_t Para, Bi_Para; gRandom->SetSeed(iniseed); histo_exp->Reset("M"); for (Ev=0; Ev<Nevent; Ev++){ //if(Ev%100000==0) cout<<"\t"<<"Have run "<<Ev<<" events"<<endl; t->GetEntry(Ev); Para = 0; //make sure that Para doesn't accumulate through loops for(Bino=0; Bino<Ncoll; Bino++){ // Bi_Para = unr.SampleDiscr(); Bi_Para = NBD_fun->GetRandom(); Para += Bi_Para; } histo_exp->Fill(Para); } double ndf; double chi_square = chisquare(histo_obs,histo_exp,xmin[0],xmax[0],ndf); if(chi_square>=0){ chisvector.push_back(chi_square); muvector.push_back(mu); kvector.push_back(k); ndfvector.push_back(ndf); } cout<<mu<<"\t"<<k<<"\t"<<chi_square<<"\t"<<ndf<<endl;//<<"\t"<<ndf<<"\t"<<p<<endl; } } double *amu = &muvector[0]; double *ak = &kvector[0]; double *achis = &chisvector[0]; int loc = TMath::LocMin(chisvector.size(),achis); mubest[0] = muvector[loc]; kbest[0] = kvector[loc]; chis[0] = chisvector[loc]; Ndf[0] = ndfvector[loc]; 
cout<<"{"<<mubest[0]<<","<<kbest[0]<<"}"<<endl; cout<<chis[0]<<"\t"<<Ndf[0]<<endl; Grgrid = new TGraph2D("Grgrid","",chisvector.size(),amu,ak,achis); }
//_______________________________________________________________________________________________ // // Initialize plugin with all needed for AliEn // Bool_t SetupForAlien() { if (!plugin) { ::Error("SetupPlugin::SetupForAlien()", "Initialize plugin first"); return kFALSE; } // create names if (!AssignNames()) { ::Error("SetupPlugin::SetupForAlien()", "Failed name initializations"); return kFALSE; } // API version plugin->SetAPIVersion(apiVersion.Data()); // merging detauls plugin->SetMergeViaJDL(); plugin->SetMaxMergeFiles(maxMergeFiles); plugin->SetMaxMergeStages(maxMergeStages); // output paths plugin->SetGridWorkingDir(workDir.Data()); plugin->SetGridOutputDir(outDir.Data()); plugin->SetDefaultOutputs(kTRUE); // excutable plugin->SetExecutableCommand("aliroot -q -b"); plugin->SetExecutableArgs(">& std.log"); plugin->SetExecutable(fileSH.Data()); // automatically created files plugin->SetAnalysisMacro(fileC.Data()); plugin->SetJDLName(fileJDL.Data()); // JDL parameters plugin->SetSplitMaxInputFileNumber(split); plugin->SetMaxInitFailed(maxInitFailed); plugin->SetMasterResubmitThreshold(resubmitThr); plugin->SetTTL(TTL); plugin->SetPrice(price); plugin->SetInputFormat(inputFormat.Data()); plugin->SetSplitMode(splitMode.Data()); if (jobTag.Length() > 0) plugin->SetJobTag(jobTag.Data()); // input definition if (alienInputRuns) { plugin->SetOutputToRunNo(kTRUE); plugin->SetNtestFiles(nTestFiles); plugin->SetNrunsPerMaster(nRunsPerMaster); plugin->SetRunPrefix(runPrefix.Data()); plugin->SetGridDataDir(runPath.Data()); plugin->SetDataPattern(runPattern.Data()); plugin->AddRunList(runList.Data()); } else { TObjArray *list = xmlList.Tokenize(" "); TObjArrayIter next(list); TObjString *os = 0x0; while ( (os = (TObjString*)next()) ) { plugin->AddDataFile(os->GetString().Data()); } plugin->SetOutputToRunNo(kFALSE); } return kTRUE; }