void fitter( const string workDirName="Test", // Working directory // Select the type of datasets to fit bool fitData = true, // Fits Data if true, otherwise fits MC bool fitPbPb = true, // Fits PbPb datasets bool fitPP = true, // Fits PP datasets // Select the type of object to fit bool incJpsi = true, // Includes Jpsi model bool incPsi2S = true, // Includes Psi(2S) model bool incBkg = true, // Includes Background model // Select the fitting options bool cutCtau = false, // Apply prompt ctau cuts bool doSimulFit = false, // Do simultaneous fit bool wantPureSMC = false, // Flag to indicate if we want to fit pure signal MC int numCores = 2, // Number of cores used for fitting // Select the drawing options bool setLogScale = true, // Draw plot with log scale bool incSS = false, // Include Same Sign data bool zoomPsi = false, // Zoom Psi(2S) peak on extra pad int nBins = 54 // Number of bins used for plotting ) { // ------------------------------------------------------------------------------- // STEP 0: INITIALIZE THE FITTER WORK ENVIROMENT // The work enviroment is divided as follows: /* main |-> Macros: Contain all the macros |-> Input |-> <WorkDir> : Contain Input File, Bin and Parameter List for a given work directory (e.g. 20160201) |-> Output |-> <WorkDir> : Contain Output Plots and Results for a given work directory (e.g. 20160201) |-> DataSet : Contain all the datasets (MC and Data) */ if (!checkSettings(fitData, fitPbPb, fitPP, incJpsi, incPsi2S, incBkg, cutCtau, doSimulFit, wantPureSMC, setLogScale, zoomPsi, incSS, numCores, nBins)) { return; } map<string,string> DIR; if(!iniWorkEnv(DIR, workDirName)){ return; } // ------------------------------------------------------------------------------- // STEP 1: CREATE/LOAD THE ROODATASETS /* Input : List of TTrees with format: TAG <tab> FILE_NAME Output: Collection of RooDataSets splitted by tag name, including OS and SS dimuons. 
*/ const string InputTrees = DIR["input"] + "InputTrees.txt"; map<string, vector<string> > InputFileCollection; if(!getInputFileNames(InputTrees, InputFileCollection)){ return; } TObjArray* aDSTAG = new TObjArray(); // Array to store the different tags in the list of trees aDSTAG->SetOwner(true); map<string, RooWorkspace> Workspace; for(map<string, vector<string> >::iterator FileCollection=InputFileCollection.begin(); FileCollection!=InputFileCollection.end(); ++FileCollection) { // Get the file tag which has the following format: DSTAG_COLL , i.e. DATA_PP string FILETAG = FileCollection->first; string DSTAG = FILETAG; if (FILETAG.size()) { DSTAG.erase(DSTAG.find("_")); } else { cout << "[ERROR] FILETAG is empty!" << endl; } // Extract the filenames vector<string> InputFileNames = FileCollection->second; string OutputFileName; // If we have data, check if the user wants to fit data if ( (FILETAG.find("DATA")!=std::string::npos) && fitData==true ) { if ( (FILETAG.find("PP")!=std::string::npos) && !fitPP ) continue; // If we find PP, check if the user wants PP if ( (FILETAG.find("PbPb")!=std::string::npos) && !fitPbPb ) continue; // If we find PbPb, check if the user wants PbPb OutputFileName = DIR["dataset"] + "DATASET_" + FILETAG + ".root"; if(!tree2DataSet(Workspace[DSTAG], InputFileNames, FILETAG, OutputFileName)){ return; } if (!aDSTAG->FindObject(DSTAG.c_str())) aDSTAG->Add(new TObjString(DSTAG.c_str())); } // If we find MC, check if the user wants to fit MC if ( (FILETAG.find("MC")!=std::string::npos) && fitData==false ) { if ( (FILETAG.find("PP")!=std::string::npos) && !fitPP ) continue; // If we find PP, check if the user wants PP if ( (FILETAG.find("PbPb")!=std::string::npos) && !fitPbPb ) continue; // If we find PbPb, check if the user wants PbPb if ( (FILETAG.find("JPSI")!=std::string::npos) && !incJpsi ) continue; // If we find Jpsi MC, check if the user wants to include Jpsi if ( (FILETAG.find("PSI2S")!=std::string::npos) && !incPsi2S ) continue; // If 
we find Psi2S MC, check if the user wants to include Psi2S OutputFileName = DIR["dataset"] + "DATASET_" + FILETAG + ".root"; if(!tree2DataSet(Workspace[DSTAG], InputFileNames, FILETAG, OutputFileName)){ return; } if (!aDSTAG->FindObject(DSTAG.c_str())) aDSTAG->Add(new TObjString(DSTAG.c_str())); if (wantPureSMC) { OutputFileName = DIR["dataset"] + "DATASET_" + FILETAG + "_PureS" + ".root"; if(!tree2DataSet(Workspace[Form("%s_PureS",DSTAG.c_str())], InputFileNames, FILETAG, OutputFileName)){ return; } } } } if (Workspace.size()==0) { cout << "[ERROR] No onia tree files were found matching the user's input settings!" << endl; return; } // ------------------------------------------------------------------------------- // STEP 2: LOAD THE INITIAL PARAMETERS /* Input : List of initial parameters with format PT <tab> RAP <tab> CEN <tab> iniPar ... Output: two vectors with one entry per kinematic bin filled with the cuts and initial parameters */ string InputFile; vector< struct KinCuts > cutVector; vector< map<string, string> > parIniVector; if (fitPbPb && incBkg) { // Add initial parameters for PbPb background models InputFile = (DIR["input"] + "InitialParam_MASS_BKG_PbPb.csv"); if (!addParameters(InputFile, cutVector, parIniVector, true)) { return; } } if (fitPbPb && incJpsi) { // Add initial parameters for PbPb jpsi models InputFile = (DIR["input"] + "InitialParam_MASS_JPSI_PbPb.csv"); if (!addParameters(InputFile, cutVector, parIniVector, true)) { return; } } if (fitPbPb && incPsi2S) { // Add initial parameters for PbPb psi(2S) models InputFile = (DIR["input"] + "InitialParam_MASS_PSI2S_PbPb.csv"); if (!addParameters(InputFile, cutVector, parIniVector, true)) { return; } } if (fitPP && incBkg) { // Add initial parameters for PP background models InputFile = (DIR["input"] + "InitialParam_MASS_BKG_PP.csv"); if (!addParameters(InputFile, cutVector, parIniVector, false)) { return; } } if (fitPP && incJpsi) { // Add initial parameters for PP jpsi models InputFile = 
(DIR["input"] + "InitialParam_MASS_JPSI_PP.csv"); if (!addParameters(InputFile, cutVector, parIniVector, false)) { return; } } if (fitPP && incPsi2S) { // Add initial parameters for PP psi(2S) models InputFile = (DIR["input"] + "InitialParam_MASS_PSI2S_PP.csv"); if (!addParameters(InputFile, cutVector, parIniVector, false)) { return; } } // ------------------------------------------------------------------------------- // STEP 3: FIT THE DATASETS /* Input : -> The cuts and initial parameters per kinematic bin -> The workspace with the full datasets included. Output: -> Plots (png, pdf and root format) of each fit. -> The local workspace used for each fit. */ TIter nextDSTAG(aDSTAG); string outputDir = DIR["output"]; for (unsigned int i=0; i<cutVector.size(); i++) { nextDSTAG.Reset(); TObjString* soDSTAG(0x0); while ( (soDSTAG = static_cast<TObjString*>(nextDSTAG.Next())) ) { TString DSTAG = static_cast<TString>(soDSTAG->GetString()); if (Workspace.count(DSTAG.Data())>0) { // DATA/MC datasets were loaded if (doSimulFit) { // If do simultaneous fits, then just fits once if (!fitCharmonia( Workspace[DSTAG.Data()], cutVector.at(i), parIniVector.at(i), outputDir, // Select the type of datasets to fit DSTAG.Data(), false, // dummy flag when fitting simultaneously since both PP and PbPb are used // Select the type of object to fit incJpsi, // Includes Jpsi model incPsi2S, // Includes Psi(2S) model incBkg, // Includes Background model // Select the fitting options cutCtau, // Apply prompt ctau cuts true, // Do simultaneous fit wantPureSMC, // Flag to indicate if we want to fit pure signal MC numCores, // Number of cores used for fitting // Select the drawing options setLogScale, // Draw plot with log scale incSS, // Include Same Sign data zoomPsi, // Zoom Psi(2S) peak on extra pad nBins, // Number of bins used for plotting false // Compute the mean PT (NEED TO FIX) ) ) { return; } } else { // If don't want simultaneous fits, then fit PbPb or PP separately if ( 
DSTAG.Contains("MCJPSI") ) { incJpsi = true; incPsi2S = false; } if ( DSTAG.Contains("MCPSI2S") ) { incJpsi = false; incPsi2S = true; } if (fitPbPb) { if (!fitCharmonia( Workspace[DSTAG.Data()], cutVector.at(i), parIniVector.at(i), outputDir, // Select the type of datasets to fit DSTAG.Data(), true, // In this case we are fitting PbPb // Select the type of object to fit incJpsi, // Includes Jpsi model incPsi2S, // Includes Psi(2S) model incBkg, // Includes Background model // Select the fitting options cutCtau, // Apply prompt ctau cuts false, // Do simultaneous fit false, // Flag to indicate if we want to fit pure signal MC numCores, // Number of cores used for fitting // Select the drawing options setLogScale, // Draw plot with log scale incSS, // Include Same Sign data zoomPsi, // Zoom Psi(2S) peak on extra pad nBins, // Number of bins used for plotting false // Compute the mean PT (NEED TO FIX) ) ) { return; } if (DSTAG.Contains("MC") && wantPureSMC) { if (!fitCharmonia( Workspace[Form("%s_PureS",DSTAG.Data())], cutVector.at(i), parIniVector.at(i), outputDir, // Select the type of datasets to fit DSTAG.Data(), true, // In this case we are fitting PbPb // Select the type of object to fit incJpsi, // Includes Jpsi model incPsi2S, // Includes Psi(2S) model incBkg, // Includes Background model // Select the fitting options cutCtau, // Apply prompt ctau cuts false, // Do simultaneous fit true, // Flag to indicate if we want to fit pure signal MC numCores, // Number of cores used for fitting // Select the drawing options setLogScale, // Draw plot with log scale incSS, // Include Same Sign data zoomPsi, // Zoom Psi(2S) peak on extra pad nBins, // Number of bins used for plotting false // Compute the mean PT (NEED TO FIX) ) ) { return; } } } if (fitPP) { if (!fitCharmonia( Workspace[DSTAG.Data()], cutVector.at(i), parIniVector.at(i), outputDir, // Select the type of datasets to fit DSTAG.Data(), false, // In this case we are fitting PP // Select the type of object to 
fit incJpsi, // Includes Jpsi model incPsi2S, // Includes Psi(2S) model incBkg, // Includes Background model // Select the fitting options cutCtau, // Apply prompt ctau cuts false, // Do simultaneous fit false, // Flag to indicate if we want to fit pure signal MC numCores, // Number of cores used for fitting // Select the drawing options setLogScale, // Draw plot with log scale incSS, // Include Same Sign data zoomPsi, // Zoom Psi(2S) peak on extra pad nBins, // Number of bins used for plotting false // Compute the mean PT (NEED TO FIX) ) ) { return; } if (DSTAG.Contains("MC") && wantPureSMC) { if (!fitCharmonia( Workspace[Form("%s_PureS",DSTAG.Data())], cutVector.at(i), parIniVector.at(i), outputDir, // Select the type of datasets to fit DSTAG.Data(), false, // In this case we are fitting PP // Select the type of object to fit incJpsi, // Includes Jpsi model incPsi2S, // Includes Psi(2S) model incBkg, // Includes Background model // Select the fitting options cutCtau, // Apply prompt ctau cuts false, // Do simultaneous fit true, // Flag to indicate if we want to fit pure signal MC numCores, // Number of cores used for fitting // Select the drawing options setLogScale, // Draw plot with log scale incSS, // Include Same Sign data zoomPsi, // Zoom Psi(2S) peak on extra pad nBins, // Number of bins used for plotting false // Compute the mean PT (NEED TO FIX) ) ) { return; } } } } } else { cout << "[ERROR] The workspace for " << DSTAG.Data() << " was not found!" << endl; return; } } } delete aDSTAG; };
void ClusterDensityFromQA(const char* qaFile="") { if ( TString(qaFile).BeginsWith("alien")) { TGrid::Connect("alien://"); } TFile* f = TFile::Open(qaFile); std::vector<IntegrationRange> rangesLow; std::vector<IntegrationRange> rangesHigh; rangesLow.push_back(IntegrationRange{-10,10, 80, 85}); rangesLow.push_back(IntegrationRange{-10,10, 80, 85}); rangesLow.push_back(IntegrationRange{-10,10, 95,100}); rangesLow.push_back(IntegrationRange{-10,10, 95,100}); for ( int i = 0; i < 4; ++i ) { IntegrationRange ref = rangesLow[i]; TObjArray* a = static_cast<TObjArray*>(f->Get("MUON_QA/expert")); TH2* h = static_cast<TH2*>(a->FindObject(Form("hClusterHitMapInCh%d",i+1))); // derive other symetric ranges from that one std::vector<IntegrationRange> ranges; double ysize = ref.ymax - ref.ymin; double xsize = (ref.xmax - ref.xmin)/2.0; ranges.push_back(ref); ranges.push_back(IntegrationRange{ref.xmin,ref.xmax,-ref.ymax,-ref.ymin}); ranges.push_back(IntegrationRange{ref.ymin+ysize/2.0,ref.ymax,ref.xmin,ref.xmax}); TCanvas* c = new TCanvas(Form("Chamber%d",i+1),Form("Chamber%d",i+1)); h->Draw("colz"); std::cout << "CHAMBER " << i+1 << " LOW = "; for ( auto r : ranges ) { double count = h->Integral( h->GetXaxis()->FindBin(r.xmin), h->GetXaxis()->FindBin(r.xmax), h->GetYaxis()->FindBin(r.ymin), h->GetYaxis()->FindBin(r.ymax) ); std::cout << " " << count << "(" << r.Surface() << " cm^2)"; std::vector<double> x = { r.xmin,r.xmax,r.xmax,r.xmin,r.xmin }; std::vector<double> y = { r.ymax,r.ymax,r.ymin,r.ymin,r.ymax }; TPolyLine* l = new TPolyLine(5,&x[0],&y[0]); l->SetLineColor(1); l->SetLineStyle(9); l->Draw(); } std::cout << std::endl; } }
void simall(Int_t nEvents = 1, TObjArray& fDetList, Bool_t fVis=kFALSE, TString fMC="TGeant3", TString fGenerator="mygenerator", Bool_t fUserPList= kFALSE ) { TString dir = getenv("VMCWORKDIR"); TString simdir = dir + "/macros"; TString sim_geomdir = dir + "/geometry"; gSystem->Setenv("GEOMPATH",sim_geomdir.Data()); TString sim_confdir = dir + "gconfig"; gSystem->Setenv("CONFIG_DIR",sim_confdir.Data()); // Output files TString OutFile = "simout.root"; TString ParFile = "simpar.root"; // In general, the following parts need not be touched // ======================================================================== // ---- Debug option ------------------------------------------------- gDebug = 0; // ------------------------------------------------------------------------ // ----- Timer -------------------------------------------------------- TStopwatch timer; timer.Start(); // ------------------------------------------------------------------------ // ---- Load libraries ------------------------------------------------- gROOT->LoadMacro("$VMCWORKDIR/gconfig/basiclibs.C"); basiclibs(); gSystem->Load("libGenVector"); gSystem->Load("libGeoBase"); gSystem->Load("libFairDB"); gSystem->Load("libParBase"); gSystem->Load("libBase"); gSystem->Load("libMCStack"); gSystem->Load("libField"); gSystem->Load("libGen"); //---- Load specific libraries --------------------------------------- gSystem->Load("libEnsarbase"); gSystem->Load("libEnsarGen"); gSystem->Load("libEnsarData"); gSystem->Load("libEnsarMyDet"); // ----- Create simulation run ---------------------------------------- FairRunSim* run = new FairRunSim(); run->SetName(fMC.Data()); // Transport engine run->SetOutputFile(OutFile.Data()); // Output file FairRuntimeDb* rtdb = run->GetRuntimeDb(); // R3B Special Physics List in G4 case if ( (fUserPList == kTRUE ) && (fMC.CompareTo("TGeant4") == 0) ){ run->SetUserConfig("g4Config.C"); run->SetUserCuts("SetCuts.C"); } // ----- Create media 
------------------------------------------------- //run->SetMaterials("media_r3b.geo"); // Materials // Magnetic field map type // Int_t fFieldMap = 0; // Global Transformations //- Two ways for a Volume Rotation are supported //-- 1) Global Rotation (Euler Angles definition) //-- This represent the composition of : first a rotation about Z axis with //-- angle phi, then a rotation with theta about the rotated X axis, and //-- finally a rotation with psi about the new Z axis. Double_t phi,theta,psi; //-- 2) Rotation in Ref. Frame of the Volume //-- Rotation is Using Local Ref. Frame axis angles Double_t thetaX,thetaY,thetaZ; //- Global Translation Lab. frame. Double_t tx,ty,tz; // ----- Create geometry -------------------------------------------- if (fDetList.FindObject("MYDET") ) { //My Detector definition EnsarDetector* mydet = new EnsarMyDet("MyDet", kTRUE); // Global position of the Module phi = 0.0; // (deg) theta = 0.0; // (deg) psi = 0.0; // (deg) // Rotation in Ref. Frame. thetaX = 0.0; // (deg) thetaY = 0.0; // (deg) thetaZ = 0.0; // (deg) // Global translation in Lab tx = 0.0; // (cm) ty = 0.0; // (cm) tz = 0.0; // (cm) mydet->SetRotAnglesXYZ(thetaX,thetaY,thetaZ); mydet->SetTranslation(tx,ty,tz); run->AddModule(mydet); } // ----- Create PrimaryGenerator -------------------------------------- // 1 - Create the Main API class for the Generator FairPrimaryGenerator* primGen = new FairPrimaryGenerator(); if (fGenerator.CompareTo("mygenerator") == 0 ) { // 2- Define the generator Double_t pdgId=211; // pion beam Double_t theta1= 0.; // polar angle distribution Double_t theta2= 7.; Double_t momentum=.8; // 10 GeV/c Int_t multiplicity = 50; // multiplicity (nb particles per event) FairBoxGenerator* boxGen = new FairBoxGenerator(pdgId,multiplicity); boxGen->SetThetaRange ( theta1, theta2); boxGen->SetPRange (momentum,momentum*2.); boxGen->SetPhiRange (0.,360.); boxGen->SetXYZ(0.0,0.0,-1.5); // add the box generator primGen->AddGenerator(boxGen); } 
run->SetGenerator(primGen); //-------Set visualisation flag to true------------------------------------ if (fVis==kTRUE){ run->SetStoreTraj(kTRUE); }else{ run->SetStoreTraj(kFALSE); } // ----- Initialize simulation run ------------------------------------ run->Init(); // ------ Increase nb of step Int_t nSteps = -15000; gMC->SetMaxNStep(nSteps); // ----- Runtime database --------------------------------------------- Bool_t kParameterMerged = kTRUE; FairParRootFileIo* parOut = new FairParRootFileIo(kParameterMerged); parOut->open(ParFile.Data()); rtdb->setOutput(parOut); rtdb->saveOutput(); rtdb->print(); // ----- Start run ---------------------------------------------------- if (nEvents>0) run->Run(nEvents); // ----- Finish ------------------------------------------------------- timer.Stop(); Double_t rtime = timer.RealTime(); Double_t ctime = timer.CpuTime(); cout << endl << endl; cout << "Macro finished succesfully." << endl; cout << "Output file is " << OutFile << endl; cout << "Parameter file is " << ParFile << endl; cout << "Real time " << rtime << " s, CPU time " << ctime << "s" << endl << endl; // ------------------------------------------------------------------------ cout << " Test passed" << endl; cout << " All ok " << endl; }
// // *** Configuration script for phi->KK analysis with 2010 runs *** // // A configuration script for RSN package needs to define the followings: // // (1) decay tree of each resonance to be studied, which is needed to select // true pairs and to assign the right mass to all candidate daughters // (2) cuts at all levels: single daughters, tracks, events // (3) output objects: histograms or trees // Bool_t RsnConfigPhiTPC ( AliRsnAnalysisTask *task, Bool_t isMC, Bool_t isMix, Bool_t useCentrality, AliRsnCutSet *eventCuts ) { if (!task) ::Error("RsnConfigPhiTPC", "NULL task"); // we define here a suffix to differentiate names of different setups for the same resonance // and we define also the name of the list of tracks we want to select for the analysis // (if will fail if no lists with this name were added to the RsnInputHandler) const char *suffix = "tpc"; const char *listName = "kaonTPC"; Bool_t useCharged = kTRUE; Int_t listID = -1; // find the index of the corresponding list in the RsnInputHandler AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager(); AliMultiInputEventHandler *multi = dynamic_cast<AliMultiInputEventHandler*>(mgr->GetInputEventHandler()); if (multi) { TObjArray *array = multi->InputEventHandlers(); AliRsnInputHandler *rsn = (AliRsnInputHandler*)array->FindObject("rsnInputHandler"); if (rsn) { AliRsnDaughterSelector *sel = rsn->GetSelector(); listID = sel->GetID(listName, useCharged); } } if (listID >= 0) ::Info ("RsnConfigPhiTPC.C", "Required list '%s' stays in position %d", listName, listID); else { ::Error("RsnConfigPhiTPC.C", "Required list '%s' absent in handler!", listName); return kFALSE; } // ---------------------------------------------------------------------------------------------- // -- DEFINITIONS ------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- // PAIR DEFINITIONS: // this contains the 
definition of particle species and charge for both daughters of a resonance, // which are used for the following purposes: // --> species is used to assign the mass to the daughter (e.g. for building invariant mass) // --> charge is used to select what tracks to use when doing the computation loops // When a user wants to compute a like-sign background, he must define also a pair definition // for each like-sign: in case of charged track decays, we need one for ++ and one for -- // Last two arguments are necessary only in some cases (but it is not bad to well initialize them): // --> PDG code of resonance, which is used for selecting true pairs, when needed // --> nominal resonance mass, which is used for computing quantities like Y or Mt AliRsnPairDef *phi_kaonP_kaonM = new AliRsnPairDef(AliRsnDaughter::kKaon, '+', AliRsnDaughter::kKaon, '-', 333, 1.019455); AliRsnPairDef *phi_kaonP_kaonP = new AliRsnPairDef(AliRsnDaughter::kKaon, '+', AliRsnDaughter::kKaon, '+', 333, 1.019455); AliRsnPairDef *phi_kaonM_kaonM = new AliRsnPairDef(AliRsnDaughter::kKaon, '-', AliRsnDaughter::kKaon, '-', 333, 1.019455); // PAIR LOOPS: // these are the objects which drive the computations and fill the output histograms // each one requires to be initialized with an AliRsnPairDef object, which provided masses, // last argument tells if the pair is for mixing or not (this can be also set afterwards, anyway) const Int_t nPairs = 5; Bool_t addPair[nPairs] = {1, 1, 1, 1, 1}; AliRsnLoopPair *phiLoop[nPairs]; phiLoop[0] = new AliRsnLoopPair(Form("%s_phi_kaonP_kaonM" , suffix), phi_kaonP_kaonM, kFALSE); phiLoop[1] = new AliRsnLoopPair(Form("%s_phi_kaonP_kaonM_true", suffix), phi_kaonP_kaonM, kFALSE); phiLoop[2] = new AliRsnLoopPair(Form("%s_phi_kaonP_kaonM_mix" , suffix), phi_kaonP_kaonM, kTRUE ); phiLoop[3] = new AliRsnLoopPair(Form("%s_phi_kaonP_kaonP" , suffix), phi_kaonP_kaonP, kFALSE); phiLoop[4] = new AliRsnLoopPair(Form("%s_phi_kaonM_kaonM" , suffix), phi_kaonM_kaonM, kFALSE); // set 
additional option for true pairs // 1) we select only pairs coming from the same mother, which must have the right PDG code (from pairDef) // 2) we select only pairs decaying according to the right channel (from pairDef species+charge definitions) phiLoop[1]->SetOnlyTrue(kTRUE); phiLoop[1]->SetCheckDecay(kTRUE); // don't add true pairs if not MC if (!isMC) addPair[1] = 0; addPair[0] = !isMix; addPair[1] = !isMix; addPair[2] = isMix; addPair[3] = !isMix; addPair[4] = !isMix; // ---------------------------------------------------------------------------------------------- // -- COMPUTED VALUES & OUTPUTS ----------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- // All values which should be computed are defined here and passed to the computation objects, // since they define all that is computed bye each one, and, in case one output is a histogram // they define the binning and range for that value // // NOTE: // --> multiplicity bins have variable size Double_t mult[] = { 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24., 25., 30., 35., 40., 50., 60., 70., 80., 90., 100., 120., 140., 160., 180., 200., 500.}; Int_t nmult = sizeof(mult) / sizeof(mult[0]); AliRsnValuePair *axisIM = new AliRsnValuePair("IM" , AliRsnValuePair::kInvMass ); AliRsnValuePair *axisRes = new AliRsnValuePair("RES", AliRsnValuePair::kInvMassRes); AliRsnValuePair *axisPt = new AliRsnValuePair("PT" , AliRsnValuePair::kPt ); AliRsnValuePair *axisY = new AliRsnValuePair("Y" , AliRsnValuePair::kY); AliRsnValueEvent*axisCentV0 = new AliRsnValueEvent("CNT" , AliRsnValueEvent::kCentralityV0); AliRsnValueEvent*axisMultESD = new AliRsnValueEvent("MESD", AliRsnValueEvent::kMultESDCuts ); AliRsnValueEvent*axisMultSPD = new AliRsnValueEvent("MSPD", AliRsnValueEvent::kMultSPD ); AliRsnValueEvent*axisMultTRK = new AliRsnValueEvent("MTRK", 
AliRsnValueEvent::kMult ); AliRsnValueEvent*axisMultMC = new AliRsnValueEvent("MMC" , AliRsnValueEvent::kMultMC ); axisIM ->SetBins(500, 0.9, 1.4); axisRes ->SetBins(-0.5, 0.5, 0.001); axisPt ->SetBins(50, 0.0, 5.0); axisY ->SetBins(1, -0.5, 0.5); axisCentV0 ->SetBins(20, 0.0, 100.0); axisMultESD->SetBins(nmult, mult); axisMultSPD->SetBins(nmult, mult); axisMultTRK->SetBins(nmult, mult); axisMultMC ->SetBins(nmult, mult); // create outputs: // we define one for true pairs, where we add resolution, and another without it, for all others // it seems that it is much advantageous to use sparse histograms when adding more than 2 axes AliRsnListOutput *out[2]; out[0] = new AliRsnListOutput("res" , AliRsnListOutput::kHistoSparse); out[1] = new AliRsnListOutput("nores", AliRsnListOutput::kHistoSparse); // add values to outputs: // if centrality is required, we add it only, otherwise we add all multiplicities // other axes (invmass, pt) are always added for (Int_t i = 0; i < 2; i++) { out[i]->AddValue(axisIM); out[i]->AddValue(axisPt); out[i]->AddValue(axisY); if (useCentrality) { ::Info("RsnConfigPhiTPC.C", "Adding centrality axis"); out[i]->AddValue(axisCentV0); } else { ::Info("RsnConfigPhiTPC.C", "Adding multiplicity axes"); //out[i]->AddValue(axisMultESD); //out[i]->AddValue(axisMultSPD); out[i]->AddValue(axisMultTRK); if (isMC) out[i]->AddValue(axisMultMC); } } // resolution only in the first out[0]->AddValue(axisRes); // ---------------------------------------------------------------------------------------------- // -- ADD SETTINGS TO LOOPS AND LOOPS TO TASK --------------------------------------------------- // ---------------------------------------------------------------------------------------------- for (Int_t ip = 0; ip < nPairs; ip++) { // skip pairs not to be added if (!addPair[ip]) continue; // assign list IDs phiLoop[ip]->SetListID(0, listID); phiLoop[ip]->SetListID(1, listID); // assign event cuts phiLoop[ip]->SetEventCuts(eventCuts); // assign outputs 
if (ip != 1) phiLoop[ip]->AddOutput(out[1]); else phiLoop[ip]->AddOutput(out[0]); // add to task task->AddLoop(phiLoop[ip]); } return kTRUE; }
//_________________________________________________________________________________________ Int_t checkPullTree(TString pathTree, TString pathNameThetaMap, TString pathNameSigmaMap, TString mapSuffix, const Int_t collType /*0: pp, 1: pPb, 2: PbPb*/, const Bool_t plotPull = kTRUE, const Double_t downScaleFactor = 1, TString pathNameSplinesFile = "", TString prSplinesName = "", TString fileNameTree = "bhess_PIDetaTree.root", TString treeName = "fTree") { const Bool_t isNonPP = collType != 0; const Double_t massProton = AliPID::ParticleMass(AliPID::kProton); Bool_t recalculateExpecteddEdx = pathNameSplinesFile != ""; TFile* f = 0x0; f = TFile::Open(Form("%s/%s", pathTree.Data(), fileNameTree.Data())); if (!f) { std::cout << "Failed to open tree file \"" << Form("%s/%s", pathTree.Data(), fileNameTree.Data()) << "\"!" << std::endl; return -1; } // Extract the data Tree TTree* tree = dynamic_cast<TTree*>(f->Get(treeName.Data())); if (!tree) { std::cout << "Failed to load data tree!" << std::endl; return -1; } // Extract the splines, if desired TSpline3* splPr = 0x0; if (recalculateExpecteddEdx) { std::cout << "Loading splines to recalculate expected dEdx!" << std::endl << std::endl; TFile* fSpl = TFile::Open(pathNameSplinesFile.Data()); if (!fSpl) { std::cout << "Failed to open spline file \"" << pathNameSplinesFile.Data() << "\"!" << std::endl; return 0x0; } TObjArray* TPCPIDResponse = (TObjArray*)fSpl->Get("TPCPIDResponse"); if (!TPCPIDResponse) { splPr = (TSpline3*)fSpl->Get(prSplinesName.Data()); // If splines are in file directly, without TPCPIDResponse object, try to load them if (!splPr) { std::cout << "Failed to load object array from spline file \"" << pathNameSplinesFile.Data() << "\"!" << std::endl; return 0x0; } } else { splPr = (TSpline3*)TPCPIDResponse->FindObject(prSplinesName.Data()); if (!splPr) { std::cout << "Failed to load splines from file \"" << pathNameSplinesFile.Data() << "\"!" 
<< std::endl; return 0x0; } } } else std::cout << "Taking dEdxExpected from Tree..." << std::endl << std::endl; // Extract the correction maps TFile* fMap = TFile::Open(pathNameThetaMap.Data()); if (!fMap) { std::cout << "Failed to open thetaMap file \"" << pathNameThetaMap.Data() << "\"! Will not additionally correct data...." << std::endl; } TH2D* hMap = 0x0; if (fMap) { hMap = dynamic_cast<TH2D*>(fMap->Get(Form("hRefined%s", mapSuffix.Data()))); if (!hMap) { std::cout << "Failed to load theta map!" << std::endl; return -1; } } TFile* fSigmaMap = TFile::Open(pathNameSigmaMap.Data()); if (!fSigmaMap) { std::cout << "Failed to open simgaMap file \"" << pathNameSigmaMap.Data() << "\"!" << std::endl; return -1; } TH2D* hThetaMapSigmaPar1 = dynamic_cast<TH2D*>(fSigmaMap->Get("hThetaMapSigmaPar1")); if (!hThetaMapSigmaPar1) { std::cout << "Failed to load sigma map for par 1!" << std::endl; return -1; } Double_t c0 = -1; TNamed* c0Info = dynamic_cast<TNamed*>(fSigmaMap->Get("c0")); if (!c0Info) { std::cout << "Failed to extract c0 from file with sigma map!" << std::endl; return -1; } TString c0String = c0Info->GetTitle(); c0 = c0String.Atof(); printf("Loaded parameter 0 for sigma: %f\n\n", c0); if (plotPull) std::cout << "Plotting pull..." << std::endl << std::endl; else std::cout << "Plotting delta'..." 
<< std::endl << std::endl; Long64_t nTreeEntries = tree->GetEntriesFast(); Double_t dEdx = 0.; // Measured dE/dx Double_t dEdxExpected = 0.; // Expected dE/dx according to parametrisation Double_t tanTheta = 0.; // Tangens of (local) theta at TPC inner wall Double_t pTPC = 0.; // Momentum at TPC inner wall UShort_t tpcSignalN = 0; // Number of clusters used for dEdx UChar_t pidType = 0; Int_t fMultiplicity = 0; //Double_t phiPrime = 0; // Only activate the branches of interest to save processing time tree->SetBranchStatus("*", 0); // Disable all branches tree->SetBranchStatus("pTPC", 1); tree->SetBranchStatus("dEdx", 1); tree->SetBranchStatus("dEdxExpected", 1); tree->SetBranchStatus("tanTheta", 1); tree->SetBranchStatus("tpcSignalN", 1); tree->SetBranchStatus("pidType", 1); //tree->SetBranchStatus("phiPrime", 1); if (isNonPP) tree->SetBranchStatus("fMultiplicity", 1); tree->SetBranchAddress("dEdx", &dEdx); tree->SetBranchAddress("dEdxExpected", &dEdxExpected); tree->SetBranchAddress("tanTheta", &tanTheta); tree->SetBranchAddress("tpcSignalN", &tpcSignalN); tree->SetBranchAddress("pTPC", &pTPC); tree->SetBranchAddress("pidType", &pidType); //tree->SetBranchAddress("phiPrime", &phiPrime); if (isNonPP) tree->SetBranchAddress("fMultiplicity", &fMultiplicity); // Output file TDatime daTime; TString savefileName = Form("%s%s_checkPullSigma_%04d_%02d_%02d__%02d_%02d.root", fileNameTree.ReplaceAll(".root", "").Data(), recalculateExpecteddEdx ? "_recalcdEdx" : "", daTime.GetYear(), daTime.GetMonth(), daTime.GetDay(), daTime.GetHour(), daTime.GetMinute()); TFile* fSave = TFile::Open(Form("%s/%s", pathTree.Data(), savefileName.Data()), "recreate"); if (!fSave) { std::cout << "Failed to open save file \"" << Form("%s/%s", pathTree.Data(), savefileName.Data()) << "\"!" 
<< std::endl; return -1; } const Double_t pBoundLow = 0.1; const Double_t pBoundUp = 5; const Int_t nBins1 = TMath::Ceil(180 / downScaleFactor); const Int_t nBins2 = TMath::Ceil(100 / downScaleFactor); const Int_t nBins3 = TMath::Ceil(60 / downScaleFactor); const Int_t nPbinsForMap = nBins1 + nBins2 + nBins3; Double_t binsPforMap[nPbinsForMap + 1]; Double_t binWidth1 = (1.0 - pBoundLow) / nBins1; Double_t binWidth2 = (2.0 - 1.0 ) / nBins2; Double_t binWidth3 = (pBoundUp - 2.0) / nBins3; for (Int_t i = 0; i < nBins1; i++) { binsPforMap[i] = pBoundLow + i * binWidth1; } for (Int_t i = nBins1, j = 0; i < nBins1 + nBins2; i++, j++) { binsPforMap[i] = 1.0 + j * binWidth2; } for (Int_t i = nBins1 + nBins2, j = 0; i < nBins1 + nBins2 + nBins3; i++, j++) { binsPforMap[i] = 2.0 + j * binWidth3; } binsPforMap[nPbinsForMap] = pBoundUp; TH2D* hPull = new TH2D("hPull", "Pull vs. p_{TPC} integrated over tan(#Theta);p_{TPC} (GeV/c);Pull", nPbinsForMap, binsPforMap, plotPull ? 120 : 240, plotPull ? -6 : -0.6, plotPull ? 6 : 0.6); TH2D* hPullAdditionalCorr = (TH2D*)hPull->Clone("hPullAdditionalCorr"); hPullAdditionalCorr->SetTitle("Pull vs. p_{TPC} integrated over tan(#Theta) with additional dEdx correction w.r.t. tan(#Theta)"); /* const Int_t nThetaHistos = 3; TH2D* hPullTheta[nThetaHistos]; TH2D* hPullAdditionalCorrTheta[nThetaHistos]; Double_t tThetaLow[nThetaHistos] = { 0.0, 0.4, 0.9 }; Double_t tThetaHigh[nThetaHistos] = { 0.1, 0.5, 1.0 }; */ const Int_t nThetaHistos = 10; TH2D* hPullTheta[nThetaHistos]; TH2D* hPullAdditionalCorrTheta[nThetaHistos]; Double_t tThetaLow[nThetaHistos] = { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9 }; Double_t tThetaHigh[nThetaHistos] = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 }; for (Int_t i = 0; i < nThetaHistos; i++) { hPullTheta[i] = new TH2D(Form("hPullTheta_%d", i), Form("Pull vs. p_{TPC} for %.2f <= |tan(#Theta)| < %.2f;p_{TPC} (GeV/c);Pull", tThetaLow[i], tThetaHigh[i]), nPbinsForMap, binsPforMap, plotPull ? 
120 : 240, plotPull ? -6 : -0.6, plotPull ? 6 : 0.6); hPullAdditionalCorrTheta[i] = new TH2D(Form("hPullAdditionalCorrTheta_%d", i), Form("Pull vs. p_{TPC} for %.2f <= |tan(#Theta)| < %.2f with additional dEdx correction w.r.t. tan(#Theta);p_{TPC} (GeV/c);Pull", tThetaLow[i], tThetaHigh[i]), nPbinsForMap, binsPforMap, plotPull ? 120 : 240, plotPull ? -6 : -0.6, plotPull ? 6 : 0.6); } TF1 corrFuncMult("corrFuncMult", "[0] + [1]*TMath::Max([4], TMath::Min(x, [3])) + [2] * TMath::Power(TMath::Max([4], TMath::Min(x, [3])), 2)", 0., 0.2); TF1 corrFuncMultTanTheta("corrFuncMultTanTheta", "[0] * (x -[2]) + [1] * (x * x - [2] * [2])", -1.5, 1.5); TF1 corrFuncSigmaMult("corrFuncSigmaMul", "TMath::Max(0, [0] + [1]*TMath::Min(x, [3]) + [2] * TMath::Power(TMath::Min(x, [3]), 2))", 0., 0.2); // LHC13b.pass2 if (isNonPP) printf("Using corr Parameters for 13b.pass2\n!"); corrFuncMult.SetParameter(0, -5.906e-06); corrFuncMult.SetParameter(1, -5.064e-04); corrFuncMult.SetParameter(2, -3.521e-02); corrFuncMult.SetParameter(3, 2.469e-02); corrFuncMult.SetParameter(4, 0); corrFuncMultTanTheta.SetParameter(0, -5.32e-06); corrFuncMultTanTheta.SetParameter(1, 1.177e-05); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, 0.); corrFuncSigmaMult.SetParameter(1, 0.); corrFuncSigmaMult.SetParameter(2, 0.); corrFuncSigmaMult.SetParameter(3, 0.); /* OK, but PID task was not very satisfying corrFuncMult.SetParameter(0, -6.27187e-06); corrFuncMult.SetParameter(1, -4.60649e-04); corrFuncMult.SetParameter(2, -4.26450e-02); corrFuncMult.SetParameter(3, 2.40590e-02); corrFuncMult.SetParameter(4, 0); corrFuncMultTanTheta.SetParameter(0, -5.338e-06); corrFuncMultTanTheta.SetParameter(1, 1.220e-05); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, 7.89237e-05); corrFuncSigmaMult.SetParameter(1, -1.30662e-02); corrFuncSigmaMult.SetParameter(2, 8.91548e-01); corrFuncSigmaMult.SetParameter(3, 1.47931e-02); */ /* // LHC11a10a if (isNonPP) 
printf("Using corr Parameters for 11a10a\n!"); corrFuncMult.SetParameter(0, 6.90133e-06); corrFuncMult.SetParameter(1, -1.22123e-03); corrFuncMult.SetParameter(2, 1.80220e-02); corrFuncMult.SetParameter(3, 0.1); corrFuncMult.SetParameter(4, 6.45306e-03); corrFuncMultTanTheta.SetParameter(0, -2.85505e-07); corrFuncMultTanTheta.SetParameter(1, -1.31911e-06); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, -4.29665e-05); corrFuncSigmaMult.SetParameter(1, 1.37023e-02); corrFuncSigmaMult.SetParameter(2, -6.36337e-01); corrFuncSigmaMult.SetParameter(3, 1.13479e-02); */ /* OLD without saturation and large error for negative slopes corrFuncSigmaMult.SetParameter(0, -4.79684e-05); corrFuncSigmaMult.SetParameter(1, 1.49938e-02); corrFuncSigmaMult.SetParameter(2, -7.15269e-01); corrFuncSigmaMult.SetParameter(3, 1.06855e-02); */ /* OLD very good try, but with fewer pBins for the fitting corrFuncMult.SetParameter(0, 6.88365e-06); corrFuncMult.SetParameter(1, -1.22324e-03); corrFuncMult.SetParameter(2, 1.81625e-02); corrFuncMult.SetParameter(3, 0.1); corrFuncMult.SetParameter(4, 6.36890e-03); corrFuncMultTanTheta.SetParameter(0, -2.85505e-07); corrFuncMultTanTheta.SetParameter(1, -1.31911e-06); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, -4.28401e-05); corrFuncSigmaMult.SetParameter(1, 1.24812e-02); corrFuncSigmaMult.SetParameter(2, -5.28531e-01); corrFuncSigmaMult.SetParameter(3, 1.25147e-02); */ /*OLD good try corrFuncMult.SetParameter(0, 7.50321e-06); corrFuncMult.SetParameter(1, -1.25250e-03); corrFuncMult.SetParameter(2, 1.85437e-02); corrFuncMult.SetParameter(3, 0.1); corrFuncMult.SetParameter(4, 6.21192e-03); corrFuncMultTanTheta.SetParameter(0, -1.43112e-07); corrFuncMultTanTheta.SetParameter(1, -1.53e-06); corrFuncMultTanTheta.SetParameter(2, 0.3); corrFuncSigmaMult.SetParameter(0, -2.54019e-05); corrFuncSigmaMult.SetParameter(1, 8.68883e-03); corrFuncSigmaMult.SetParameter(2, -3.36176e-01); 
corrFuncSigmaMult.SetParameter(3, 1.29230e-02); */ /* // LHC10h.pass2 if (isNonPP) printf("Using corr Parameters for 10h.pass2\n!"); corrFuncMult.SetParameter(0, 3.21636e-07); corrFuncMult.SetParameter(1, -6.65876e-04); corrFuncMult.SetParameter(2, 1.28786e-03); corrFuncMult.SetParameter(3, 1.47677e-02); corrFuncMult.SetParameter(4, 0.); corrFuncMultTanTheta.SetParameter(0, 7.23591e-08); corrFuncMultTanTheta.SetParameter(1, 2.7469e-06); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, -1.22590e-05); corrFuncSigmaMult.SetParameter(1, 6.88888e-03); corrFuncSigmaMult.SetParameter(2, -3.20788e-01); corrFuncSigmaMult.SetParameter(3, 1.07345e-02); */ /*OLD bad try corrFuncMult.SetParameter(0, 2.71514e-07); corrFuncMult.SetParameter(1, -6.92031e-04); corrFuncMult.SetParameter(2, 3.56042e-03); corrFuncMult.SetParameter(3, 1.47497e-02); corrFuncMult.SetParameter(4, 0.); corrFuncMultTanTheta.SetParameter(0, 8.53204e-08); corrFuncMultTanTheta.SetParameter(1, 2.85591e-06); corrFuncMultTanTheta.SetParameter(2, -0.5); corrFuncSigmaMult.SetParameter(0, -6.82477e-06); corrFuncSigmaMult.SetParameter(1, 4.97051e-03); corrFuncSigmaMult.SetParameter(2, -1.64954e-01); corrFuncSigmaMult.SetParameter(3, 9.21061e-03); */ //TODO NOW TF1* fShapeSmallP = new TF1("fShapeSmallP", "pol5", -0.4, 0.4); fShapeSmallP->SetParameters(1.01712, -0.0202725, -0.260692, 0.261623, 0.671854, -1.14014); for (Long64_t i = 0; i < nTreeEntries; i++) { tree->GetEntry(i); if (dEdx <= 0 || dEdxExpected <= 0 || tpcSignalN <= 10) continue; /* Double_t pT = pTPC*TMath::Sin(-TMath::ATan(tanTheta)+TMath::Pi()/2.0); if ((phiPrime > 0.072/pT+TMath::Pi()/18.0-0.035 && phiPrime < 0.07/pT/pT+0.1/pT+TMath::Pi()/18.0+0.035)) continue; */ if (pidType != kMCid) { if (pidType == kTPCid && pTPC > 0.6) continue; if (pidType == kTPCandTOFid && (pTPC < 0.6 || pTPC > 2.0)) continue; if ((collType == 2) && pidType == kTPCandTOFid && pTPC > 1.0) continue;// Only V0's in case of PbPb above 1.0 GeV/c if 
(pidType == kV0idPlusTOFrejected) //TODO NOW NEW continue; } if (recalculateExpecteddEdx) { dEdxExpected = 50. * splPr->Eval(pTPC / massProton); //WARNING: What, if MIP is different from 50.? Seems not to be used (tested for pp, MC_pp, PbPb and MC_PbPb), but can in principle happen } //TODO NOW /* if (TMath::Abs(tanTheta) <= 0.4) { Double_t p0 = fShapeSmallP->Eval(tanTheta) - 1.0; // Strength of the correction Double_t p1 = -9.0; // How fast the correction is turned off Double_t p2 = -0.209; // Turn off correction around 0.2 GeV/c Double_t p3 = 1.0; // Delta' for large p should be 1 Double_t corrFactor = TMath::Erf((pTPC + p2) * p1) * p0 + p3 + p0; // Add p0 to have 1 for p3 = 1 and large pTPC dEdxExpected *= corrFactor; }*/ /*TODO old unsuccessful try Double_t thetaGlobalTPC = -TMath::ATan(tanTheta) + TMath::Pi() / 2.; Double_t pTtpc = pTPC * TMath::Sin(thetaGlobalTPC); Double_t pTtpcInv = (pTtpc > 0) ? 1. / pTtpc : 0; Double_t p0 = 1.0; Double_t p1 = 1./ 0.5;//TODO 2.0; Double_t p2 = -0.2;//TODO 0.1 Double_t pTcorrFactor = p0 + (pTtpcInv > p1) * p2 * (pTtpcInv - p1); dEdxExpected *= pTcorrFactor; */ // From the momentum (via dEdxExpected) and the tanTheta of the track, the expected dEdx can be calculated (correctedDeDxExpected). // If the splines are correct, this should give in average the same value as dEdx. // Now valid: Maps created from corrected data with splines adopted to corrected data, so lookup should be for dEdxExpected=dEdxSplines (no further // eta correction) or the corrected dEdx from the track (which should ideally be = dEdxSplines) // Tested with corrected data for LHC10d.pass2: using dEdx for the lookup (which is the corrected value and should ideally be = dEdxSplines): // Results almost the same. Maybe slightly better for dEdxExpected. // No longer valid: Note that the maps take always the uncorrected dEdx w.r.t. // tanTheta, so that correctedDeDxExpected is needed here normally. 
However, the information for the correction will be lost at some point. // Therefore, dEdxExpected can be used instead and should provide a good approximation. Double_t c1FromSigmaMap = hThetaMapSigmaPar1->GetBinContent(getBinX(hThetaMapSigmaPar1, tanTheta), getBinY(hThetaMapSigmaPar1, 1./dEdxExpected)); Double_t expectedSigma = dEdxExpected * TMath::Sqrt( c0 * c0 + (c1FromSigmaMap * c1FromSigmaMap) / tpcSignalN); Double_t pull = (dEdx - dEdxExpected) / (plotPull ? expectedSigma: dEdxExpected); // Fill pull histo hPull->Fill(pTPC, pull); Double_t tanThetaAbs = TMath::Abs(tanTheta); for (Int_t j = 0; j < nThetaHistos; j++) { if (tanThetaAbs >= tThetaLow[j] && tanThetaAbs < tThetaHigh[j]) { hPullTheta[j]->Fill(pTPC, pull); } } if (!hMap) continue; Double_t correctionFactor = 1.; if (isNonPP) { // 1. Correct eta dependence correctionFactor = hMap->GetBinContent(getBinX(hMap, tanTheta), getBinY(hMap, 1./dEdxExpected)); // 2. Correct for multiplicity dependence: Double_t multCorrectionFactor = 1.; if (fMultiplicity > 0) { Double_t relSlope = corrFuncMult.Eval(1. / (dEdxExpected * correctionFactor)); relSlope += corrFuncMultTanTheta.Eval(tanTheta); multCorrectionFactor = 1. + relSlope * fMultiplicity; } c1FromSigmaMap = hThetaMapSigmaPar1->GetBinContent(getBinX(hThetaMapSigmaPar1, tanTheta), getBinY(hThetaMapSigmaPar1, 1./dEdxExpected)); // Multiplicity dependence of sigma depends on the real dEdx at zero multiplicity, i.e. the eta (only) corrected dEdxExpected value has to be used // since all maps etc. have been created for ~zero multiplicity Double_t relSigmaSlope = corrFuncSigmaMult.Eval(1. / (dEdxExpected * correctionFactor)); Double_t multSigmaCorrectionFactor = 1. + relSigmaSlope * fMultiplicity; dEdxExpected *= correctionFactor * multCorrectionFactor; expectedSigma = dEdxExpected * TMath::Sqrt( c0 * c0 + (c1FromSigmaMap * c1FromSigmaMap) / tpcSignalN); expectedSigma *= multSigmaCorrectionFactor; pull = (dEdx - dEdxExpected) / (plotPull ? 
expectedSigma: dEdxExpected); } else { correctionFactor = hMap->GetBinContent(getBinX(hMap, tanTheta), getBinY(hMap, 1./dEdxExpected)); c1FromSigmaMap = hThetaMapSigmaPar1->GetBinContent(getBinX(hThetaMapSigmaPar1, tanTheta), getBinY(hThetaMapSigmaPar1, 1./dEdxExpected)); dEdxExpected *= correctionFactor; // If data is not corrected, but the sigma map is for corrected data, re-do analysis with corrected dEdx expectedSigma = dEdxExpected * TMath::Sqrt( c0 * c0 + (c1FromSigmaMap * c1FromSigmaMap) / tpcSignalN); pull = (dEdx - dEdxExpected) / (plotPull ? expectedSigma: dEdxExpected); } pull = (dEdx - dEdxExpected) / (plotPull ? expectedSigma: dEdxExpected); hPullAdditionalCorr->Fill(pTPC, pull); for (Int_t j = 0; j < nThetaHistos; j++) { if (tanThetaAbs >= tThetaLow[j] && tanThetaAbs < tThetaHigh[j]) { hPullAdditionalCorrTheta[j]->Fill(pTPC, pull); } } } /* // Mean, Sigma, chi^2/NDF of pull of different theta bins and all in one plot TCanvas* canvPullMean = new TCanvas("canvPullMean", "canvPullMean", 100,10,1380,800); canvPullMean->SetLogx(kTRUE); canvPullMean->SetGridx(kTRUE); canvPullMean->SetGridy(kTRUE); TCanvas* canvPullSigma = new TCanvas("canvPullSigma", "canvPullSigma", 100,10,1380,800); canvPullSigma->SetLogx(kTRUE); canvPullSigma->SetGridx(kTRUE); canvPullSigma->SetGridy(kTRUE); TCanvas* canvPullChi2 = new TCanvas("canvPullChi2", "canvPullChi2", 100,10,1380,800); canvPullChi2->SetLogx(kTRUE); canvPullChi2->SetGridx(kTRUE); canvPullChi2->SetGridy(kTRUE); TCanvas* canvPull[nThetaHistos + 1]; for (Int_t i = 0, j = nThetaHistos; i < nThetaHistos + 1; i++, j--) { canvPull[i] = new TCanvas(Form("canvPull_%d", i), "canvPull", 100,10,1380,800); canvPull[i]->cd(); canvPull[i]->SetLogx(kTRUE); canvPull[i]->SetLogz(kTRUE); canvPull[i]->SetGrid(kTRUE, kTRUE); TH2D* hTemp = 0x0; TString thetaString = ""; if (i == nThetaHistos) { hTemp = hPull; thetaString = "tan(#Theta) integrated"; } else { hTemp = hPullTheta[i]; thetaString = Form("%.2f #leq |tan(#Theta)| < %.2f", 
tThetaLow[i], tThetaHigh[i]); } normaliseHisto(hTemp); hTemp->FitSlicesY(); hTemp->GetYaxis()->SetNdivisions(12); hTemp->GetXaxis()->SetMoreLogLabels(kTRUE); TH1D* hTempMean = (TH1D*)gDirectory->Get(Form("%s_1", hTemp->GetName())); hTempMean->SetTitle(Form("mean(pull), %s", thetaString.Data())); hTempMean->GetXaxis()->SetMoreLogLabels(kTRUE); hTempMean->SetLineWidth(2); hTempMean->SetMarkerStyle(20); TH1D* hTempSigma = (TH1D*)gDirectory->Get(Form("%s_2", hTemp->GetName())); hTempSigma->SetTitle(Form("#sigma(pull), %s", thetaString.Data())); hTempSigma->GetXaxis()->SetMoreLogLabels(kTRUE); hTempSigma->SetLineColor(kMagenta); hTempSigma->SetMarkerStyle(20); hTempSigma->SetMarkerColor(kMagenta); hTempSigma->SetLineWidth(2); TH1D* hTempChi2 = (TH1D*)gDirectory->Get(Form("%s_chi2", hTemp->GetName())); hTempChi2->SetTitle(Form("#chi^{2} / NDF (pull), %s", thetaString.Data())); hTempChi2->GetXaxis()->SetMoreLogLabels(kTRUE); hTempChi2->SetLineColor(kMagenta + 2); hTempChi2->SetMarkerStyle(20); hTempChi2->SetMarkerColor(kMagenta + 2); hTempChi2->SetLineWidth(2); hTemp->DrawCopy("colz"); hTempMean->DrawCopy("same"); hTempSigma->DrawCopy("same"); hTempChi2->Scale(-1./10.); hTempChi2->DrawCopy("same"); hTempChi2->Scale(-10.); canvPullMean->cd(); hTempMean->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempMean->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempMean->DrawCopy((i == 0 ? "" : "same")); canvPullSigma->cd(); hTempSigma->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempSigma->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempSigma->DrawCopy((i == 0 ? "" : "same")); canvPullChi2->cd(); hTempChi2->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempChi2->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempChi2->DrawCopy((i == 0 ? 
"" : "same")); } canvPullMean->BuildLegend(); canvPullSigma->BuildLegend(); canvPullChi2->BuildLegend(); */ // Histograms with additional correction TCanvas* canvPullMeanCorr = 0x0; TCanvas* canvPullSigmaCorr = 0x0; TCanvas* canvPullChi2Corr = 0x0; TCanvas* canvPullCorr[nThetaHistos + 1]; for (Int_t i = 0; i < nThetaHistos + 1; i++) canvPullCorr[i] = 0x0; if (hMap) { // Mean, Sigma, chi^2/NDF of pull of different theta bins and all in one plot canvPullMeanCorr = new TCanvas("canvPullMeanCorr", "canvPullMeanCorr", 100,10,1380,800); canvPullMeanCorr->SetLogx(kTRUE); canvPullMeanCorr->SetGridx(kTRUE); canvPullMeanCorr->SetGridy(kTRUE); canvPullSigmaCorr = new TCanvas("canvPullSigmaCorr", "canvPullSigmaCorr", 100,10,1380,800); canvPullSigmaCorr->SetLogx(kTRUE); canvPullSigmaCorr->SetGridx(kTRUE); canvPullSigmaCorr->SetGridy(kTRUE); canvPullChi2Corr = new TCanvas("canvPullChi2Corr", "canvPullChi2Corr", 100,10,1380,800); canvPullChi2Corr->SetLogx(kTRUE); canvPullChi2Corr->SetGridx(kTRUE); canvPullChi2Corr->SetGridy(kTRUE); for (Int_t i = 0, j = nThetaHistos; i < nThetaHistos + 1; i++, j--) { canvPullCorr[i] = new TCanvas(Form("canvPullCorr_%d", i), "canvPullCorr", 100,10,1380,800); canvPullCorr[i]->cd(); canvPullCorr[i]->SetLogx(kTRUE); canvPullCorr[i]->SetLogz(kTRUE); canvPullCorr[i]->SetGrid(kTRUE, kTRUE); TH2D* hTemp = 0x0; TString thetaString = ""; if (i == nThetaHistos) { hTemp = hPullAdditionalCorr; thetaString = "tan(#Theta) integrated"; } else { hTemp = hPullAdditionalCorrTheta[i]; thetaString = Form("%.2f #leq |tan(#Theta)| < %.2f", tThetaLow[i], tThetaHigh[i]); } normaliseHisto(hTemp); hTemp->FitSlicesY(); hTemp->GetYaxis()->SetNdivisions(12); hTemp->GetXaxis()->SetMoreLogLabels(kTRUE); TH1D* hTempMean = (TH1D*)gDirectory->Get(Form("%s_1", hTemp->GetName())); hTempMean->SetTitle(Form("mean(pull), %s", thetaString.Data())); hTempMean->GetXaxis()->SetMoreLogLabels(kTRUE); hTempMean->SetLineWidth(2); hTempMean->SetMarkerStyle(20); TH1D* hTempSigma = 
(TH1D*)gDirectory->Get(Form("%s_2", hTemp->GetName())); hTempSigma->SetTitle(Form("#sigma(pull), %s", thetaString.Data())); hTempSigma->GetXaxis()->SetMoreLogLabels(kTRUE); hTempSigma->SetLineColor(kMagenta); hTempSigma->SetMarkerStyle(20); hTempSigma->SetMarkerColor(kMagenta); hTempSigma->SetLineWidth(2); TH1D* hTempChi2 = (TH1D*)gDirectory->Get(Form("%s_chi2", hTemp->GetName())); hTempChi2->SetTitle(Form("#chi^{2} / NDF (pull), %s", thetaString.Data())); hTempChi2->GetXaxis()->SetMoreLogLabels(kTRUE); hTempChi2->SetLineColor(kMagenta + 2); hTempChi2->SetMarkerStyle(20); hTempChi2->SetMarkerColor(kMagenta + 2); hTempChi2->SetLineWidth(2); hTemp->DrawCopy("colz"); hTempMean->DrawCopy("same"); hTempSigma->DrawCopy("same"); hTempChi2->Scale(-1./10.); hTempChi2->DrawCopy("same"); hTempChi2->Scale(-10.); canvPullMeanCorr->cd(); hTempMean->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempMean->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempMean->DrawCopy((i == 0 ? "" : "same")); canvPullSigmaCorr->cd(); hTempSigma->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempSigma->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempSigma->DrawCopy((i == 0 ? "" : "same")); canvPullChi2Corr->cd(); hTempChi2->SetLineColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempChi2->SetMarkerColor(1 + ((j >= 9) ? (39 + 2 * (j - 9)) : j)); hTempChi2->DrawCopy((i == 0 ? 
"" : "same")); } canvPullMeanCorr->BuildLegend(); canvPullSigmaCorr->BuildLegend(); canvPullChi2Corr->BuildLegend(); } fSave->cd(); /*canvPullMean->Write(); canvPullSigma->Write(); canvPullChi2->Write(); for (Int_t i = 0; i < nThetaHistos + 1; i++) { canvPull[i]->Write(); }*/ canvPullMeanCorr->Write(); canvPullSigmaCorr->Write(); canvPullChi2Corr->Write(); for (Int_t i = 0; i < nThetaHistos + 1; i++) { canvPullCorr[i]->Write(); } TNamed* info = new TNamed(Form("Theta map: %s\n\nSigma map: %s\n\nSplines file: %s\n\nSplines name: %s", pathNameThetaMap.Data(), pathNameSigmaMap.Data(), pathNameSplinesFile.Data(), prSplinesName.Data()), "info"); info->Write(); fSave->Close(); return 0; }
// // Test config macro for RSN package. // It configures: // 1) a monitor for all tracks passing quality cuts // 2) a monitor for all tracks passing quality + PID cuts // 3) an unlike-sign invariant-mass + pt distribution for K+K- pairs // Bool_t RsnConfigPhiKaonTest ( AliRsnAnalysisTask *task, Bool_t isMC ) { if (!task) { ::Error("RsnConfigPhiKaonTest.C", "NULL task"); return kFALSE; } const char *suffix = "test"; // ---------------------------------------------------------------------------------------------- // -- DEFINITIONS ------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- // daughter definition for monitor loops // since it is intended to loop over all 'track' like objects (in the sense that we exclude V0s and cascades), // we initialize it using the constructor that requires an AliRsnDaughter::EType and a charge, but since // we want to loop over both charges, we set it to anything which is not '+' '-' or '0', which are tokens for // selecting only positive, only negative or only neutral AliRsnDaughterDef *tracks = new AliRsnDaughterDef(AliRsnDaughter::kTrack, 0); // definition of pair decay tree for phi resonance // here we *must* specify a particle species and a charge, in order to check the decay tree // last arguments are the PDG code and nominal mass of the resonance, which are needed when // one wants to select true pairs only and/or he wants to compute rapidity or Mt AliRsnPairDef *pairDef = new AliRsnPairDef(AliRsnDaughter::kKaon, '+', AliRsnDaughter::kKaon, '-', 333, 1.019455); // definition of loop objects: // (a) 1 monitor for all tracks passing quality cuts // (b) 1 monitor for all tracks passing quality+PID cuts // (c) 1 pair filled with all tracks passing same cuts as (b) // (d) 1 pair like (c) but for mixing // (e) 1 pair like (c) but with true pairs only // NOTE: (c) and (d) are instantiated with same settings, they 
will be made // different after some settings done in second moment AliRsnLoopDaughter *loopQuality = new AliRsnLoopDaughter(Form("%s_mon_quality", suffix), 0, tracks); AliRsnLoopDaughter *loopPID = new AliRsnLoopDaughter(Form("%s_mon_pid" , suffix), 0, tracks); AliRsnLoopPair *loopPhi = new AliRsnLoopPair (Form("%s_unlike" , suffix), pairDef); AliRsnLoopPair *loopPhiMix = new AliRsnLoopPair (Form("%s_unlike" , suffix), pairDef); AliRsnLoopPair *loopPhiTrue = new AliRsnLoopPair (Form("%s_trues" , suffix), pairDef); // set additional option for true pairs (slot [0]) loopPhiTrue->SetOnlyTrue(kTRUE); loopPhiTrue->SetCheckDecay(kTRUE); // set mixing options loopPhi ->SetMixed(kFALSE); loopPhiMix ->SetMixed(kTRUE); loopPhiTrue->SetMixed(kFALSE); // assign the ID of the entry lists to be used by each pair to get selected daughters // in our case, the AliRsnInputHandler contains only one list for selecting kaons Int_t idQuality, idPID; AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager(); AliMultiInputEventHandler *multi = dynamic_cast<AliMultiInputEventHandler*>(mgr->GetInputEventHandler()); if (!multi) { myError("Needed a multi input handler!"); return kFALSE; } TObjArray *array = multi->InputEventHandlers(); AliRsnInputHandler *rsn = (AliRsnInputHandler*)array->FindObject("rsnInputHandler"); if (!rsn) { myError("Needed an RSN event handler"); return kFALSE; } AliRsnDaughterSelector *sel = rsn->GetSelector(); idQuality = sel->GetID("qualityTPC", kTRUE); idPID = sel->GetID("kaonTPC", kTRUE); if (idQuality < 0 || idPID < 0) { myError("List problems"); return kFALSE; } loopQuality->SetListID(idQuality); loopPID ->SetListID(idPID); loopPhi ->SetListID(0, idPID); loopPhi ->SetListID(1, idPID); loopPhiMix ->SetListID(0, idPID); loopPhiMix ->SetListID(1, idPID); loopPhiTrue->SetListID(0, idPID); loopPhiTrue->SetListID(1, idPID); // ---------------------------------------------------------------------------------------------- // -- EVENT CUTS 
-------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- // primary vertex: // - 2nd argument --> |Vz| range // - 3rd argument --> minimum required number of contributors // - 4th argument --> tells if TPC stand-alone vertexes must be accepted // we switch on the check for pileup AliRsnCutPrimaryVertex *cutVertex = new AliRsnCutPrimaryVertex("cutVertex", 10.0, 0, kFALSE); cutVertex->SetCheckPileUp(kTRUE); // primary vertex is always used AliRsnCutSet *eventCuts = new AliRsnCutSet("eventCuts", AliRsnTarget::kEvent); eventCuts->AddCut(cutVertex); eventCuts->SetCutScheme(cutVertex->GetName()); // add the event cuts to all loops loopQuality->SetEventCuts(eventCuts); loopPID ->SetEventCuts(eventCuts); loopPhi ->SetEventCuts(eventCuts); loopPhi ->SetEventCuts(eventCuts); loopPhiMix ->SetEventCuts(eventCuts); loopPhiMix ->SetEventCuts(eventCuts); loopPhiTrue->SetEventCuts(eventCuts); loopPhiTrue->SetEventCuts(eventCuts); // ---------------------------------------------------------------------------------------------- // -- PAIR CUTS --------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- // for pairs we define a rapidity windows, defined through a cut // --> NOTE: it needs a support AliRsnPairDef from which it takes the mass AliRsnValueStd *valRapidity = new AliRsnValueStd("valY", AliRsnValueStd::kPairY); AliRsnCutValue *cutRapidity = new AliRsnCutValue("cutY", -0.5, 0.5, isMC); valRapidity->SetSupportObject(pairDef); cutRapidity->SetValueObj(valRapidity); // cut set AliRsnCutSet *pairCuts = new AliRsnCutSet("pairCuts", AliRsnTarget::kMother); pairCuts->AddCut(cutRapidity); pairCuts->SetCutScheme(cutRapidity->GetName()); // add cut to pair loops only loopPhi ->SetPairCuts(pairCuts); loopPhi ->SetPairCuts(pairCuts); loopPhiMix 
->SetPairCuts(pairCuts); loopPhiMix ->SetPairCuts(pairCuts); loopPhiTrue->SetPairCuts(pairCuts); loopPhiTrue->SetPairCuts(pairCuts); // ---------------------------------------------------------------------------------------------- // -- COMPUTED VALUES & OUTPUTS ----------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- AliRsnValueStd *axisIM = new AliRsnValueStd("IM" , AliRsnValueStd::kPairInvMass , 0.9, 1.4, 0.001); AliRsnValueStd *axisPt = new AliRsnValueStd("PT" , AliRsnValueStd::kPairPt , 0.0, 5.0, 0.1 ); AliRsnValueStd *axisMomTPC = new AliRsnValueStd("pTPC", AliRsnValueStd::kTrackPtpc , 0.0, 5.0, 0.01 ); AliRsnValueStd *axisSigTPC = new AliRsnValueStd("sTPC", AliRsnValueStd::kTrackTPCsignal, 0.0, 500.0, 2.0 ); // output for monitors: // 2D histogram with TPC signal vs TPC momentum AliRsnListOutput *outMonitor = new AliRsnListOutput("mon", AliRsnListOutput::kHistoDefault); outMonitor->AddValue(axisMomTPC); outMonitor->AddValue(axisSigTPC); // output for pairs: // 2D histogram with inv.mass vs pt AliRsnListOutput *outPair = new AliRsnListOutput("pair", AliRsnListOutput::kHistoDefault); outPair->AddValue(axisIM); outPair->AddValue(axisPt); // add outputs to loops loopQuality->AddOutput(outMonitor); loopPID ->AddOutput(outMonitor); loopPhi ->AddOutput(outPair); loopPhiMix ->AddOutput(outPair); loopPhiTrue->AddOutput(outPair); // ---------------------------------------------------------------------------------------------- // -- CONCLUSION -------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------- task->Add(loopQuality); task->Add(loopPID ); task->Add(loopPhi ); task->Add(loopPhiMix ); task->Add(loopPhiTrue); return kTRUE; }
//___________________________________________________________________ Int_t extractPtResolution(TString pathNameData, TString listName, Int_t chargeMode /*kNegCharge = -1, kAllCharged = 0, kPosCharge = 1*/, Double_t lowerCentrality /*= -2*/, Double_t upperCentrality /*= -2*/, Double_t lowerJetPt /*= -1*/ , Double_t upperJetPt/* = -1*/) { if (listName == "") { listName = pathNameData; listName.Replace(0, listName.Last('/') + 1, ""); listName.ReplaceAll(".root", ""); } TString pathData = pathNameData; pathData.Replace(pathData.Last('/'), pathData.Length(), ""); TH1D* hPtResolutionFit[AliPID::kSPECIES] = {0x0, }; TH2D* hPtResolution[AliPID::kSPECIES] = {0x0, }; THnSparse* hPtResolutionRaw[AliPID::kSPECIES] = {0x0, }; TFile* fileData = TFile::Open(pathNameData.Data()); if (!fileData) { printf("Failed to open data file \"%s\"\n", pathNameData.Data()); return -1; } TObjArray* histList = (TObjArray*)(fileData->Get(listName.Data())); if (!histList) { printf("Failed to load list!\n"); return -1; } Double_t actualLowerCentrality = -2; Double_t actualUpperCentrality = -2; Double_t actualLowerJetPt = -1.; Double_t actualUpperJetPt = -1.; Bool_t restrictJetPtAxis = (lowerJetPt >= 0 && upperJetPt >= 0); const Bool_t restrictCentrality = ((lowerCentrality >= -1) && (upperCentrality >= -1)); for (Int_t species = 0; species < AliPID::kSPECIES; species++) { const TString sparseName = Form("fPtResolution_%s", AliPID::ParticleShortName(species)); hPtResolutionRaw[species] = (THnSparse*)histList->FindObject(sparseName.Data()); if (!hPtResolutionRaw[species]) { printf("Failed to load THnSparse for %s: %s!\n", AliPID::ParticleShortName(species), sparseName.Data()); return -1; } // Set proper errors, if not yet calculated if (!hPtResolutionRaw[species]->GetCalculateErrors()) { std::cout << "Re-calculating errors of " << hPtResolutionRaw[species]->GetName() << "..." 
<< std::endl; hPtResolutionRaw[species]->Sumw2(); Long64_t nBinsPtResolutionRaw = hPtResolutionRaw[species]->GetNbins(); Double_t binContent = 0; for (Long64_t bin = 0; bin < nBinsPtResolutionRaw; bin++) { binContent = hPtResolutionRaw[species]->GetBinContent(bin); hPtResolutionRaw[species]->SetBinError(bin, TMath::Sqrt(binContent)); } } // Integral(lowerCentBinLimit, uppCentBinLimit) will not be restricted if these values are kept const Int_t lowerCentralityBinLimit = restrictCentrality ? hPtResolutionRaw[species]->GetAxis(kPtResCentrality)->FindBin(lowerCentrality + 0.001) : -1; const Int_t upperCentralityBinLimit = restrictCentrality ? hPtResolutionRaw[species]->GetAxis(kPtResCentrality)->FindBin(upperCentrality - 0.001) : -2; if (restrictCentrality) { actualLowerCentrality = hPtResolutionRaw[species]->GetAxis(kPtResCentrality)->GetBinLowEdge(lowerCentralityBinLimit); actualUpperCentrality = hPtResolutionRaw[species]->GetAxis(kPtResCentrality)->GetBinLowEdge(upperCentralityBinLimit); hPtResolutionRaw[species]->GetAxis(kPtResCentrality)->SetRange(lowerCentralityBinLimit, upperCentralityBinLimit); } const Bool_t restrictCharge = (chargeMode != kAllCharged); Int_t lowerChargeBinLimit = -1; Int_t upperChargeBinLimit = -2; if (restrictCharge) { // Add subtract a very small number to avoid problems with values right on the border between to bins if (chargeMode == kNegCharge) { lowerChargeBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResCharge)->FindBin(-1. + 0.001); upperChargeBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResCharge)->FindBin(0. - 0.001); } else if (chargeMode == kPosCharge) { lowerChargeBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResCharge)->FindBin(0. + 0.001); upperChargeBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResCharge)->FindBin(1. 
- 0.001); } // Check if the values look reasonable if (lowerChargeBinLimit <= upperChargeBinLimit && lowerChargeBinLimit >= 1 && upperChargeBinLimit <= hPtResolutionRaw[species]->GetAxis(kPtResCharge)->GetNbins()) { // OK } else { std::cout << std::endl; std::cout << "Requested charge range out of limits or upper and lower limit are switched!" << std::endl; return -1; } hPtResolutionRaw[species]->GetAxis(kPtResCharge)->SetRange(lowerChargeBinLimit, upperChargeBinLimit); } // If desired, restrict jetPt axis Int_t lowerJetPtBinLimit = -1; Int_t upperJetPtBinLimit = -1; if (restrictJetPtAxis) { // Add subtract a very small number to avoid problems with values right on the border between to bins lowerJetPtBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->FindBin(lowerJetPt + 0.001); upperJetPtBinLimit = hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->FindBin(upperJetPt - 0.001); // Check if the values look reasonable if (lowerJetPtBinLimit <= upperJetPtBinLimit && lowerJetPtBinLimit >= 1 && upperJetPtBinLimit <= hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->GetNbins()) { actualLowerJetPt = hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->GetBinLowEdge(lowerJetPtBinLimit); actualUpperJetPt = hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->GetBinUpEdge(upperJetPtBinLimit); restrictJetPtAxis = kTRUE; } else { std::cout << std::endl; std::cout << "Requested jet pT range out of limits or upper and lower limit are switched!" 
<< std::endl; return -1; } } std::cout << "jet pT: "; if (restrictJetPtAxis) { std::cout << actualLowerJetPt << " - " << actualUpperJetPt << std::endl; hPtResolutionRaw[species]->GetAxis(kPtResJetPt)->SetRange(lowerJetPtBinLimit, upperJetPtBinLimit); } else { std::cout << "All" << std::endl; } hPtResolution[species] = hPtResolutionRaw[species]->Projection(kPtResGenPt, kPtResRecPt, "e"); hPtResolution[species]->SetName(Form("hPtResolution_%s", AliPID::ParticleShortName(species))); hPtResolution[species]->SetTitle(Form("%s", AliPID::ParticleLatexName(species))); hPtResolution[species]->SetStats(kFALSE); hPtResolution[species]->SetLineColor(getLineColorAliPID(species)); hPtResolution[species]->SetMarkerColor(getLineColorAliPID(species)); normaliseHist(hPtResolution[species]); TObjArray aSlices; hPtResolution[species]->FitSlicesY(0, 0, -1, 0, "QNR", &aSlices); TH1D* hMean = (TH1D*)(aSlices.At(1)); TH1D* hSigma = (TH1D*)(aSlices.At(2)); hPtResolutionFit[species] = new TH1D(*hSigma); hPtResolutionFit[species]->SetName(Form("hPtResolutionFit_%s", AliPID::ParticleShortName(species))); hPtResolutionFit[species]->SetTitle(Form("%s", AliPID::ParticleLatexName(species))); hPtResolutionFit[species]->SetLineColor(getLineColorAliPID(species)); hPtResolutionFit[species]->SetMarkerColor(getLineColorAliPID(species)); hPtResolutionFit[species]->Divide(hSigma, hMean); } // Save results to file TString chargeString = ""; if (chargeMode == kPosCharge) chargeString = "_posCharge"; else if (chargeMode == kNegCharge) chargeString = "_negCharge"; TString saveFileName = pathNameData; saveFileName.Replace(0, pathNameData.Last('/') + 1, ""); TString savePath = pathNameData; savePath.ReplaceAll(Form("/%s", saveFileName.Data()), ""); saveFileName.Prepend("output_extractedPTResolution_"); TString centralityString = restrictCentrality ? Form("_centrality_%.0f_%.0f.root", actualLowerCentrality, actualUpperCentrality) : "_centrality_all"; TString jetPtString = restrictJetPtAxis ? 
Form("_jetPt_%.0f_%.0f.root", actualLowerJetPt, actualUpperJetPt) : ""; saveFileName.ReplaceAll(".root", Form("%s%s%s.root", centralityString.Data(), jetPtString.Data(), chargeString.Data())); TString saveFilePathName = Form("%s/%s", savePath.Data(), saveFileName.Data()); TFile* saveFile = TFile::Open(saveFilePathName.Data(), "RECREATE"); if (!saveFile) { printf("Failed to save results to file \"%s\"!\n", saveFilePathName.Data()); return -1; } saveFile->cd(); for (Int_t species = 0; species < AliPID::kSPECIES; species++) { if (hPtResolution[species]) hPtResolution[species]->Write(); if (hPtResolutionFit[species]) hPtResolutionFit[species]->Write(); } TNamed* settings = new TNamed( Form("Settings: Data file \"%s\", lowerCentrality %.3f, upperCentrality %.3f, lowerJetPt %.1f, upperJetPt %.1f\n", pathNameData.Data(), lowerCentrality, upperCentrality, lowerJetPt, upperJetPt), ""); settings->Write(); saveFile->Close(); return 0; }
void printCalibStat(Int_t run, const char * fname, TTreeSRedirector * pcstream){ // // Dump the statistical information about all histograms in the calibration files // into the statistical tree, print on the screen (log files) as well // // // 1. Default dump for all histograms // Information to dump: // stat =Entries, Mean, MeanError, RMS, MaxBin // Branch naming convention: // <detName>_<hisName><statName> // // 2. Detector statistical information - to be implemented by expert // - First version implemented by MI // // TFile *fin = TFile::Open(fname); if (!fin) return; const Int_t kMaxHis=10000; TList * keyList = fin->GetListOfKeys(); Int_t nkeys=keyList->GetEntries(); Double_t *hisEntries = new Double_t[kMaxHis]; Double_t *hisMean = new Double_t[kMaxHis]; Double_t *hisMeanError = new Double_t[kMaxHis]; Double_t *hisRMS = new Double_t[kMaxHis]; Double_t *hisMaxBin = new Double_t[kMaxHis]; Int_t counter=0; if (pcstream) (*pcstream)<<"calibStatAll"<<"run="<<run; for (Int_t ikey=0; ikey<nkeys; ikey++){ TObject * object = fin->Get(keyList->At(ikey)->GetName()); if (!object) continue; if (object->InheritsFrom("TCollection")==0) continue; TSeqCollection *collection = (TSeqCollection*)object; Int_t nentries= collection->GetEntries(); for (Int_t ihis=0; ihis<nentries; ihis++){ TObject * ohis = collection->At(ihis); if (!ohis) continue; if (ohis->InheritsFrom("TH1")==0) continue; TH1* phis = (TH1*)ohis; hisEntries[counter]=phis->GetEntries(); Int_t idim=1; if (ohis->InheritsFrom("TH2")) idim=2; if (ohis->InheritsFrom("TH3")) idim=3; hisMean[counter]=phis->GetMean(idim); hisMeanError[counter]=phis->GetMeanError(idim); hisRMS[counter]=phis->GetRMS(idim); hisMaxBin[counter]=phis->GetXaxis()->GetBinCenter(phis->GetMaximumBin()); if (pcstream) (*pcstream)<<"calibStatAll"<< Form("%s_%sEntries=",keyList->At(ikey)->GetName(), phis->GetName())<<hisEntries[counter]<< Form("%s_%sMean=",keyList->At(ikey)->GetName(), phis->GetName())<<hisMean[counter]<< 
Form("%s_%sMeanError=",keyList->At(ikey)->GetName(), phis->GetName())<<hisMeanError[counter]<< Form("%s_%sRMS=",keyList->At(ikey)->GetName(), phis->GetName())<<hisRMS[counter]<< Form("%s_%sMaxBin=",keyList->At(ikey)->GetName(), phis->GetName())<<hisMaxBin[counter]; //printf("Histo:\t%s_%s\t%f\t%d\n",keyList->At(ikey)->GetName(), phis->GetName(), hisEntries[counter],idim); counter++; } delete object; } // // Expert dump example (MI first iteration): // // 0.) TOF dump // Int_t tofEvents=0; Int_t tofTracks=0; TList * TOFCalib = (TList*)fin->Get("TOFHistos"); if (TOFCalib) { TH1 *histoEvents = (TH1*)TOFCalib->FindObject("hHistoVertexTimestamp"); TH1 *histoTracks = (TH1*)TOFCalib->FindObject("hHistoDeltatTimestamp"); if (histoEvents && histoTracks){ tofEvents = TMath::Nint(histoEvents->GetEntries()); tofTracks = TMath::Nint(histoTracks->GetEntries()); } delete TOFCalib; } printf("Monalisa TOFevents\t%d\n",tofEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"TOFevents="<<tofEvents; printf("Monalisa TOFtracks\t%d\n",tofTracks); if (pcstream) (*pcstream)<<"calibStatAll"<<"TOFtracks="<<tofTracks; // // 1.) TPC dump - usefull events/tracks for the calibration // Int_t tpcEvents=0; Int_t tpcTracks=0; TObject* obj = dynamic_cast<TObject*>(fin->Get("TPCCalib")); TObjArray* array = dynamic_cast<TObjArray*>(obj); TDirectory* dir = dynamic_cast<TDirectory*>(obj); AliTPCcalibTime * calibTime = NULL; if (dir) { calibTime = dynamic_cast<AliTPCcalibTime*>(dir->Get("calibTime")); } else if (array){ calibTime = (AliTPCcalibTime *)array->FindObject("calibTime"); } if (calibTime) { tpcEvents = TMath::Nint(calibTime->GetTPCVertexHisto(0)->GetEntries()); tpcTracks = TMath::Nint(calibTime->GetResHistoTPCITS(0)->GetEntries()); } printf("Monalisa TPCevents\t%d\n",tpcEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"TPCevents="<<tpcEvents; printf("Monalisa TPCtracks\t%d\n",tpcTracks); if (pcstream) (*pcstream)<<"calibStatAll"<<"TPCtracks="<<tpcTracks; // // 2. 
TRD dump // Int_t trdEvents=0; Int_t trdTracks=0; TList * TRDCalib = (TList*)fin->Get("TRDCalib"); if (TRDCalib) { TH1 *histoEvents = (TH1*)TRDCalib->FindObject("NEventsInput_AliTRDCalibTask"); TH1 *histoTracks = (TH1*)TRDCalib->FindObject("AbsoluteGain_AliTRDCalibTask"); if (histoEvents && histoTracks){ trdEvents= TMath::Nint(histoEvents->GetEntries()); trdTracks= TMath::Nint(histoTracks->GetEntries()); } delete TRDCalib; } printf("Monalisa TRDevents\t%d\n",trdEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"TRDevents="<<trdEvents; printf("Monalisa TRDtracks\t%d\n",trdTracks); if (pcstream) (*pcstream)<<"calibStatAll"<<"TRDtracks="<<trdTracks; // // 3. T0 dump // Int_t T0Events=0; TList * T0Calib = (TList*)fin->Get("T0Calib"); if (T0Calib) { TH1 *histoEvents = (TH1*) T0Calib->FindObject("fTzeroORAplusORC"); if (histoEvents){ T0Events= TMath::Nint(histoEvents->GetEntries()); } delete T0Calib; } printf("Monalisa T0events\t%d\n",T0Events); if (pcstream) (*pcstream)<<"calibStatAll"<<"T0events="<<T0Events; // // 4. Mean vertex - dump // Not present in CPass1 /* Int_t meanVertexEvents=0; TList * meanVertexCalib = (TList*)fin->Get("MeanVertex"); if (meanVertexCalib) { TH1 *histoEvents = (TH1*) meanVertexCalib->FindObject("hTRKVertexX"); if (histoEvents){ meanVertexEvents = TMath::Nint(histoEvents->GetEntries()); } delete meanVertexCalib; } printf("Monalisa MeanVertexevents\t%d\n",meanVertexEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"MeanVertexevents="<<meanVertexEvents; */ // // 5. 
SDD dump // Int_t sddEvents=0; Int_t sddTracks=0; TList * SDDCalib = (TList*)fin->Get("clistSDDCalib"); if (SDDCalib) { TH1 *histoEvents = (TH1*) SDDCalib->FindObject("hNEvents"); if (histoEvents ){ sddEvents = TMath::Nint(histoEvents->GetBinContent(4)); sddTracks = TMath::Nint(histoEvents->GetBinContent(5)); } delete SDDCalib; } printf("Monalisa SDDevents\t%d\n",sddEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"SDDevents="<<sddEvents; printf("Monalisa SDDtracks\t%d\n",sddTracks); if (pcstream) (*pcstream)<<"calibStatAll"<<"SDDtracks="<<sddTracks; // // 6. AD dump // Int_t adEvents=0; TDirectory *adDir = (TDirectory*)fin->Get("ADCalib"); if (adDir) { TList *adList = (TList*) adDir->Get("ADCalibListHist"); if (adList) { TH2* adHistInt0 = (TH2*) adList->FindObject("hCh00_bc10_int0"); if (adHistInt0) adEvents += TMath::Nint(adHistInt0->GetEntries()); TH2* adHistInt1 = (TH2*) adList->FindObject("hCh00_bc10_int1"); if (adHistInt1) adEvents += TMath::Nint(adHistInt1->GetEntries()); delete adList; } } printf("Monalisa ADevents\t%d\n",adEvents); if (pcstream) (*pcstream)<<"calibStatAll"<<"ADevents="<<adEvents; // if (pcstream) (*pcstream)<<"calibStatAll"<<"\n"; delete fin; }
// Unpack SOFIA/R3B LMD data listed in InFiles into a ROOT file.
//
// InFiles  - name of the run-list file passed to the MBS unpacker
// fDetList - array of detector tags; a task is added for each tag found
//            ("Crate1", "Crate2", "LCP_TOF", "LAND")
//
// BUGFIX: the original signature declared `TString InFiles = "runlist.dat"`
// followed by `TObjArray& fDetList` with no default. A non-trailing default
// argument is ill-formed C++ and could never be used (callers always had to
// supply both arguments), so the dead default value has been removed.
void sofiaall(TString InFiles, TObjArray& fDetList)
{
  // Output file
  TString outFile = "./r3bunpack.root";

  // In general, the following parts need not be touched
  // ========================================================================

  // ----    Debug option   -------------------------------------------------
  gDebug = 0;
  // ------------------------------------------------------------------------

  // -----   Timer   --------------------------------------------------------
  TStopwatch timer;
  timer.Start();
  // ------------------------------------------------------------------------

  // ----  Load libraries   -------------------------------------------------
  gROOT->LoadMacro("$VMCWORKDIR/gconfig/basiclibs.C");
  basiclibs();
  gSystem->Load("libFairTools");
  gSystem->Load("libGeoBase");
  gSystem->Load("libParBase");
  gSystem->Load("libBase");
  gSystem->Load("libR3BMbs");
  gSystem->Load("libMbsAPI");
  gSystem->Load("libSOFIAMCStack");
  gSystem->Load("libR3BRootEvent");
  gSystem->Load("libR3BLANDEvent");
  gSystem->Load("libR3BUnpack");
  //gSystem->Load("libR3BMusicEvent");
  //gSystem->Load("libR3BProEvent");
  //gSystem->Load("libR3BCrateEvent");

  // -----   Create analysis run   ------------------------------------------
  FairRunAna *fRun= new FairRunAna();
  fRun->SetOutputFile(outFile);

  // ---- Load MBS (always added; detector tasks below are optional)
  MBSUnpack *MBSunpack= new MBSUnpack("MBS unpack", InFiles);
  fRun->AddTask(MBSunpack);

  //SOFIA Crate 1
  if (fDetList.FindObject("Crate1") ) {
    CrateUnpack *CRATEunpack= new CrateUnpack("unpack");
    fRun->AddTask(CRATEunpack);
  }
  //SOFIA Crate 2
  if (fDetList.FindObject("Crate2") ) {
    MUSICUnpack *MUSICunpack= new MUSICUnpack("MUSIC unpack");
    fRun->AddTask(MUSICunpack);
  }
  // SOFIA Tof for LCP
  if (fDetList.FindObject("LCP_TOF") ) {
    ProUnpack *PROunpack= new ProUnpack("PROTON unpack");
    fRun->AddTask(PROunpack);
  }
  // SOFIA Land Detector
  if (fDetList.FindObject("LAND") ) {
    LANDUnpack *LANDunpack= new LANDUnpack("LAND unpack");
    fRun->AddTask(LANDunpack);
  }

  // -----   Initialize analysis run   --------------------------------------
  fRun->Init();
  fRun->RunOnLmdFiles();

  // -----   Finish   -------------------------------------------------------
  timer.Stop();
  Double_t rtime = timer.RealTime();
  Double_t ctime = timer.CpuTime();
  cout << endl << endl;
  cout << "Macro finished succesfully." << endl;
  cout << "Real time " << rtime/60.0 << " min, CPU time " << ctime/60.0 << "min" << endl << endl;
  // ------------------------------------------------------------------------
  cout << " Test passed" << endl;
  cout << " All ok " << endl;
}
// Read the CTP trigger-input configuration from the OCDB for each run in a run
// list, then group runs that share an identical set of inputs and print, for
// each group, a ready-to-paste SetTrigInputsMap("...") line plus the run range.
//
// runListFilename - text file with one run number per line (non-numeric lines skipped)
// selectedInputs  - comma-separated input names to keep; empty string keeps all
// defaultStorage  - OCDB storage URI (default "raw://")
void TriggerInputsForMuonEventCuts ( TString runListFilename, TString selectedInputs="", TString defaultStorage = "raw://" )
{
  AliCDBManager::Instance()->SetDefaultStorage(defaultStorage.Data());

  TObjArray inputsList;
  inputsList.SetOwner();
  TObjArray* selectedInputsList = selectedInputs.Tokenize(",");

  // Read input run list
  ifstream inFile(runListFilename.Data());
  TString srun = "";
  if ( inFile.is_open() ) {
    while ( ! inFile.eof() ) {
      srun.ReadLine(inFile,kFALSE);
      if ( ! srun.IsDigit() ) continue;  // skip blank/comment/garbage lines

      // For each run, read trigger inputs from OCDB
      Int_t runNumber = srun.Atoi();
      AliCDBManager::Instance()->SetRun(runNumber);

      // Get trigger class configuration
      AliCDBEntry* entry = AliCDBManager::Instance()->Get("GRP/CTP/Config");
      if ( ! entry ) continue;

      // The run number is stashed in the list's UniqueID so the grouping
      // loop below can recover it without a parallel array.
      THashList* runInputs = new THashList();
      runInputs->SetOwner();
      runInputs->SetUniqueID((UInt_t)runNumber);
      AliTriggerConfiguration* trigConf = (AliTriggerConfiguration*)entry->GetObject();
      const TObjArray& trigInputsArray = trigConf->GetInputs();
      AliTriggerInput* trigInput = 0x0;
      TIter next(&trigInputsArray);
      while ( ( trigInput = static_cast<AliTriggerInput*>(next()) ) ) {
        // If a selection was given, keep only the named inputs
        if ( selectedInputsList->GetEntriesFast() > 0 && ! selectedInputsList->FindObject(trigInput->GetName()) ) continue;
        // Input mask is a single bit; Log2 recovers the bit position (input ID)
        Int_t inputId = (Int_t)TMath::Log2(trigInput->GetMask());
        TObjString* currInput = new TObjString(trigInput->GetName());
        currInput->SetUniqueID(inputId);  // input ID travels in the UniqueID
        runInputs->Add(currInput);
      }
      inputsList.Add(runInputs);
    }
    inFile.close();
  }
  delete selectedInputsList;

  // Loop on the trigger inputs
  // and group runs with an equal list of inputs.
  // checkMask[j]==0 marks run j as already absorbed into an earlier group.
  Int_t nentries = inputsList.GetEntries();
  TArrayI checkMask(nentries);
  checkMask.Reset(1);
  for ( Int_t irun=0; irun<nentries; irun++ ) {
    if ( checkMask[irun] == 0 ) continue;
    THashList* currList = static_cast<THashList*>(inputsList.At(irun));
    TString runRange = Form("Run range: %u", currList->GetUniqueID());
    for ( Int_t jrun=irun+1; jrun<nentries; jrun++ ) {
      if ( checkMask[jrun] == 0 ) continue;
      THashList* checkList = static_cast<THashList*>(inputsList.At(jrun));
      // Two runs match when every input of the current run exists in the other
      // run with the same input ID.
      // NOTE(review): this checks currList's inputs are a subset of checkList's;
      // a strict equality would also compare the list sizes — confirm intent.
      Bool_t isDifferent = kFALSE;
      for ( Int_t itrig=0; itrig<currList->GetEntries(); itrig++ ) {
        TObjString* currInput = static_cast<TObjString*>(currList->At(itrig));
        TObject* checkInput = checkList->FindObject(currInput->GetName());
        if ( ! checkInput || checkInput->GetUniqueID() != currInput->GetUniqueID() ) {
          isDifferent = kTRUE;
          break;
        }
      } // loop on trigger inputs
      if ( isDifferent ) continue;
      checkMask[jrun] = 0;  // absorbed into the current group
      runRange += Form(",%u", checkList->GetUniqueID());
    } // loop on runs

    // Emit the configuration line: "name:id," pairs, trailing comma stripped
    // by the ReplaceAll below, followed by the grouped run numbers.
    TString outString = "\nSetTrigInputsMap(\"";
    for ( Int_t itrig=0; itrig<currList->GetEntries(); itrig++ ) {
      TObjString* currInput = static_cast<TObjString*>(currList->At(itrig));
      outString += Form("%s:%u,",currInput->GetString().Data(), currInput->GetUniqueID());
    }
    outString.Append("\");\n");
    outString.ReplaceAll(",\"","\"");
    outString += runRange;
    printf("%s\n", outString.Data());
  } // loop on runs
}
void extractJetTrack(){ //TString filename = "dj_RECOPAT_18_1_rhi"; TString filename = "dj_HCPR-GoodTrk1123_All0"; //TString filename = "dj_HydjetQ_DJQ80_F10GSR_GoodTrk1123"; //TString filename = "dj_HydjetQ_DJUQ80_F10GSR_GoodTrk1123"; TFile *f = new TFile(Form("/home/sungho/sctch101/data/jettrack/%s.root",filename.Data())); TTree *djtree = (TTree*) f->Get("djcalo/djTree"); bool debug = false; maxSampling = 10; // max number of event-by-event histogram int count=0; // variables float minpt = 0.9; float centMin = 0, centMax = 10; float njet_min = 100; float njeteta_max = 2.0; float ajet_min = 50; float ajeteta_max = 2.0; // event number of interest targetEvtNum.push_back(1490824); targetEvtNum.push_back(2084186); targetEvtNum.push_back(2983992); // prepare for output files, histograms, etc; prepareHist(); // event-by-event Nevt = djtree->GetEntries(); float cent = 0; djtree->SetBranchAddress("cent",¢); int runNum = 0, lumNum = 0, evtNum = 0; djtree->SetBranchAddress("run",&runNum); djtree->SetBranchAddress("lumi",&lumNum); djtree->SetBranchAddress("evt",&evtNum); // tracks and jet int nTrk = 0; djtree->SetBranchAddress("evtnp",&nTrk); // each jet float njeteta=0, njetphi=0, njet=0; // near side jet float ajeteta=0, ajetphi=0, ajet=0; // away side jet djtree->SetBranchAddress("nljeta",&njeteta); djtree->SetBranchAddress("nljphi",&njetphi); djtree->SetBranchAddress("nljet",&njet); djtree->SetBranchAddress("aljeta",&ajeteta); djtree->SetBranchAddress("aljphi",&ajetphi); djtree->SetBranchAddress("aljet",&ajet); // each tracks djtree->SetBranchAddress("ppt",&trkpt); djtree->SetBranchAddress("peta",&trketa); djtree->SetBranchAddress("pphi",&trkphi); for(Long_t i=0;i<djtree->GetEntries();i++){ djtree->GetEntry(i); if((i%100)==0) cout<<"counting every 100 events = "<<i<<endl; if(debug) cout<<"Evt: "<<evtNum<<" Near side jet Et = "<<njet<<" number of tracks = "<<nTrk<<endl; if(cent<centMin || cent>centMax) continue; // centrality if(njet<njet_min || njet>500) continue; // 
near side jet et cut if(fabs(njeteta)>njeteta_max) continue; // near side jet eta cut if(ajet<ajet_min) continue; if(fabs(ajeteta)>ajeteta_max) continue; float dphi = nljphi - aljphi; if(fabs(dphi)>=(TMath::Pi())) dphi = 2.*TMath::Pi() - fabs(dphi); if(dphi>(TMath::Pi()*(5/6)) continue; // dphi cut for back-to-back jets for(Long_t j=0;j<2;j++){ float jet = (j==0) ? njet : ajet; float jeta = (j==0) ? njeteta : ajeteta; float jphi = (j==0) ? njetphi : ajetphi; // randomize //float jeta = (j==0) ? rdn.Uniform(-2,2) : rdn.Uniform(-2,2); //float jphi = 0; //if(j==0) jphi = rdn.Uniform(0,TMath::Pi()); //else jphi = -1.*jphi; //back to back if(hdEtadPhiJetArray.FindObject(Form("hdEtadPhiJet_%d",count)) && count<maxSampling) ((TH2F*) hdEtadPhiJetArray.FindObject(Form("hdEtadPhiJet_%d",count)))->Fill(jeta,jphi); for(int z=0;z<targetEvtNum.size();z++){ if(runNum!=151969) continue; // harcoded if(evtNum==targetEvtNum[z] && hdEtadPhiJetTagArray.FindObject(Form("hdEtadPhiJetTag_%d",evtNum))) //redundent ((TH2F*) hdEtadPhiJetTagArray.FindObject(Form("hdEtadPhiJetTag_%d",evtNum)))->Fill(jeta,jphi); } // pre-loop to determine pt sum ---------------------------------------------------------- float ptSum_N = 0.0, ptSum_A = 0.0; for(Long_t n=0;n<nTrk;n++){ if(trkpt[n]<minpt) continue; // low pt cut float deta_jt_pre = fabs(jeta-trketa[n]); float dphi_jt_pre = fabs(jphi-trkphi[n]); if(dphi_jt_pre>=(TMath::Pi())) dphi_jt_pre = 2.0*TMath::Pi() - fabs(dphi_jt_pre); float dR = TMath::Sqrt(deta_jt_pre*deta_jt_pre+dphi_jt_pre*dphi_jt_pre); if(fabs(trketa[n])<5.0){ if(j==0 && dR<0.8) ptSum_N = ptSum_N + trkpt[n]; if(j==1 && dR<0.8) ptSum_A = ptSum_A + trkpt[n]; } } // end of pre-loop --------------------------------------------------------------------- if(j==0) { hdNdJetEt_NJet->Fill(jet,jet), hdNdTrkSPt_NJet->Fill(jet,ptSum_N); hdNPtEtRatio_NJet->Fill(jet,ptSum_N/jet), hdNPtEtAsymm_NJet->Fill(jet,fabs(jet-ptSum_N)/(jet+ptSum_N)); }else{ hdNdJetEt_AJet->Fill(jet,jet), 
hdNdTrkSPt_AJet->Fill(jet,ptSum_A); hdNPtEtRatio_AJet->Fill(jet,ptSum_A/jet), hdNPtEtAsymm_AJet->Fill(jet,fabs(jet-ptSum_A)/(jet+ptSum_A)); } // track loop for(Long_t k=0;k<nTrk;k++){ if(trkpt[k]<minpt) continue; // low pt cut float deta_jt = fabs(jeta-trketa[k]); float dphi_jt = fabs(jphi-trkphi[k]); if(fabs(dphi_jt)>=(TMath::Pi())) dphi_jt = 2.0*TMath::Pi() - dphi_jt; // by convention dPhi < Pi float dr_jt = TMath::Sqrt(deta_jt*deta_jt+dphi_jt*dphi_jt); float deta_jt_v2 = (jeta-trketa[k]); // can be negative float dphi_jt_v2 = (jphi-trkphi[k]); if(fabs(trketa[k])<5.0){ if(j==0 && dphi_jt<(0.5*(TMath::Pi()))) hdNdPt_NJet->Fill(trkpt[k]), hdNdPt_NJet_vbin->Fill(trkpt[k]), hdNdZ_NJet->Fill(trkpt[k]/ptSum_N); if(j==1 && dphi_jt<(0.5*(TMath::Pi()))) hdNdPt_AJet->Fill(trkpt[k]), hdNdPt_AJet_vbin->Fill(trkpt[k]), hdNdZ_AJet->Fill(trkpt[k]/ptSum_A); if(j==0 && fabs(dr_jt)<0.8) { hdNdNR_NJet->Fill(dr_jt/0.8), hdNdNRW_NJet->Fill(dr_jt/0.8,trkpt[k]); //hdNPtEtRatio_NJet->Fill(jet,ptSum_N/jet), hdNPtEtAsymm_NJet->Fill(jet,fabs(jet-ptSum_N)/(jet+ptSum_N)); } if(j==1 && fabs(dr_jt)<0.8) { hdNdNR_AJet->Fill(dr_jt/0.8), hdNdNRW_AJet->Fill(dr_jt/0.8,trkpt[k]); //hdNPtEtRatio_AJet->Fill(jet,ptSum_A/jet), hdNPtEtAsymm_AJet->Fill(jet,fabs(jet-ptSum_A)/(jet+ptSum_A)); } } //if(trkpt[k]<minpt) continue; // low pt cut if(debug) cout<<" trk pt = "<<trkpt[k]<<endl; if(hdEtadPhiTrkArray.FindObject(Form("hdEtadPhiTrk_%d",count)) && count<50){ ((TH2F*) hdEtadPhiTrkArray.FindObject(Form("hdEtadPhiTrk_%d",count)))->Fill(trketa[k],trkphi[k]); ((TH2F*) hdEtadPhiTrkWArray.FindObject(Form("hdEtadPhiTrkW_%d",count)))->Fill(trketa[k],trkphi[k],trkpt[k]); // with weight } if(j==0 && dphi_jt<(0.5*(TMath::Pi()))) hdN_dEtadPhiTrkNJet->Fill(deta_jt_v2,dphi_jt_v2); if(j==1 && dphi_jt<(0.5*(TMath::Pi()))) hdN_dEtadPhiTrkAJet->Fill(deta_jt_v2,dphi_jt_v2); for(int z=0;z<targetEvtNum.size();z++){ if(runNum!=151969) continue; // harcoded if(evtNum==targetEvtNum[z] && 
hdEtadPhiTrkTagArray.FindObject(Form("hdEtadPhiTrkTag_%d",evtNum))) { ((TH2F*) hdEtadPhiTrkTagArray.FindObject(Form("hdEtadPhiTrkTag_%d",evtNum)))->Fill(trketa[k],trkphi[k]); ((TH2F*) hdEtadPhiTrkWTagArray.FindObject(Form("hdEtadPhiTrkWTag_%d",evtNum)))->Fill(trketa[k],trkphi[k],trkpt[k]); // with weight } } if(j==0) hdN_dPhiTrkNJet->Fill(dphi_jt), hdN_dEtaTrkNJet->Fill(deta_jt), hdN_dRTrkNJet->Fill(dr_jt); else hdN_dPhiTrkAJet->Fill(dphi_jt), hdN_dEtaTrkAJet->Fill(deta_jt), hdN_dRTrkAJet->Fill(dr_jt); for(Long_t l=0;l<nTrk;l++){ if(trkpt[l]<minpt) continue; if(l==k) continue; // to remove auto-correlation float deta_tt = fabs(trketa[k]-trketa[l]); float dphi_tt = fabs(trkphi[k]-trkphi[l]); if(fabs(dphi_tt)>=(TMath::Pi())) dphi_tt = 2.0*TMath::Pi() - fabs(dphi_tt); // by convention dPhi < Pi float dr_tt = TMath::Sqrt(deta_tt*deta_tt+dphi_tt*dphi_tt); hdN_dPhiTrkTrk_dPhiTrkJet->Fill(dphi_tt,dphi_jt); hdN_dEtaTrkTrk_dEtaTrkJet->Fill(deta_tt,deta_jt); hdN_dRTrkTrk_dRTrkJet->Fill(dr_tt,dr_jt); hdN_dPhiTrkTrk_dRTrkJet->Fill(dphi_tt,dr_jt); } } // end of track loop } // end of jet loop count++; } // end of event loop // save in output files saveHistRoot(Form("./rootOutput/ANAv2_RandomJet_%s_minpT%1.1f_centFrom%1.1fto%1.1f.root",filename.Data(),minpt,centMin,centMax)); }
//get the RooDataSet, multiply each roorealvar by the event_weight
//
// Builds a weighted RooDataSet from the TTree `treename` inside directory f,
// importing only the branches whose names match the file-level global
// RooRealVar objects (roovar1 ... rooweight). `roodset` must be passed in as
// NULL and is set to the newly allocated dataset (caller owns it).
// NOTE(review): WeightVar(...) is unqualified — presumably `using namespace
// RooFit` is in effect elsewhere in this file; confirm.
void get_roodset_from_ttree(TDirectoryFile *f, TString treename, RooDataSet* &roodset){

  cout << "Creating roodset from file: " << f->GetName() << " with tree: " << treename.Data() << endl;

  TTree *t = NULL;
  assert(roodset==NULL);  // guard against overwriting / leaking a previous dataset
  f->GetObject(treename.Data(),t);
  if (!t) {cout << "Impossible to find TTree " << treename.Data() << endl; return;}

  TObjArray *objs = t->GetListOfBranches();
  //disables all branches
  t->SetBranchStatus("*",0);  // re-enabled selectively below for matched branches

  // One local float per candidate branch; filled by TTree::GetEntry.
  float v_rooisopv1;
  float v_rooisopv2;
  float v_rooisowv1;
  float v_rooisowv2;
  float v_roovar1;
  float v_roovar2;
  float v_roopt1;
  float v_roosieie1;
  float v_rooeta1;
  float v_roopt2;
  float v_roosieie2;
  float v_rooeta2;
  float v_roodiphopt;
  float v_roodiphomass;
  float v_roorho;
  float v_roosigma;
  float v_roonvtx;
  float v_rooweight;

  TBranch *b_roovar1;
  TBranch *b_roovar2;
  TBranch *b_rooisopv1;
  TBranch *b_rooisopv2;
  TBranch *b_rooisowv1;
  TBranch *b_rooisowv2;
  TBranch *b_roopt1;
  TBranch *b_roosieie1;
  TBranch *b_rooeta1;
  TBranch *b_roopt2;
  TBranch *b_roosieie2;
  TBranch *b_rooeta2;
  TBranch *b_roodiphopt;
  TBranch *b_roodiphomass;
  TBranch *b_roorho;
  TBranch *b_roosigma;
  TBranch *b_roonvtx;
  TBranch *b_rooweight;

  // Three parallel arrays: value slot, branch slot, and the global RooRealVar.
  // Index i must refer to the same variable in all three — keep the ordering
  // in sync when editing.
  const int nvars = 18;
  float* ptrs[nvars]={&v_roovar1,&v_roovar2,&v_rooisopv1,&v_rooisopv2,&v_rooisowv1,&v_rooisowv2,&v_roopt1,&v_roosieie1,&v_rooeta1,&v_roopt2,&v_roosieie2,&v_rooeta2,&v_roodiphopt,&v_roodiphomass,&v_roorho,&v_roosigma,&v_roonvtx,&v_rooweight};
  TBranch** branches[nvars]={&b_roovar1,&b_roovar2,&b_rooisopv1,&b_rooisopv2,&b_rooisowv1,&b_rooisowv2,&b_roopt1,&b_roosieie1,&b_rooeta1,&b_roopt2,&b_roosieie2,&b_rooeta2,&b_roodiphopt,&b_roodiphomass,&b_roorho,&b_roosigma,&b_roonvtx,&b_rooweight};
  RooRealVar* rooptrs[nvars]={roovar1,roovar2,rooisopv1,rooisopv2,rooisowv1,rooisowv2,roopt1,roosieie1,rooeta1,roopt2,roosieie2,rooeta2,roodiphopt,roodiphomass,roorho,roosigma,roonvtx,rooweight};

  bool status[nvars];  // status[i] == 1 when branch i exists in the tree and was wired up
  RooArgSet args;
  for (int i=0; i<nvars; i++){
    status[i]=0;
    TString name = rooptrs[i]->GetName();
    TObject *obj = objs->FindObject(name.Data());
    if (!obj) continue;  // tree has no branch of this name; variable is skipped
    t->SetBranchStatus(name.Data(),1);
    status[i]=1;
    t->SetBranchAddress(name.Data(),ptrs[i],branches[i]);
    args.add(*(rooptrs[i]));
  }

  // Dataset named after the tree, weighted by the global rooweight variable.
  TString newname = Form("roo_%s",t->GetName());
  roodset = new RooDataSet(newname.Data(),newname.Data(),args,WeightVar(*rooweight) );

  // Copy every tree entry into the dataset with weight v_rooweight.
  for (int j=0; j<t->GetEntries(); j++){
    t->GetEntry(j);
    for (int i=0; i<nvars; i++){
      if (!status[i]) continue;
      rooptrs[i]->setVal(*(ptrs[i]));
    }
    roodset->add(args,v_rooweight);
  }

  cout << "Imported roodset " << newname.Data() << " from TTree " << t->GetName() << endl;
  roodset->Print();
}