void histogramHandlerTest(const char* input="./")
{
  if (!input) {
    cerr << "please specify input or run without arguments" << endl;
    return;
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // init the HLT system in order to define the analysis chain below
  //
  AliHLTSystem* gHLT=AliHLTPluginBase::GetInstance();

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // define the analysis chain to be run
  //
  int iMinSlice=0;
  int iMaxSlice=35;
  int iMinPart=0;
  int iMaxPart=5;
  TString histogramHandlerInput1;
  TString histogramHandlerInput2;
  TString histogramHandlerOutput1;
  TString histogramHandlerOutput2;
  TString rootFileWriterInput1;
  TString rootFileWriterInput2;
  for (int slice=iMinSlice; slice<=iMaxSlice; slice++) {
    for (int part=iMinPart; part<=iMaxPart; part++) {
      TString clusterFinderOutput1;
      TString clusterFinderOutput2;
      TString clusterHistoInput1;
      TString clusterHistoInput2;
      TString clusterHistoOutput1;
      TString clusterHistoOutput2;
      TString arg, publisher;

      // digit publisher components
      int ddlno=768;
      if (part>1) ddlno+=72+4*slice+(part-2);
      else ddlno+=2*slice+part;
      arg.Form("-minid %d -datatype 'DDL_RAW ' 'TPC ' -dataspec 0x%02x%02x%02x%02x", ddlno, slice, slice, part, part);
      publisher.Form("DP_%02d_%d", slice, part);
      AliHLTConfiguration pubconf(publisher.Data(), "AliRawReaderPublisher", NULL, arg.Data());

      // first clusterfinder
      clusterFinderOutput1.Form("CF1_%02d_%d", slice, part);
      AliHLTConfiguration cf1conf(clusterFinderOutput1.Data(), "TPCClusterFinderDecoder", publisher.Data(), "-timebins 446"); // -timebins set to simulated data
      if (clusterHistoInput1.Length()>0) clusterHistoInput1+=" ";
      clusterHistoInput1+=clusterFinderOutput1;

      // second clusterfinder
      clusterFinderOutput2.Form("CF2_%02d_%d", slice, part);
      AliHLTConfiguration cf2conf(clusterFinderOutput2.Data(), "TPCClusterFinderDecoder", publisher.Data(), "-timebins 446"); // -timebins set to simulated data
      if (clusterHistoInput2.Length()>0) clusterHistoInput2+=" ";
      clusterHistoInput2+=clusterFinderOutput2;

      // first cluster histo component
      clusterHistoOutput1.Form("CH1_%02d_%d", slice, part);
      AliHLTConfiguration ch1conf(clusterHistoOutput1.Data(), "TPCClusterHisto", clusterHistoInput1.Data(), "");
      if (histogramHandlerInput1.Length()>0) histogramHandlerInput1+=" ";
      histogramHandlerInput1+=clusterHistoOutput1;

      // second cluster histo component
      clusterHistoOutput2.Form("CH2_%02d_%d", slice, part);
      AliHLTConfiguration ch2conf(clusterHistoOutput2.Data(), "TPCClusterHisto", clusterHistoInput2.Data(), "");
      if (histogramHandlerInput2.Length()>0) histogramHandlerInput2+=" ";
      histogramHandlerInput2+=clusterHistoOutput2;
    }
  }

  // first histogram handler component, collects the output of all CH1 configurations
  histogramHandlerOutput1="HH1";
  AliHLTConfiguration hh1conf(histogramHandlerOutput1.Data(), "TPCHistogramHandler", histogramHandlerInput1.Data(), "-use-general");
  if (rootFileWriterInput1.Length()>0) rootFileWriterInput1+=" ";
  rootFileWriterInput1+=histogramHandlerOutput1;

  // second histogram handler component, collects the output of all CH2 configurations
  histogramHandlerOutput2="HH2";
  AliHLTConfiguration hh2conf(histogramHandlerOutput2.Data(), "TPCHistogramHandler", histogramHandlerInput2.Data(), "-use-general");
  if (rootFileWriterInput2.Length()>0) rootFileWriterInput2+=" ";
  rootFileWriterInput2+=histogramHandlerOutput2;

  AliHLTConfiguration rootFileWriter1("RootFileWriter1", "ROOTFileWriter", rootFileWriterInput1.Data(), "-datafile histogramHandlerFile1");
  AliHLTConfiguration rootFileWriter2("RootFileWriter2", "ROOTFileWriter", rootFileWriterInput2.Data(), "-datafile histogramHandlerFile2");

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // Init and run the reconstruction
  // All but HLT reconstruction is switched off
  //
  AliReconstruction rec;
  rec.SetInput(input);
  rec.SetRunVertexFinder(kFALSE);
  rec.SetRunLocalReconstruction("HLT");
  rec.SetRunTracking("");
  rec.SetLoadAlignFromCDB(0);
  rec.SetRunQA(":");
  //rec.SetFillESD("HLT");
  rec.SetFillESD("");
  rec.SetFillTriggerESD(false);
  rec.SetOption("HLT", "libAliHLTUtil.so libAliHLTRCU.so libAliHLTTPC.so loglevel=0x7c chains=RootFileWriter1,RootFileWriter2");
  rec.Run();
}
// ----------------------------------------------------------------------------
void HLTT0Test(const Char_t *filename, const Char_t *cdbURI,
               Int_t minEvent=-1, Int_t maxEvent=-1)
{
  printf(" ============================================= \n\n");
  printf("          TEST T0 RECONSTRUCTION               \n\n");
  printf(" ============================================= \n");

  if (!gSystem->AccessPathName("galice.root")) {
    cerr << "AliReconstruction on raw data requires to delete galice.root, ";
    cerr << "or run at a different place." << endl;
    cerr << "!!! DO NOT DELETE the galice.root of your simulation, ";
    cerr << "but create a subfolder !!!!" << endl;
    return;
  }

  if (gSystem->AccessPathName(filename)) {
    cerr << "can not find file " << filename << endl;
    return;
  }

  // -- connect to the GRID if we use a file or OCDB from the GRID
  TString struri=cdbURI;
  TString strfile=filename;
  if (struri.BeginsWith("raw://") ||
      (strfile.Contains("://") && !strfile.Contains("local://"))) {
    TGrid::Connect("alien");
  }

  // -- Set the CDB storage location
  AliCDBManager* man = AliCDBManager::Instance();
  man->SetDefaultStorage(cdbURI);
  if (struri.BeginsWith("local://")) {
    // set specific storage for GRP entry
    // search in the working directory and one level above, the latter
    // follows the standard simulation setup like e.g. in test/ppbench
    if (!gSystem->AccessPathName("GRP/GRP/Data")) {
      man->SetSpecificStorage("GRP/GRP/Data", "local://$PWD");
    } else if (!gSystem->AccessPathName("../GRP/GRP/Data")) {
      man->SetSpecificStorage("GRP/GRP/Data", "local://$PWD/..");
    } else {
      cerr << "can not find a GRP entry, please run the macro in the folder" << endl;
      cerr << "of a simulated data sample, or specify a GRID OCDB" << endl;
      HLTT0Test(); // print usage (see overload below)
      return;
    }
  }

  //////////////////////////////////////////////////////////////////////////////////////
  //
  // Reconstruction settings
  AliReconstruction rec;

  if (minEvent>=0 || maxEvent>minEvent) {
    if (minEvent<0) minEvent=0;
    if (maxEvent<minEvent) maxEvent=minEvent;
    rec.SetEventRange(minEvent,maxEvent);
  }

  rec.SetRunReconstruction("HLT");
  rec.SetLoadAlignFromCDB(kFALSE);
  rec.SetWriteESDfriend(kFALSE);
  // due to bug ...
  // StopOnError needs to be disabled
  rec.SetStopOnError(kFALSE);
  rec.SetRunVertexFinder(kFALSE);
  rec.SetInput(filename);

  // QA options
  rec.SetRunQA(":");
  //rec.SetQARefDefaultStorage("local://$ALICE_ROOT/QAref");

  //////////////////////////////////////////////////////////////////////////////////////
  //
  // setup the HLT system
  AliHLTSystem* pHLT=AliHLTPluginBase::GetInstance();

  /*
  // define a data publisher configuration for T0 raw data
  AliHLTConfiguration publisher("RAW-Publisher",
                                "AliRawReaderPublisher",
                                "",
                                "-equipmentid 3584 "
                                "-datatype 'DDL_RAW ' 'VZRO' "
                                "-dataspec 0x01"
                                );

  // define configuration of the T0Reconstruction component
  AliHLTConfiguration vzeroReco("T0-Reconstruction",
                                "T0Reconstruction",
                                "RAW-Publisher",
                                ""
                                );

  // define configuration of the GlobalEsdConverter component
  AliHLTConfiguration esdConverter("GLOBAL-ESD-Converter",
                                   "GlobalEsdConverter",
                                   "T0-Reconstruction",
                                   ""
                                   );

  // define configuration for Root file writer of T0 output
  AliHLTConfiguration rootWriter("RootWriter",
                                 "ROOTFileWriter",
                                 "T0-Reconstruction GLOBAL-ESD-Converter",
                                 "-directory analysis -datafile vzeroRec"
                                 );
  */

  // set option for the HLT module in AliReconstruction
  // arguments
  //  - ignore-hltout : ignore the HLTOUT payload from the HLT DDLs
  //  - libraries to be used as plugins
  //  - loglevel=0x7c : Important, Info, Warning, Error, Fatal
  rec.SetOption("HLT",
                "ignore-hltout "
                "libAliHLTUtil.so libAliHLTGlobal.so libAliHLTITS.so libAliHLTT0.so "
                "loglevel=0x7c "
                "chains=T0-RECO"//"GLOBAL-ESD-Converter,RootWriter"
                );
  rec.SetRunPlaneEff(kFALSE);

  // switch off cleanESD
  rec.SetCleanESD(kFALSE);

  AliLog::Flush();
  rec.Run();
}
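// ----------------------------------------------------------------------------
// Hedged addition: the macro above calls HLTT0Test() without arguments when no
// GRP entry is found. The no-argument overload below only prints a usage
// message; the wording and the assumed macro file name are assumptions.
void HLTT0Test()
{
  cout << "HLTT0Test: run the HLT T0 reconstruction test chain" << endl;
  cout << "Usage:" << endl;
  cout << "  aliroot -b -q -l HLTT0Test.C'(\"rawfile\", \"cdbURI\", minEvent, maxEvent)'" << endl;
  cout << "  rawfile            raw data file or directory, can also be a GRID file" << endl;
  cout << "  cdbURI             OCDB location, e.g. local://$ALICE_ROOT/OCDB or raw://" << endl;
  cout << "  minEvent/maxEvent  optional event range, -1 means no selection" << endl;
}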
/*
 * Example macro to run the HLT vertexer embedded
 * into AliRoot reconstruction. The reconstruction is done from the raw data.
 *
 * Usage:
 * <pre>
 *   aliroot -b -q rec-vertexer.C | tee rec-vertexer.log
 * </pre>
 *
 * The chains to be run are defined in the first section of the macro and
 * passed to AliReconstruction via the 'chains=' option of the HLT module.
 *
 * The macro assumes raw data to be available in the rawx folders, either
 * simulated or real data. A different input can be specified as parameter
 * <pre>
 *   aliroot -b -q rec-vertexer.C'("input.root")'
 * </pre>
 *
 * AliHLTTPCDigitReader32Bit is used to read the data.
 *
 * In the first section, an analysis chain is defined. The scale of the
 * chain can be defined by choosing the range of sectors and partitions.
 *
 * The reconstruction is steered by the AliReconstruction object in the
 * usual way.
 *
 * @ingroup alihlt_global
 * @author
 */
void rec_vertexer(const char* input="./")
{
  if (!gSystem->AccessPathName("galice.root")) {
    cerr << "please delete the galice.root or run at a different place." << endl;
    return;
  }

  if (!input) {
    cerr << "please specify input or run without arguments" << endl;
    return;
  }

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // init the HLT system in order to define the analysis chain below
  //
  AliHLTSystem* gHLT=AliHLTPluginBase::GetInstance();

  TString option="libAliHLTUtil.so libAliHLTRCU.so libAliHLTTPC.so libAliHLTGlobal.so loglevel=0x7c chains=";

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // define the analysis chain to be run
  //
  int iMinSlice=0;
  int iMaxSlice=35;
  int iMinPart=0;
  int iMaxPart=5;
  TString writerInput;
  TString mergerInput;
  TString histoInput;
  TString histogramHandlerInputClusterFinder;
  TString cdumpInput;
  for (int slice=iMinSlice; slice<=iMaxSlice; slice++) {
    TString trackerInput;
    for (int part=iMinPart; part<=iMaxPart; part++) {
      TString arg, publisher, cf;
      TString clusterHistoOutput;

      // raw data publisher components
      int ddlno=768;
      if (part>1) ddlno+=72+4*slice+(part-2);
      else ddlno+=2*slice+part;
      arg.Form("-minid %d -datatype 'DDL_RAW ' 'TPC ' -dataspec 0x%02x%02x%02x%02x -verbose", ddlno, slice, slice, part, part);
      publisher.Form("DP_%02d_%d", slice, part);
      new AliHLTConfiguration(publisher.Data(), "AliRawReaderPublisher", NULL, arg.Data());

      // cluster finder components
      cf.Form("CF_%02d_%d", slice, part);
      new AliHLTConfiguration(cf.Data(), "TPCClusterFinder32Bit", publisher.Data(), "-solenoidBz -5");

      if (trackerInput.Length()>0) trackerInput+=" ";
      trackerInput+=cf;
      if (writerInput.Length()>0) writerInput+=" ";
      writerInput+=cf;
      if (histoInput.Length()>0) histoInput+=" ";
      histoInput+=cf;
      if (cdumpInput.Length()>0) cdumpInput+=" ";
      cdumpInput+=cf;
    }

    // tracker components
    TString tracker;
    tracker.Form("TR_%02d", slice);
    new AliHLTConfiguration(tracker.Data(), "TPCCATracker", trackerInput.Data(), "");

    if (writerInput.Length()>0) writerInput+=" ";
    writerInput+=tracker;
    if (mergerInput.Length()>0) mergerInput+=" ";
    mergerInput+=tracker;

    //add all slice tracks to histo input
    //if (histoInput.Length()>0) histoInput+=" ";
    //histoInput+=tracker;
  }

  // GlobalMerger component
  new AliHLTConfiguration("globalmerger","TPCCAGlobalMerger",mergerInput.Data(),"");

  //add all global tracks to histo input
  if (histoInput.Length()>0) histoInput+=" ";
  histoInput+="globalmerger";

  // specify whether to write all blocks separately or merge the tracks
  // and convert to ESD
  bool writeBlocks=false;

  // the esd converter configuration
  new AliHLTConfiguration("esd-converter", "GlobalEsdConverter", "globalmerger", "-fitTracksToVertex 1");

  new AliHLTConfiguration("global-vertexer", "GlobalVertexer", "esd-converter", "");

  new AliHLTConfiguration("v0HistoOut", "V0Histo", "global-vertexer", "");

  new AliHLTConfiguration("GVhistorootfile", "ROOTFileWriter", "global-vertexer", "-datafile primaryVertexHistograms -concatenate-events -overwrite");

  new AliHLTConfiguration("v0historootfile", "ROOTFileWriter", "v0HistoOut", "-datafile secondaryVertexHistograms -concatenate-events -overwrite");

  option+="v0historootfile";
  option+=",GVhistorootfile";

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // Init and run the reconstruction
  // All but HLT reconstruction is switched off
  //
  AliReconstruction rec;
  rec.SetDefaultStorage("local://$ALICE_ROOT/OCDB");
  // rec.SetDefaultStorage("local://$HOME/HCDB");
  //rec.SetSpecificStorage("GRP/GRP/Data",
  //                       Form("local://%s",gSystem->pwd()));
  // rec.SetSpecificStorage("GRP/GRP/Data","local:///opt/HLT-public/OCDB/LHC09c");
  // rec.SetSpecificStorage("GRP/CTP/Config","local:///opt/HLT-public/OCDB/LHC09c");
  // rec.SetSpecificStorage("GRP/CTP/Config", "local:///opt/HLT-public/rec/LHC09c/");
  rec.SetInput(input);
  rec.SetRunVertexFinder(kFALSE);
  rec.SetRunReconstruction("HLT"); //add TPC for comparison
  rec.SetLoadAlignFromCDB(0);
  rec.SetRunQA(":");
  rec.SetOption("HLT",option);

  // switch off cleanESD
  rec.SetCleanESD(kFALSE);

  // rec.SetEventRange(0, 100);
  rec.Run();
}
/**
 * @file sampleRawAnalysis.C
 * @brief Example macro to run the AliHLTSampleRawAnalysisComponent in
 * AliReconstruction.
 *
 * The component subscribes to DDL raw data published by the
 * AliHLTRawReaderPublisherComponent. The macro requires a raw data file
 * and a corresponding GRP entry.
 *
 * <pre>
 * Usage: aliroot -b -q -l \
 *     sampleRawAnalysis.C'("rawfile", "cdb", minEvent, maxEvent)'
 *
 * Examples:
 *     sampleRawAnalysis.C'("raw.root", "local://$ALICE_ROOT/OCDB", minEvent, maxEvent)'
 *     sampleRawAnalysis.C'("./", "local://$ALICE_ROOT/OCDB", minEvent, maxEvent)'
 *     sampleRawAnalysis.C'("alien:///alice/data/2010/.../raw/....root", "raw://")'
 *
 * Defaults
 *     cdb="local://$ALICE_ROOT/OCDB"  -> take local OCDB from distribution
 *     minEvent=-1                     -> no lower event selection
 *     maxEvent=-1                     -> no upper event selection
 *
 * </pre>
 *
 * The input file can be a local raw.root file but also a file from the
 * GRID. The separate DDL files generated in simulation can be accessed
 * using AliRawReaderFile by specifying "directory/".
 *
 * Since the macro runs AliReconstruction the OCDB needs to be set up, in
 * particular the GRP entry. If testing with a local OCDB you have to
 * simulate some events and run the macro in the folder of the simulation.
 * Also HLT components configure from objects in the OCDB.
 *
 * Note: You need a valid GRID token, if you want to access files directly
 * from the Grid, use 'alien-token-init' of your alien installation.
 *
 * @author [email protected]
 * @ingroup alihlt_tutorial
 */
void sampleRawAnalysis(const char *filename,
                       const char *cdbURI="local://$ALICE_ROOT/OCDB",
                       int minEvent=-1,
                       int maxEvent=-1)
{
  if (!gSystem->AccessPathName("galice.root")) {
    cerr << "AliReconstruction on raw data requires to delete galice.root, or run at a different place." << endl;
    cerr << "!!! DO NOT DELETE the galice.root of your simulation, but create a subfolder !!!!" << endl;
    return;
  }

  if (gSystem->AccessPathName(filename)) {
    cerr << "can not find file " << filename << endl;
    return;
  }

  // connect to the GRID if we use a file or OCDB from the GRID
  TString struri=cdbURI;
  TString strfile=filename;
  if (struri.BeginsWith("raw://") ||
      (strfile.Contains("://") && !strfile.Contains("local://"))) {
    TGrid::Connect("alien");
  }

  // Set the CDB storage location
  AliCDBManager* man = AliCDBManager::Instance();
  man->SetDefaultStorage(cdbURI);
  if (struri.BeginsWith("local://")) {
    // set specific storage for GRP entry
    // search in the working directory and one level above, the latter
    // follows the standard simulation setup like e.g. in test/ppbench
    if (!gSystem->AccessPathName("GRP/GRP/Data")) {
      man->SetSpecificStorage("GRP/GRP/Data", "local://$PWD");
    } else if (!gSystem->AccessPathName("../GRP/GRP/Data")) {
      man->SetSpecificStorage("GRP/GRP/Data", "local://$PWD/..");
    } else {
      cerr << "can not find a GRP entry, please run the macro in the folder" << endl;
      cerr << "of a simulated data sample, or specify a GRID OCDB" << endl;
      sampleRawAnalysis(); // print usage (see overload below)
      return;
    }
  }

  //////////////////////////////////////////////////////////////////////////////////////
  //
  // Reconstruction settings
  AliReconstruction rec;

  if (minEvent>=0 || maxEvent>minEvent) {
    if (minEvent<0) minEvent=0;
    if (maxEvent<minEvent) maxEvent=minEvent;
    rec.SetEventRange(minEvent,maxEvent);
  }

  rec.SetRunReconstruction("HLT");
  rec.SetLoadAlignFromCDB(kFALSE);
  rec.SetWriteESDfriend(kFALSE);
  // due to bug ...
  // StopOnError needs to be disabled
  rec.SetStopOnError(kFALSE);
  rec.SetRunVertexFinder(kFALSE);
  rec.SetInput(filename);

  // QA options
  rec.SetRunQA(":");
  //rec.SetQARefDefaultStorage("local://$ALICE_ROOT/QAref");

  //////////////////////////////////////////////////////////////////////////////////////
  //
  // setup the HLT system
  AliHLTSystem* pHLT=AliHLTPluginBase::GetInstance();

  // define a data publisher configuration
  // arguments:
  //  1) id of the configuration, later used to refer to this configuration
  //  2) id of the component to run
  //  3) parents, no parents for publisher components
  //  4) optional component arguments
  // publish link #512, the first link (numbered 0) of SSD
  AliHLTConfiguration publisher("RAW-Publisher",
                                "AliRawReaderPublisher",
                                "",
                                "-minid 512 "
                                "-datatype 'DDL_RAW ' 'ISSD' "
                                "-dataspec 0x01"
                                );

  // define configuration of the SampleRawAnalysis component
  // arguments:
  //  1) id of the configuration, later used to refer to this configuration
  //  2) id of the component to run
  //  3) parents, here the publisher configuration defined above
  //  4) optional component arguments
  AliHLTConfiguration rawanalysis("RAW-Analysis",
                                  "SampleRawAnalysis",
                                  "RAW-Publisher",
                                  "-mandatory1 test "
                                  "-verbose"
                                  );

  // set option for the HLT module in AliReconstruction
  // arguments
  //  - ignore-hltout : ignore the HLTOUT payload from the HLT DDLs
  //  - libraries to be used as plugins
  //  - loglevel=0x7c : Important, Info, Warning, Error, Fatal
  //  - chains=RAW-Analysis : chains to be run
  rec.SetOption("HLT",
                "ignore-hltout "
                "libAliHLTUtil.so libAliHLTSample.so "
                "loglevel=0x7c "
                "chains=RAW-Analysis "
                );
  rec.SetRunPlaneEff(kFALSE);

  // switch off cleanESD
  rec.SetCleanESD(kFALSE);

  AliLog::Flush();
  rec.Run();
}
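// ----------------------------------------------------------------------------
// Hedged addition: the macro above calls sampleRawAnalysis() without arguments
// when no GRP entry is found. The no-argument overload below only prints the
// usage summarized in the header comment; the exact wording is an assumption.
void sampleRawAnalysis()
{
  cout << "sampleRawAnalysis: run the AliHLTSampleRawAnalysisComponent in AliReconstruction" << endl;
  cout << "Usage:" << endl;
  cout << "  aliroot -b -q -l sampleRawAnalysis.C'(\"rawfile\", \"cdb\", minEvent, maxEvent)'" << endl;
  cout << "Defaults:" << endl;
  cout << "  cdb=\"local://$ALICE_ROOT/OCDB\"  minEvent=-1  maxEvent=-1" << endl;
}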
/**
 * @file cal-hlt-tpc-offline.C
 * @brief Test macro for the HLT TPC offline calibration.
 *
 * The macro runs an HLT chain of TPC analysis, using the offline
 * algorithms and appropriate wrappers. The final output is
 * processed by the TPCOfflineCalibration component.
 *
 * Usage:
 * <pre>
 *   aliroot -b -q cal-hlt-tpc-offline.C | tee cal-hlt-tpc-offline.log
 * </pre>
 *
 * The chain to be run is defined in the first section of the macro and
 * passed to AliReconstruction via the 'chains=' option of the HLT module.
 *
 * The macro assumes raw data to be available in the rawx folders, either
 * simulated or real data. A different input can be specified as parameter
 * <pre>
 *   aliroot -b -q cal-hlt-tpc-offline.C'("input.root")'
 * </pre>
 *
 * In the first section, an analysis chain is defined. The scale of the
 * chain can be defined by choosing the range of sectors and partitions.
 *
 * The reconstruction is steered by the AliReconstruction object in the
 * usual way.
 *
 * @ingroup alihlt_tpc
 * @author Jacek Otwinowski <*****@*****.**>, [email protected]
 */
void cal_hlt_tpc_offline(const char* input="./")
{
  if (!input) {
    cerr << "please specify input or run without arguments" << endl;
    return;
  }

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // init the HLT system in order to define the analysis chain below
  //
  AliHLTSystem* gHLT=AliHLTPluginBase::GetInstance();
  //gHLT.SwitchAliLog(0);

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // define the analysis chain to be run
  //
  bool sectorClusterer=true; // run clusterer on sector or DDL level

  // check if the AliRawReaderMemory supports multiple buffers
  TClass* info=TClass::GetClass("AliRawReaderMemory");
  TList* methods=info->GetListOfAllPublicMethods();
  if (sectorClusterer && !methods->FindObject("AddBuffer")) {
    cerr << "warning: AliRawReaderMemory does not support multiple buffers, falling back to run clusterer on DDL level" << endl;
    sectorClusterer=false;
  }

  int iMinSlice=0;
  int iMaxSlice=35;
  int iMinPart=0;
  int iMaxPart=5;

  int DDLNoFromSlicePatch(int, int);

  TString writerInput;
  TString trackerInput;
  TString calibratorInput;
  for (int slice=iMinSlice; slice<=iMaxSlice; slice++) {
    TString arg, cf, clustererInput;
    for (int part=iMinPart; part<=iMaxPart; part++) {
      TString publisher;

      // raw data publisher components
      int ddlno=DDLNoFromSlicePatch(slice, part);
      arg.Form("-minid %d -datatype 'DDL_RAW ' 'TPC ' -dataspec 0x%02x%02x%02x%02x -verbose", ddlno, slice, slice, part, part);
      publisher.Form("DP_%02d_%d", slice, part);
      AliHLTConfiguration pubconf(publisher.Data(), "AliRawReaderPublisher", NULL, arg.Data());

      if (!sectorClusterer) {
        // cluster finder components, one per DDL
        cf.Form("CF_%02d_%d", slice, part);
        AliHLTConfiguration cfconf(cf.Data(), "TPCOfflineClusterer", publisher.Data(), "");
        if (trackerInput.Length()>0) trackerInput+=" ";
        trackerInput+=cf;
        //if (writerInput.Length()>0) writerInput+=" ";
        //writerInput+=cf;
      } else {
        if (clustererInput.Length()>0) clustererInput+=" ";
        clustererInput+=publisher;
      }
    }
    if (sectorClusterer) {
      // cluster finder components, one per sector
      cf.Form("CF_%02d", slice);
      AliHLTConfiguration cfconf(cf.Data(), "TPCOfflineClusterer", clustererInput.Data(), "");
      if (trackerInput.Length()>0) trackerInput+=" ";
      trackerInput+=cf;
    }
  }

  // one global tracker component
  TString tracker;
  tracker.Form("Global_TR");
  AliHLTConfiguration trackerconf(tracker.Data(), "TPCOfflineTrackerCalib", trackerInput.Data(), "");
  if (calibratorInput.Length()>0) calibratorInput+=" ";
  calibratorInput+=tracker;

  // one global calibration component
  TString calibrator;
  calibrator.Form("Global_Calib");
  AliHLTConfiguration calibconf(calibrator.Data(), "TPCOfflineCalibration", calibratorInput.Data(), "");
  if (writerInput.Length()>0) writerInput+=" ";
  writerInput+=calibrator;

  // the writer configuration
  AliHLTConfiguration rootfwconf("sink1", "ROOTFileWriter", writerInput.Data(), "-specfmt=_%d -subdir=out_%d -idfmt=_0x%08x");
  //AliHLTConfiguration esdwconf("sink1", "EsdCollector", writerInput.Data(), "-directory hlt-tpc-offline");

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  //
  // Init and run the reconstruction
  // All but HLT reconstruction is switched off
  //
  AliReconstruction rec;
  rec.SetInput(input);
  rec.SetRunVertexFinder(kFALSE);
  rec.SetRunLocalReconstruction("HLT");
  rec.SetRunTracking("");
  rec.SetLoadAlignFromCDB(0);
  rec.SetFillESD("");
  rec.SetRunQA(":");
  rec.SetRunGlobalQA(kFALSE);
  rec.SetFillTriggerESD(kFALSE);
  rec.SetOption("HLT", "libAliHLTUtil.so libAliHLTRCU.so libANALYSIS.so libANALYSISalice.so libTPCcalib.so libAliHLTTPC.so loglevel=0x7c chains=sink1");
  rec.Run();
}
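// ----------------------------------------------------------------------------
// Hedged sketch of the DDL-number helper declared inside cal_hlt_tpc_offline
// above but not defined in this file. It reproduces the inline TPC DDL
// calculation used by the other macros in this collection (two inner readout
// partitions followed by four outer partitions per slice, DDL ids starting at
// 768); if the original definition differs, treat this version as an assumption.
int DDLNoFromSlicePatch(int slice, int part)
{
  int ddlno=768;
  if (part>1) ddlno+=72+4*slice+(part-2);
  else ddlno+=2*slice+part;
  return ddlno;
}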
void AliTRDReconstructandFill()
{
  //
  // This macro fills 2D histograms or vectors during the reconstruction.
  // If vectors are used, it fits them directly after the reconstruction
  // and writes the result in the file coeftest.root
  //
  TStopwatch timer;
  timer.Start();

  //// Set the parameters of AliTRDCalibra ***************
  AliTRDCalibra *calibra = AliTRDCalibra::Instance();

  //// What do you want to use?
  calibra->SetMITracking(); // offline tracking
  //calibra->Setmcmtracking();

  //// Do you want to try the correction due to the angles of the tracks for mcm tracklets?
  calibra->SetMcmCorrectAngle();

  //// What do you want to fill?
  calibra->SetCH2dOn();  // relative gain calibration
  calibra->SetPH2dOn();  // drift velocity and time0 calibration
  calibra->SetPRF2dOn(); // Pad Response Function calibration

  //// How do you want to store the information?
  calibra->SetVector2d(); // vector method
  calibra->SetHisto2d();  // 2D histograms

  //// Which mode do you want?
  calibra->SetNz(2,2);    // for the PRF z direction
  calibra->SetNrphi(2,2); // for the PRF rphi direction
  calibra->SetNz(0,0);    // for the gain z direction
  calibra->SetNrphi(0,0); // for the gain rphi direction
  calibra->SetNz(1,3);    // for the drift velocity and time0 z direction
  calibra->SetNrphi(1,3); // for the drift velocity and time0 rphi direction

  //// How many bins?
  calibra->SetNumberBinCharge(100);
  calibra->SetNumberBinPRF(20);

  //// Do you want to accept more tracks?
  calibra->SetProcent(1.2);       // for the gain: fill only if one group has a signal 1.2 times above the other group
  calibra->SetDifference(10);     // for the drift velocity: fill only if one group has at least 10 time bins
  calibra->SetNumberClusters(18); // for mcm tracklets: fill only tracklets with at least 18 clusters

  //// Do you want to take only the middle pad for gain or Vdrift?
  //calibra->SetTraMaxPad();

  //// Do you want to apply a stricter cut on the clusters for the PRF calibration?
  calibra->SetThresholdClusterPRF1(2);  // the neighboring pads must have a signal smaller than 2 ADC counts
  calibra->SetThresholdClusterPRF2(10); // the 3 pads in the cluster must have a signal above 10 ADC counts

  //// What do you want to write?
  calibra->SetWrite(0); // for the gain
  calibra->SetWrite(1); // for the average pulse height
  calibra->SetWrite(2); // for the PRF

  //// If you want to change the name of the file where it is stored (not very good)
  //calibra->SetWriteName("test.root");

  // Begin the reconstruction
  AliReconstruction rec;
  rec.SetGAliceFile("galice.root");
  rec.SetLoadAlignFromCDB(kFALSE);
  rec.SetRunHLTTracking(kFALSE);
  rec.SetFillESD("");
  rec.SetFillTriggerESD(kFALSE);
  rec.SetRunVertexFinder(kFALSE);
  rec.Run();

  timer.Stop();
  timer.Print();

  calibra->Write2d();

  TStopwatch timerfit;
  timerfit.Start();

  //// Fit directly after the filling ****
  //// Do you want to try with less statistics?
  calibra->SetMinEntries(10); // fit a histogram only if it has at least 10 entries

  //// Do you want to write the result?
  calibra->SetWriteCoef(0); // gain
  calibra->SetWriteCoef(1); // time 0 and drift velocity
  calibra->SetWriteCoef(2); // PRF

  //// Do you want to change the name of the file (TRD.coefficient.root)?
  calibra->SetWriteNameCoef("coeftest.root");

  //// Do you want to see something?
  calibra->SetDebug(1);

  //// Do you want to fit?
  calibra->FitPHOnline();
  calibra->FitCHOnline();
  calibra->FitPRFOnline();

  timerfit.Stop();
  timerfit.Print();
}
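// ----------------------------------------------------------------------------
// Hedged helper sketch, not part of the original macro: open the coefficient
// file written via calibra->SetWriteNameCoef("coeftest.root") above and list
// its content. The names of the stored objects depend on the AliTRDCalibra
// version, so only a generic listing is done here.
void inspectTRDCoefficientFile(const char* fname="coeftest.root")
{
  TFile* f=TFile::Open(fname);
  if (!f || f->IsZombie()) {
    cerr << "can not open file " << fname << endl;
    return;
  }
  f->ls(); // list the fitted calibration objects (gain, time0/vdrift, PRF)
  f->Close();
}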