Example #1
0
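// Book the residual and predicted-error histograms for every telescope plane.
// Histograms are created under "Resolution/<planeName>/..." in the analysis
// manager's directory structure, one set per plane.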
void Resolution::book(void)
{
    destroy();

    std::string hName;
    std::string hTitle;
    std::string planeName;

    theAnalysisManager_->cd("/");
    theAnalysisManager_->mkdir("Resolution");

    for(unsigned int p=0; p<thePlaneMapping_->getNumberOfPlanes(); p++)
    {
        planeName = thePlaneMapping_->getPlaneName(p);
        theAnalysisManager_->cd("Resolution");
        theAnalysisManager_->mkdir(planeName);

        theAnalysisManager_->mkdir("X Residuals");

        hName  = "hXresiduals_"                               + planeName;
        hTitle = "X residuals  "                              + planeName;
        hXresiduals_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        hName  = "hXresidualCalculated_"                      + planeName;
        hTitle = "X residuals calculated from asimmetry fit " + planeName;
        hXresidualCalculated_.push_back(NEW_THREADED(TH1F(hName.c_str(), hTitle.c_str(), 125, -500, 500)));

        hName  = "hXresidualsClusterSize1_"                   + planeName;
        hTitle = "X residuals cluster size 1 "                + planeName;
        hXresidualsClusterSize1_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        hName  = "hXresidualsClusterSize2_"                   + planeName;
        hTitle = "X residuals cluster size 2 "                + planeName;
        hXresidualsClusterSize2_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        theAnalysisManager_->cd("Resolution/" + planeName);

        theAnalysisManager_->mkdir("Y Residuals");

        hName  = "hYresiduals_"                               + planeName;
        hTitle = "Y residuals "                               + planeName;
        hYresiduals_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        hName  = "hYresidualCalculated_"                      + planeName;
        hTitle = "Y residuals calculated from asimmetry fit " + planeName;
        hYresidualCalculated_.push_back(NEW_THREADED(TH1F(hName.c_str(), hTitle.c_str(), 125, -500, 500)));

        hName  = "hYresidualsClusterSize1_"                   + planeName;
        hTitle = "Y residuals cluster size 1 "                + planeName;
        hYresidualsClusterSize1_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        hName  = "hYresidualsClusterSize2_"                   + planeName;
        hTitle = "Y residuals cluster size 2 "                + planeName;
        hYresidualsClusterSize2_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 125, -500, 500)));

        if(planeName.find("Dut")!=std::string::npos)
        {
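            // Predicted-error histograms are booked only for the devices under
            // test, i.e. planes whose name contains "Dut".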
            theAnalysisManager_->cd("Resolution/" + planeName);

            theAnalysisManager_->mkdir("Errors");

            hName  = "hPredictedXErrors_"                     + planeName;
            hTitle = "predicted X errors "                    + planeName;
            hPredictedXErrors_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 10000, 0, 10)));

            hName  = "hPredictedYErrors_"                     + planeName;
            hTitle = "predicted Y errors "                    + planeName;
            hPredictedYErrors_.push_back(NEW_THREADED(TH1F(hName.c_str(),hTitle.c_str(), 10000, 0, 10)));
        }
    }
}
Example #2
0
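// Round-trip a set of containers (plain doubles, std::complex values, histograms
// and nested containers) through ROOT I/O, both as standalone objects ("row wise")
// and through a TTree ("column wise"), and compare what is read back with the
// originals.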
void check(const char* testName){

   printf("o Checking %s\n",testName);

   TH1::AddDirectory(0); // same name is ok
   gRandom->SetSeed(1); // make all contents identical irrespective of the container

   std::string binFilename(testName); binFilename+=".root";
   std::string xmlFilename(testName); xmlFilename+=".xml";

   std::vector<const char*> filenames  {binFilename.c_str()/*, xmlFilename.c_str()*/}; // NEED XML HERE

   for (auto&& filename : filenames){
      createFile(filename);
   }

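   // Containers to be round-tripped. NESTEDCONT is presumably a macro, defined
   // elsewhere in this test, that expands to a brace initializer for the
   // nested-container cases.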
   Cont<double> doubleCont {1.,2.,3.,4.}; // need double32_t
   Cont<complex<double>> complexDCont {{1,2},{3,4},{5,6},{7,8},{9,10},{11,12}};
   Cont<complex<float>> complexFCont {{1,2},{3,4},{5,6},{7,8},{9,10},{11,12}};
   Cont<TH1F> histoCont {TH1F("h","ht",100,-2,2), TH1F("h","ht",10,-1.2,1.2)};
   fillHistoCont(histoCont);


   Cont<Cont<TH1F>> contHistoCont NESTEDCONT;
   fillHistoNestedCont(contHistoCont);

   vector<Cont<TH1F>> vecHistoCont NESTEDCONT;
   fillHistoNestedCont(vecHistoCont);

   Cont<vector<TH1F>> contHistoVec NESTEDCONT;
   fillHistoNestedCont(contHistoVec);

   printf("  - RowWise\n");

   // Row wise
   for (auto&& filename : filenames){
      writeReadCheck(doubleCont,"doubleCont",filename);
      writeReadCheck(complexDCont,"complexDCont",filename);
      writeReadCheck(complexFCont,"complexFCont",filename);
      writeReadCheck(histoCont,"histoCont",filename);
      writeReadCheck(contHistoCont,"contHistoCont",filename);
      writeReadCheck(vecHistoCont,"vecHistoCont",filename);
      writeReadCheck(contHistoVec,"contHistoVec",filename);
   }

   // ColumnWise
   printf("  - ColumnWise\n");
   int NEvts=100;
   // Make a backup of the input
   auto doubleContOrig = doubleCont;
   auto complexDContOrig = complexDCont;
   auto complexFContOrig = complexFCont;
   auto histoContOrig = histoCont;
   auto contHistoContOrig = contHistoCont;
   auto vecHistoContOrig = vecHistoCont;
   auto contHistoVecOrig = contHistoVec;

   // Write
   gRandom->SetSeed(1);
   {
      printf("    * Write\n");
      TFile f(binFilename.c_str(),"UPDATE");
      TTree t("t","Test Tree");
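      // Every container is written twice: a "_split" branch with split level 99
      // (each data member in its own sub-branch) and a branch with split level 0
      // (the object streamed as a single buffer). 16000 is the basket buffer size.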
      t.Branch("doubleCont_split", &doubleCont,16000,99);
      t.Branch("doubleCont", &doubleCont,16000,0);
      t.Branch("complexDCont_split", &complexDCont,16000,99);
      t.Branch("complexDCont", &complexDCont,16000,0);
      t.Branch("complexFCont_split", &complexFCont,16000,99);
      t.Branch("complexFCont", &complexFCont,16000,0);
      t.Branch("histoCont_split", &histoCont,16000,99);
      t.Branch("histoCont", &histoCont,16000,0);
      t.Branch("contHistoCont_split", &contHistoCont,16000,99);
      t.Branch("contHistoCont", &contHistoCont,16000,0);
      t.Branch("vecHistoCont_split", &vecHistoCont,16000,99);
      t.Branch("vecHistoCont", &vecHistoCont,16000,0);
      t.Branch("contHistoVec_split", &contHistoVec,16000,99);
      t.Branch("contHistoVec", &contHistoVec,16000,0);

      for (int i=0;i<NEvts;++i){
         randomizeCont(doubleCont);
         randomizeCont(complexDCont);
         randomizeCont(complexFCont);
         fillHistoCont(histoCont,10);
         fillHistoNestedCont(contHistoCont,10);
         fillHistoNestedCont(vecHistoCont,10);
         fillHistoNestedCont(contHistoVec,10);
         t.Fill();
      }
      t.Write();
   }
   // And Read
   gRandom->SetSeed(1);
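   // Re-seeding with the same value regenerates, event by event, exactly the
   // containers that were written, so the "*Orig" objects can be compared with
   // what the TTreeReader returns.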
   {
      printf("    * Read\n");
      TFile f(binFilename.c_str());
      TTreeReader reader("t", &f);
      TTreeReaderValue<decltype(doubleCont)> rdoubleCont_split(reader, "doubleCont_split");
      TTreeReaderValue<decltype(doubleCont)> rdoubleCont(reader, "doubleCont");
      TTreeReaderValue<decltype(complexDCont)> rcomplexDCont_split(reader, "complexDCont_split");
      TTreeReaderValue<decltype(complexDCont)> rcomplexDCont(reader, "complexDCont");
      TTreeReaderValue<decltype(complexFCont)> rcomplexFCont_split(reader, "complexFCont_split");
      TTreeReaderValue<decltype(complexFCont)> rcomplexFCont(reader, "complexFCont");
      TTreeReaderValue<decltype(histoCont)> rhistoCont_split(reader, "histoCont_split");
      TTreeReaderValue<decltype(histoCont)> rhistoCont(reader, "histoCont");
      TTreeReaderValue<decltype(contHistoCont)> rcontHistoCont_split(reader, "contHistoCont_split");
      TTreeReaderValue<decltype(contHistoCont)> rcontHistoCont(reader, "contHistoCont");
      TTreeReaderValue<decltype(vecHistoCont)> rvecHistoCont_split(reader, "vecHistoCont_split");
      TTreeReaderValue<decltype(vecHistoCont)> rvecHistoCont(reader, "vecHistoCont");
      TTreeReaderValue<decltype(contHistoVec)> rcontHistoVec_split(reader, "contHistoVec_split");
      TTreeReaderValue<decltype(contHistoVec)> rcontHistoVec(reader, "contHistoVec");
      for (int i=0;i<NEvts;++i){
         // Rebuild original values
         randomizeCont(doubleContOrig);
         randomizeCont(complexDContOrig);
         randomizeCont(complexFContOrig);
         fillHistoCont(histoContOrig,10);
         fillHistoNestedCont(contHistoContOrig,10);
         fillHistoNestedCont(vecHistoContOrig,10);
         fillHistoNestedCont(contHistoVecOrig,10);
         // Now check them
         reader.Next();
         checkObjects("doubleCont_split",doubleContOrig,*rdoubleCont_split);
         checkObjects("doubleCont",doubleContOrig,*rdoubleCont);
         checkObjects("complexDCont_split",complexDContOrig,*rcomplexDCont_split);
         checkObjects("complexDCont",complexDContOrig,*rcomplexDCont);
         checkObjects("complexFCont_split",complexFContOrig,*rcomplexFCont_split);
         checkObjects("complexFCont",complexFContOrig,*rcomplexFCont);
         checkObjects("histoCont_split",histoContOrig,*rhistoCont_split);
         checkObjects("histoCont",histoContOrig,*rhistoCont);
         checkObjects("contHistoCont_split",contHistoContOrig,*rcontHistoCont_split);
         checkObjects("contHistoCont",contHistoContOrig,*rcontHistoCont);
         checkObjects("vecHistoCont_split",vecHistoContOrig,*rvecHistoCont_split);
         checkObjects("vecHistoCont",vecHistoContOrig,*rvecHistoCont);
         checkObjects("contHistoVec_split",contHistoVecOrig,*rcontHistoVec_split);
         checkObjects("contHistoVec",contHistoVecOrig,*rcontHistoVec);
      }
   }
}
Example #3
bool CalibrationScanAnalysis::checkInput() const {
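  // Validate the loaded summaries in five steps: enough inputs, histogram names
  // common to all inputs, at least two measurements, consistent bin labels, and
  // a known geometry for every APV.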

  // check that we have data
  std::cout << "Checking data integrity." << std::endl;
  std::cout << "Step 1/5" << std::endl;
  if(summaries_.empty()) {
    std::cerr << "Error: No summary histogram found." << std::endl
              << " Did you load any file?" << std::endl;
    return false;
  }

  if(summaries_.size()<2) {
    std::cerr << "Error: Only one summary histogram found." << std::endl
              << " The analysis does not make sense with only one measurement." << std::endl;
    return false;
  }

  // check that we have the same entries in each record,
  // check that the binning is the same in all histograms
  std::cout << "Step 2/5" << std::endl;
  int nbinsAll = -1;
  std::vector<std::string> namesAll;
  for(SummaryV::const_iterator summary = summaries_.begin(); summary!=summaries_.end(); ++summary) {
    const std::vector<TH1*>& observables = summary->second;
    for( std::vector<TH1*>::const_iterator histo = observables.begin();histo<observables.end();++histo) {
       std::string name = (*histo)->GetName();
       if(summary == summaries_.begin()) {
          namesAll.push_back(name);
       } else {
          if(find(namesAll.begin(),namesAll.end(),name)==namesAll.end()) {
            std::cerr << "Error: Found a histogram that is not common to all inputs: "
                      << name << std::endl;
            return false;
          }
       }
       int nbins = (*histo)->GetNbinsX();
       if(nbinsAll<0) nbinsAll = nbins;
       if(nbins != nbinsAll) {
         std::cerr << "Error: The number of bins is not the same in all inputs." << std::endl;
         // non-fatal
         // return false;
       }
    }
  }

  // check that we have at least 2 histograms with measurements
  std::cout << "Step 3/5" << std::endl;
  if(namesAll.size()<2) {
    std::cerr << "Error: The number of available measurements is smaller than 2." << std::endl;
    return false;
  }

  // check that the bin labels are all the same
  std::cout << "Step 4/5" << std::endl;
  std::vector<std::string> labelsAll;
  for(SummaryV::const_iterator summary = summaries_.begin(); summary!=summaries_.end(); ++summary) {
    const std::vector<TH1*>& observables = summary->second;
    for( std::vector<TH1*>::const_iterator histo = observables.begin();histo<observables.end();++histo) {
       for(int i = 1;i <= (*histo)->GetNbinsX(); ++i) {
         std::string label = (*histo)->GetXaxis()->GetBinLabel(i);
         if(summary == summaries_.begin() && histo == observables.begin()) {
           labelsAll.push_back(label);
         } else {
           if(labelsAll[i-1] != label) {
             // Rebuild the histogram with the reference bin labels instead of
             // treating the mismatch as a fatal error.
             *((TH1F*)(*histo)) = TH1F(*(fixHisto(labelsAll,*histo)));
             /*
             std::cerr << "Error: Inconsistency in bin labels. Bin " << i
                       << " of " << (*histo)->GetName() << " is " << label
                       << " and not " << labelsAll[i-1] << "." << std::endl;
             return false;
             */
           }
         }
       }
    }
  }

  // check that all APVs have an associated geometry
  std::cout << "Step 5/5" << std::endl;
  for(std::vector<std::string>::const_iterator apvLabel = labelsAll.begin();
      apvLabel != labelsAll.end(); ++apvLabel) {
    if(geometries_.find(*apvLabel)==geometries_.end()) {
      std::cerr << "Error: Geometry unknown for APV " << *apvLabel << std::endl;
      // made this a non-fatal error
      // return false;
      std::string label = *apvLabel;
      const_cast<CalibrationScanAnalysis*>(this)->geometries_[label] = 0;
    }
  }

  return true;

}
Example #4
Int_t mt102_readNtuplesFillHistosAndFit()
{

   // Run in batch mode so that no graphics windows pop up during execution
   gROOT->SetBatch();

   // Perform the operation sequentially ---------------------------------------
   TChain inputChain("multiCore");
   inputChain.Add("mc101_multiCore_*.root");
   TH1F outHisto("outHisto", "Random Numbers", 128, -4, 4);
   {
      TimerRAII t("Sequential read and fit");
      inputChain.Draw("r >> outHisto");
      outHisto.Fit("gaus");
   }

   // We now go MT! ------------------------------------------------------------

   // The first, fundamental operation to be performed in order to make ROOT
   // thread-aware.
   ROOT::EnableThreadSafety();

   // We adapt our parallelisation to the number of input files
   const auto nFiles = inputChain.GetListOfFiles()->GetEntries();
   std::forward_list<UInt_t> workerIDs(nFiles);
   std::iota(std::begin(workerIDs), std::end(workerIDs), 0);
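   // workerIDs now holds 0, 1, ..., nFiles-1: one ID per input file and worker.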


   // We define the histograms we'll fill
   std::vector<TH1F> histograms;
   histograms.reserve(nFiles);
   for (auto workerID : workerIDs){
      histograms.emplace_back(TH1F(Form("outHisto_%u", workerID), "Random Numbers", 128, -4, 4));
   }

   // We define our work item
   auto workItem = [&histograms](UInt_t workerID) {
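      // Each worker opens its own input file and fills its own histogram slot,
      // so no data is shared between threads while they run.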
      TFile f(Form("mc101_multiCore_%u.root", workerID));
      TNtuple *ntuple = nullptr;
      f.GetObject("multiCore", ntuple);
      auto &histo = histograms.at(workerID);
      for (Long64_t index = 0; index < ntuple->GetEntriesFast(); ++index) {
         ntuple->GetEntry(index);
         histo.Fill(ntuple->GetArgs()[0]);
      }
   };

   TH1F sumHistogram("SumHisto", "Random Numbers", 128, -4, 4);

   // Create the collection which will hold the threads, our "pool"
   std::vector<std::thread> workers;

   // We measure time here as well
   {
      TimerRAII t("Parallel execution");

      // Spawn workers
      // Fill the "pool" with workers
      for (auto workerID : workerIDs) {
         workers.emplace_back(workItem, workerID);
      }

      // Now join them
      for (auto&& worker : workers) worker.join();

      // And reduce
      std::for_each(std::begin(histograms), std::end(histograms),
                    [&sumHistogram](const TH1F & h) {
                       sumHistogram.Add(&h);
                    });

      sumHistogram.Fit("gaus",0);
   }

   return 0;

}
Example #5
0
void baseClass::readCutFile()
{
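  // Each non-comment line of the cut file is expected to provide:
  //   variableName value1 value2 value3 value4 level nBins histoMin histoMax
  // A level of -1 marks a preliminary cut (only the four values are stored);
  // otherwise the values are interpreted as minValue1 maxValue1 minValue2
  // maxValue2 and a set of control histograms is booked for the cut.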
  string s;
  STDOUT("Reading cutFile_ = "<< *cutFile_)

  ifstream is(cutFile_->c_str());
  if(is.good())
    {
      //      STDOUT("Reading file: " << *cutFile_ );
      int id=0;
      while( getline(is,s) )
        {
          STDOUT("read line: " << s);
          if (s.empty() || s[0] == '#') continue; // skip blank lines and comments
	  vector<string> v = split(s);
	  map<string, cut>::iterator cc = cutName_cut_.find(v[0]);
	  if( cc != cutName_cut_.end() )
	    {
	      STDOUT("ERROR: variableName = "<< v[0] << " exists already in cutName_cut_. Returning.");
	      return;
	    } 

	  int level_int = atoi( v[5].c_str() );
	  if(level_int == -1)
	    {
	      map<string, preCut>::iterator cc = preCutName_cut_.find(v[0]);
	      if( cc != preCutName_cut_.end() )
		{
		  STDOUT("ERROR: variableName = "<< v[0] << " exists already in preCutName_cut_. Returning.");
		  return;
		} 
	      preCutInfo_ << "### Preliminary cut values: " << s <<endl;
	      preCut thisPreCut;
	      thisPreCut.variableName =     v[0];
	      thisPreCut.value1  = decodeCutValue( v[1] );
	      thisPreCut.value2  = decodeCutValue( v[2] );
	      thisPreCut.value3  = decodeCutValue( v[3] );
	      thisPreCut.value4  = decodeCutValue( v[4] );
	      preCutName_cut_[thisPreCut.variableName]=thisPreCut;
	      continue;
	    }
	  cut thisCut;
	  thisCut.variableName =     v[0];
	  string m1=v[1];
	  string M1=v[2];
	  string m2=v[3];
	  string M2=v[4];
	  if( m1=="-" || M1=="-" ) 
	    {
	      STDOUT("ERROR: minValue1 and maxValue2 have to be provided. Returning."); 
	      return; // FIXME implement exception
	    } 
	  if( (m2=="-" && M2!="-") || (m2!="-" && M2=="-") ) 
	    {
	      STDOUT("ERROR: if any of minValue2 and maxValue2 is -, then both have to be -. Returning");
	      return; // FIXME implement exception
	    }
	  if( m2=="-") m2="+inf";
	  if( M2=="-") M2="-inf";
	  thisCut.minValue1  = decodeCutValue( m1 );
	  thisCut.maxValue1  = decodeCutValue( M1 );
	  thisCut.minValue2  = decodeCutValue( m2 );
	  thisCut.maxValue2  = decodeCutValue( M2 );
	  thisCut.level_int  = level_int;
	  thisCut.level_str  =       v[5];
	  thisCut.histoNBins = atoi( v[6].c_str() );
	  thisCut.histoMin   = atof( v[7].c_str() );
	  thisCut.histoMax   = atof( v[8].c_str() );
	  // Not filled from file
	  thisCut.id=++id;
	  string s1;
	  if(skimWasMade_)
	    {
	      s1 = "cutHisto_skim___________________" + thisCut.variableName;
	    }
	  else
	    {
	      s1 = "cutHisto_noCuts_________________" + thisCut.variableName;
	    }
	  string s2 = "cutHisto_allPreviousCuts________" + thisCut.variableName;
	  string s3 = "cutHisto_allOthrSmAndLwrLvlCuts_" + thisCut.variableName;
	  string s4 = "cutHisto_allOtherCuts___________" + thisCut.variableName;
	  string s5 = "cutHisto_allCuts________________" + thisCut.variableName;
	  thisCut.histo1 = TH1F (s1.c_str(),"", thisCut.histoNBins, thisCut.histoMin, thisCut.histoMax);
	  thisCut.histo2 = TH1F (s2.c_str(),"", thisCut.histoNBins, thisCut.histoMin, thisCut.histoMax);
	  thisCut.histo3 = TH1F (s3.c_str(),"", thisCut.histoNBins, thisCut.histoMin, thisCut.histoMax);
	  thisCut.histo4 = TH1F (s4.c_str(),"", thisCut.histoNBins, thisCut.histoMin, thisCut.histoMax);
	  thisCut.histo5 = TH1F (s5.c_str(),"", thisCut.histoNBins, thisCut.histoMin, thisCut.histoMax);
	  thisCut.histo1.Sumw2();
	  thisCut.histo2.Sumw2();
	  thisCut.histo3.Sumw2();
	  thisCut.histo4.Sumw2();
	  thisCut.histo5.Sumw2();
	  // Filled event by event
	  thisCut.filled = false;
	  thisCut.value = 0;
	  thisCut.passed = false;
	  thisCut.nEvtInput=0;
	  thisCut.nEvtPassed=0;

	  orderedCutNames_.push_back(thisCut.variableName);
	  cutName_cut_[thisCut.variableName]=thisCut;

	}
      STDOUT( "baseClass::readCutFile: Finished reading cutFile: " << *cutFile_ );
    }
  else
    {
      STDOUT("ERROR opening cutFile:" << *cutFile_ );
      exit (1);
    }
  is.close();

}