void TMVAClassificationApplication( TString myMethodList = "" ) 
{   
   //---------------------------------------------------------------
   // default MVA methods to be trained + tested

   // this loads the library
   TMVA::Tools::Instance();

   std::map<std::string,int> Use;

   Use["CutsGA"]          = 0; // other "Cuts" methods work identically
   // ---
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 0; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   // ---
   Use["PDERS"]           = 0;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDERSkNN"]        = 0; // depreciated until further notice
   Use["PDEFoam"]         = 0;
   // --
   Use["KNN"]             = 0;
   // ---
   Use["HMatrix"]         = 0;
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0;
   Use["LD"]              = 0;
   // ---
   Use["FDA_GA"]          = 0;
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   // ---
   Use["MLP"]             = 0; // this is the recommended ANN
   Use["MLPBFGS"]         = 0; // recommended ANN with optional training method
   Use["MLPBNN"]          = 0;  // 
   Use["CFMlpANN"]        = 0; // *** missing
   Use["TMlpANN"]         = 0; 
   // ---
   Use["SVM"]             = 0;
   // ---
   Use["BDT"]             = 1;
   Use["BDTD"]            = 0;
   Use["BDTG"]            = 0;
   Use["BDTB"]            = 0;
   // ---
   Use["RuleFit"]         = 0;
   // ---
   Use["Category"]        = 0;
   // ---
   Use["Plugin"]          = 0;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   //
   // create the Reader object
   //
   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   Float_t Z_rapidity_z;
   reader->AddVariable( "Z_rapidity_z",         &Z_rapidity_z );

   Float_t THRUST_2D;
   reader->AddVariable( "THRUST_2D",            &THRUST_2D );

   Float_t L1_L2_cosangle;
   reader->AddVariable( "L1_L2_cosangle",       &L1_L2_cosangle );

   Float_t TransMass_ZH150_uncl;
   reader->AddVariable( "TransMass_ZH150_uncl", &TransMass_ZH150_uncl );

   Float_t TransMass_ZH150;
   reader->AddVariable( "TransMass_ZH150",      &TransMass_ZH150 );

   Float_t DeltaPhi_ZH;
   reader->AddVariable( "DeltaPhi_ZH",          &DeltaPhi_ZH );

   Float_t DeltaPhi_ZH_uncl;
   reader->AddVariable( "DeltaPhi_ZH_uncl",     &DeltaPhi_ZH_uncl );

   Float_t CMAngle;
   reader->AddVariable( "CMAngle",              &CMAngle );

   Float_t CS_cosangle;
   reader->AddVariable( "CS_cosangle",          &CS_cosangle );

   // create a set of variables and declare them to the reader
   // - the variable names must correspond in name and type to 
   //   those given in the weight file(s) that you use
   Float_t var1, var2;
   Float_t var3, var4;
//   reader->AddVariable( "myvar1 := var1+var2", &var1 );
//   reader->AddVariable( "myvar2 := var1-var2", &var2 );
//   reader->AddVariable( "var3",                &var3 );
//   reader->AddVariable( "var4",                &var4 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
//   reader->AddSpectator( "spec1 := var1*2",   &spec1 );
//   reader->AddSpectator( "spec2 := var1*3",   &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // add artificial spectators for distinguishing categories
//      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );

 float nonsense =0;
//      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );

 float nonsense =0;
//      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );

 float nonsense =0;
   }
   //
   // book the MVA methods
   //
   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = it->first + " method";
         TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
         reader->BookMVA( methodName, weightfile ); 
      }
   }
   
   // example how to use your own method as plugin
   if (Use["Plugin"]) {
      // the weight file contains a line 
      // Method         : MethodName::InstanceName

      // if MethodName is not a known TMVA method, it is assumed to be
      // a user implemented method which has to be loaded via the
      // plugin mechanism
      
      // for user implemented methods the line in the weight file can be
      // Method         : PluginName::InstanceName
      // where PluginName can be anything

      // before usage the plugin has to be defined, which can happen
      // either through the following line in .rootrc:
      // # plugin handler          plugin       class            library        constructor format
      // Plugin.TMVA@@MethodBase:  PluginName   MethodClassName  UserPackage    "MethodName(DataSet&,TString)"
      //  
      // or by telling the global plugin manager directly
      gPluginMgr->AddHandler("TMVA@@MethodBase", "PluginName", "MethodClassName", "UserPackage", "MethodName(DataSet&,TString)");
      // the class is then looked for in libUserPackage.so

      // now the method can be booked like any other
      reader->BookMVA( "User method", dir + prefix + "_User.weights.txt" );
   }

   // book output histograms
   UInt_t nbin = 100;
   TH1F *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0), *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0);
   TH1F *histRf(0), *histSVMG(0), *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",           "MVA_MLPBFGS",           nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",           "MVA_MLPBNN",           nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns per-event error, fill in histogram, and also fill significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //   
   TFile *input(0);
   TString fname = "/tmp/chasco/ORIGINAL//Data_MuEG2011B_1.root";   
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname ); // check if file in local directory exists
   } 
   else { 
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
   }
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
   
   //
   // prepare the tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   TTree* BigTree = (TTree*)input->Get("data");

TFile *tmp  = new TFile( "tmp.root","RECREATE" );
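// the scratch file above gives CopyTree a writable directory to hold the skimmed tree;
// the selection string below is a product of boolean factors implementing the analysis preselection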

TTree* theTree = BigTree->CopyTree("((cat == 1) + (cat == 2))*(ln==0)*(Cosmic==0)*(fabs(Mass_Z - 91.18)<10)*(Pt_Z>30)*(DeltaPhi_metjet>0.5)*(Pt_J1 < 30)*(pfMEToverPt_Z > 0.4)*(pfMEToverPt_Z < 1.8)*((Pt_Jet_btag_CSV_max > 20)*(btag_CSV_max < 0.244) + (1-(Pt_Jet_btag_CSV_max > 20)))*(sqrt(pow(dilepPROJLong + 1.25*recoilPROJLong + 0.0*uncertPROJLong,2)*(dilepPROJLong + 1.25*recoilPROJLong + 0.0*uncertPROJLong > 0) + 1.0*pow(dilepPROJPerp + 1.25*recoilPROJPerp + 0.0*uncertPROJPerp,2)*(dilepPROJPerp + 1.25*recoilPROJPerp + 0.0*uncertPROJPerp > 0)) > 45.0)");
   std::cout << "--- Select signal sample" << std::endl;
   Float_t userVar1, userVar2;
//   theTree->SetBranchAddress( "var1", &userVar1 );
//   theTree->SetBranchAddress( "var2", &userVar2 );
//   theTree->SetBranchAddress( "var3", &var3 );
//   theTree->SetBranchAddress( "var4", &var4 );

theTree->SetBranchAddress( " Z_rapidity_z", &Z_rapidity_z);

theTree->SetBranchAddress( " THRUST_2D", &THRUST_2D);

theTree->SetBranchAddress( " L1_L2_cosangle", &L1_L2_cosangle);

theTree->SetBranchAddress( " TransMass_ZH150_uncl", &TransMass_ZH150_uncl);

theTree->SetBranchAddress( " TransMass_ZH150", &TransMass_ZH150);

theTree->SetBranchAddress( " DeltaPhi_ZH", &DeltaPhi_ZH);

theTree->SetBranchAddress( " DeltaPhi_ZH_uncl", &DeltaPhi_ZH_uncl);

theTree->SetBranchAddress( " CMAngle", &CMAngle);

theTree->SetBranchAddress( " CS_cosangle", &CS_cosangle);

   // efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(9); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0){
         std::cout << "--- ... Processing event: " << ievt << std::endl;
      }

      theTree->GetEntry(ievt);

      var1 = userVar1 + userVar2;
      var2 = userVar1 - userVar2;

      if (ievt <20){
         // test the two different Reader::EvaluateMVA functions:
         // access via registered variables compared to access via vector<float>
//         vecVar[0]=var1;
//         vecVar[1]=var2;
//         vecVar[2]=var3;
//         vecVar[3]=var4;
         vecVar[0] = Z_rapidity_z;
         vecVar[1] = THRUST_2D;
         vecVar[2] = L1_L2_cosangle;
         vecVar[3] = TransMass_ZH150_uncl;
         vecVar[4] = TransMass_ZH150;
         vecVar[5] = DeltaPhi_ZH;
         vecVar[6] = DeltaPhi_ZH_uncl;
         vecVar[7] = CMAngle;
         vecVar[8] = CS_cosangle;
         for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
            if (it->second) {
               TString mName = it->first + " method";
               Double_t mva1 = reader->EvaluateMVA( mName); 
               Double_t mva2 = reader->EvaluateMVA( vecVar, mName); 
               if (mva1 != mva2) {
                  std::cout << "++++++++++++++ ERROR in "<< mName <<", comparing different EvaluateMVA results val1=" << mva1 << " val2="<<mva2<<std::endl;
               }
            }
         }
         // now test that the inputs do matter
         TRandom3 rand(0);
         vecVar[0]=rand.Rndm();
         vecVar[1]=rand.Rndm();
         vecVar[2]=rand.Rndm();
         vecVar[3]=rand.Rndm();
         vecVar[4]=rand.Rndm();
         vecVar[5]=rand.Rndm();
         vecVar[6]=rand.Rndm();
         vecVar[7]=rand.Rndm();
         vecVar[8]=rand.Rndm();
         for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
            if (it->second) {
               TString mName = it->first + " method";
               Double_t mva1 = reader->EvaluateMVA( mName); 
               Double_t mva2 = reader->EvaluateMVA( vecVar, mName); 
               if (mva1 == mva2) {
                  std::cout << "++++++++++++++ ERROR in "<< mName <<", obtaining idnetical output for different inputs" <<std::endl;
               }
            }
         }
      }
      // 
      // return the MVAs and fill to histograms
      // 
      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"          ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"           ) );
      if (Use["MLPBNN"          ])   histNnbnn ->Fill( reader->EvaluateMVA( "MLPBNN method"           ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err > 1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }
   // get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer  
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   //
   // write histograms
   //
   TFile *target  = new TFile( "TMVApp.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write(); 
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;
  
   delete reader;
    
   std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
} 
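A minimal way to run the macro above and look at its output, assuming ROOT with TMVA is set up, the macro is saved as TMVAClassificationApplication.C, and the weights/ directory from the corresponding training run is present (the input ntuple path in the code must of course point to an existing file):

   root -l
   .L TMVAClassificationApplication.C
   TMVAClassificationApplication("BDT");      // run the event loop; writes TMVApp.root
   TFile *f = TFile::Open("TMVApp.root");     // open the output file
   TH1F  *h = (TH1F*)f->Get("MVA_BDT");       // fetch the BDT response histogram
   h->Draw();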
Example 2
void TMVAClassificationApplication( TString myMethodList = "" ) 
{   
#ifdef __CINT__
   gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   Use["MLP"]             = 0; // Recommended ANN
   Use["BDT"]             = 1; // uses Adaptive Boost

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t var1, var2;
   Float_t var3, var4;
   reader->AddVariable( "myvar1 := var1+var2", &var1 );
   reader->AddVariable( "myvar2 := var1-var2", &var2 );
   reader->AddVariable( "var3",                &var3 );
   reader->AddVariable( "var4",                &var4 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1,spec2;
   reader->AddSpectator( "spec1 := var1*2",   &spec1 );
   reader->AddSpectator( "spec2 := var1*3",   &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }

   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile ); 
      }
   }
   
   // Book output histograms
   UInt_t nbin = 100;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns per-event error, fill in histogram, and also fill significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //   
   TFile *input(0);
   TString fname = "./tmva_example.root";   
   if (!gSystem->AccessPathName( fname )) 
      input = TFile::Open( fname ); // check if file in local directory exists
   else    
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
   
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("TreeS");
   Float_t userVar1, userVar2;
   theTree->SetBranchAddress( "var1", &userVar1 );
   theTree->SetBranchAddress( "var2", &userVar2 );
   theTree->SetBranchAddress( "var3", &var3 );
   theTree->SetBranchAddress( "var4", &var4 );

   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

      var1 = userVar1 + userVar2;
      var2 = userVar1 - userVar2;

      // --- Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer  
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   // --- Write histograms

   TFile *target  = new TFile( "TMVApp.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write(); 
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;
  
   delete reader;
    
   std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
} 
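Both macros above only histogram the classifier response. If a per-event value is needed downstream, the same EvaluateMVA call can fill a small output tree instead; the following is a rough sketch assuming a reader and input tree prepared as in the examples (the output file, tree and branch names are placeholders, and the "BDT method" must have been booked):

   TFile *outFile = new TFile( "TMVApp_tree.root", "RECREATE" );  // placeholder output file
   Float_t bdtResponse;
   TTree  *outTree = new TTree( "mvaTree", "per-event MVA response" );
   outTree->Branch( "bdt", &bdtResponse, "bdt/F" );
   for (Long64_t ievt = 0; ievt < theTree->GetEntries(); ievt++) {
      theTree->GetEntry(ievt);                        // update the branch variables; recompute any derived reader inputs here
      bdtResponse = reader->EvaluateMVA( "BDT method" );
      outTree->Fill();
   }
   outTree->Write();
   outFile->Close();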
Example 3
void Classify_HWW( TString myMethodList = "" ) 
{   
#ifdef __CINT__
  gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

  //--------------------------------------------------------------------
  // path to weights dir (this is where MVA training info is stored)
  // output root file will be stored at [path]/output
  //--------------------------------------------------------------------

  TString path   = "Trainings/v5/H160_WW_10vars_dphi10/";
  //TString path   = "./";

  //-----------------------------------
  // select samples to run over
  //-----------------------------------

  char* babyPath = "/tas/cerati/HtoWWmvaBabies/latest";
  int mH         = 160;  // choose Higgs mass

  vector<char*> samples;
  samples.push_back("WWTo2L2Nu");
  samples.push_back("GluGluToWWTo4L");
  samples.push_back("WZ");
  samples.push_back("ZZ");
  samples.push_back("TTJets");
  samples.push_back("tW");
  samples.push_back("WJetsToLNu");
  samples.push_back("DY");
  //samples.push_back("WJetsFO3");

  if     ( mH == 130 ) samples.push_back("Higgs130");
  else if( mH == 160 ) samples.push_back("Higgs160");
  else if( mH == 200 ) samples.push_back("Higgs200");
  else{
    cout << "Error, unrecognized Higgs mass " << mH << " GeV, quitting" << endl;
    exit(0);
  }

  //--------------------------------------------------------------------------------
  // IMPORTANT: set the following variables to the same set used for MVA training!!!
  //--------------------------------------------------------------------------------
  
  std::map<std::string,int> mvaVar;
  mvaVar[ "lephard_pt" ]        = 1;
  mvaVar[ "lepsoft_pt" ]        = 1;
  mvaVar[ "dil_dphi" ]          = 1;
  mvaVar[ "dil_mass" ]          = 1;
  mvaVar[ "event_type" ]        = 0;
  mvaVar[ "met_projpt" ]        = 1;
  mvaVar[ "met_pt" ]            = 0;
  mvaVar[ "mt_lephardmet" ]     = 1;
  mvaVar[ "mt_lepsoftmet" ]     = 1;
  mvaVar[ "mthiggs" ]           = 1;
  mvaVar[ "dphi_lephardmet" ]   = 1;
  mvaVar[ "dphi_lepsoftmet" ]   = 1;
  mvaVar[ "lepsoft_fbrem" ]     = 0;
  mvaVar[ "lepsoft_eOverPIn" ]  = 0;
  mvaVar[ "lepsoft_qdphi" ]     = 0;

  //---------------------------------------------------------------

  // This loads the library
  TMVA::Tools::Instance();

  // Default MVA methods to be trained + tested
  std::map<std::string,int> Use;

  // --- Cut optimisation
  Use["Cuts"]            = 1;
  Use["CutsD"]           = 1;
  Use["CutsPCA"]         = 0;
  Use["CutsGA"]          = 0;
  Use["CutsSA"]          = 0;
  // 
  // --- 1-dimensional likelihood ("naive Bayes estimator")
  Use["Likelihood"]      = 1;
  Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
  Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
  Use["LikelihoodKDE"]   = 0;
  Use["LikelihoodMIX"]   = 0;
  //
  // --- Multidimensional likelihood and Nearest-Neighbour methods
  Use["PDERS"]           = 1;
  Use["PDERSD"]          = 0;
  Use["PDERSPCA"]        = 0;
  Use["PDEFoam"]         = 1;
  Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
  Use["KNN"]             = 1; // k-nearest neighbour method
  //
  // --- Linear Discriminant Analysis
  Use["LD"]              = 1; // Linear Discriminant identical to Fisher
  Use["Fisher"]          = 0;
  Use["FisherG"]         = 0;
  Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
  Use["HMatrix"]         = 0;
  //
  // --- Function Discriminant analysis
  Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetics Algorithm
  Use["FDA_SA"]          = 0;
  Use["FDA_MC"]          = 0;
  Use["FDA_MT"]          = 0;
  Use["FDA_GAMT"]        = 0;
  Use["FDA_MCMT"]        = 0;
  //
  // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
  Use["MLP"]             = 0; // Recommended ANN
  Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
  Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and bayesian regulator
  Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
  Use["TMlpANN"]         = 0; // ROOT's own ANN
  //
  // --- Support Vector Machine 
  Use["SVM"]             = 1;
  // 
  // --- Boosted Decision Trees
  Use["BDT"]             = 1; // uses Adaptive Boost
  Use["BDTG"]            = 0; // uses Gradient Boost
  Use["BDTB"]            = 0; // uses Bagging
  Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
  // 
  // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
  Use["RuleFit"]         = 1;
  // ---------------------------------------------------------------
  Use["Plugin"]          = 0;
  Use["Category"]        = 0;
  Use["SVM_Gauss"]       = 0;
  Use["SVM_Poly"]        = 0;
  Use["SVM_Lin"]         = 0;

  std::cout << std::endl;
  std::cout << "==> Start TMVAClassificationApplication" << std::endl;

  // Select methods (don't look at this code - not of interest)
  if (myMethodList != "") {
    for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

    std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
    for (UInt_t i=0; i<mlist.size(); i++) {
      std::string regMethod(mlist[i]);

      if (Use.find(regMethod) == Use.end()) {
        std::cout << "Method \"" << regMethod 
                  << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
        for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
          std::cout << it->first << " ";
        }
        std::cout << std::endl;
        return;
      }
      Use[regMethod] = 1;
    }
  }

  // --------------------------------------------------------------------------------------------------

  const unsigned int nsamples = samples.size();
  
  for( unsigned int i = 0 ; i < nsamples ; ++i ){

    // --- Create the Reader object

    TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

    // Create a set of variables and declare them to the reader
    // - the variable names MUST correspond in name and type to those given in the weight file(s) used
    //    Float_t var1, var2;
    //    Float_t var3, var4;
    //    reader->AddVariable( "myvar1 := var1+var2", &var1 );
    //    reader->AddVariable( "myvar2 := var1-var2", &var2 );
    //    reader->AddVariable( "var3",                &var3 );
    //    reader->AddVariable( "var4",                &var4 );

    Float_t lephard_pt;
    Float_t lepsoft_pt;
    Float_t dil_dphi;
    Float_t dil_mass;
    Float_t event_type;
    Float_t met_projpt;
    Float_t met_pt;
    Float_t mt_lephardmet;
    Float_t mt_lepsoftmet;
    Float_t mthiggs;
    Float_t dphi_lephardmet;
    Float_t dphi_lepsoftmet;
    Float_t lepsoft_fbrem;
    Float_t lepsoft_eOverPIn;
    Float_t lepsoft_qdphi;

    if( mvaVar["lephard_pt"])       reader->AddVariable( "lephard_pt"                  ,   &lephard_pt        ); 
    if( mvaVar["lepsoft_pt"])       reader->AddVariable( "lepsoft_pt"                  ,   &lepsoft_pt        ); 
    if( mvaVar["dil_dphi"])         reader->AddVariable( "dil_dphi"                    ,   &dil_dphi          ); 
    if( mvaVar["dil_mass"])         reader->AddVariable( "dil_mass"                    ,   &dil_mass          ); 
    if( mvaVar["event_type"])       reader->AddVariable( "event_type"                  ,   &event_type        );
    if( mvaVar["met_projpt"])       reader->AddVariable( "met_projpt"                  ,   &met_pt            );
    if( mvaVar["met_pt"])           reader->AddVariable( "met_pt"                      ,   &met_pt            );
    if( mvaVar["mt_lephardmet"])    reader->AddVariable( "mt_lephardmet"               ,   &mt_lephardmet     );
    if( mvaVar["mt_lepsoftmet"])    reader->AddVariable( "mt_lepsoftmet"               ,   &mt_lepsoftmet     );
    if( mvaVar["mthiggs"])          reader->AddVariable( "mthiggs"                     ,   &mthiggs           );  
    if( mvaVar["dphi_lephardmet"])  reader->AddVariable( "dphi_lephardmet"             ,   &dphi_lephardmet   );
    if( mvaVar["dphi_lepsoftmet"])  reader->AddVariable( "dphi_lepsoftmet"             ,   &dphi_lepsoftmet   );
    if( mvaVar["lepsoft_fbrem"])    reader->AddVariable( "lepsoft_fbrem"               ,   &lepsoft_fbrem     );
    if( mvaVar["lepsoft_eOverPIn"]) reader->AddVariable( "lepsoft_eOverPIn"            ,   &lepsoft_eOverPIn  );
    if( mvaVar["lepsoft_qdphi"])    reader->AddVariable( "lepsoft_q * lepsoft_dPhiIn"  ,   &lepsoft_qdphi     );
 

    // Spectator variables declared in the training have to be added to the reader, too
    //    Float_t spec1,spec2;
    //    reader->AddSpectator( "spec1 := var1*2",   &spec1 );
    //    reader->AddSpectator( "spec2 := var1*3",   &spec2 );

    Float_t Category_cat1, Category_cat2, Category_cat3;
    if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      //       reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      //       reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      //       reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
    }

    // --- Book the MVA methods

    //--------------------------------------------------------------------------------------
    // tell Classify_HWW where to find the weights dir, which contains the trained MVA's. 
    // In this example, the weights dir is located at [path]/[dir]
    // and the output root file is written to [path]/[output]
    //--------------------------------------------------------------------------------------

    TString dir    = path + "weights/";
    TString outdir = path + "output/";
    TString prefix = "TMVAClassification";

    // Book method(s)
    for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
        TString methodName = TString(it->first) + TString(" method");
        TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
        reader->BookMVA( methodName, weightfile ); 
      }
    }
   
    // Book output histograms
    UInt_t nbin = 1000;
    TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
    TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
    TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
    TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
    TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

    if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );               
    if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
    if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
    if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
    if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
    if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
    if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
    if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
    if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
    if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
    if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
    if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
    if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
    if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
    if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
    if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
    if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
    if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
    if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
    if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -1. , 1. );
    if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
    if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
    if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
    if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
    if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
    if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
    if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
    if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
    if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
    if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

    if (Use["Likelihood"])    histLk      ->Sumw2();
    if (Use["LikelihoodD"])   histLkD     ->Sumw2();
    if (Use["LikelihoodPCA"]) histLkPCA   ->Sumw2();
    if (Use["LikelihoodKDE"]) histLkKDE   ->Sumw2();
    if (Use["LikelihoodMIX"]) histLkMIX   ->Sumw2();
    if (Use["PDERS"])         histPD      ->Sumw2();
    if (Use["PDERSD"])        histPDD     ->Sumw2();
    if (Use["PDERSPCA"])      histPDPCA   ->Sumw2();
    if (Use["KNN"])           histKNN     ->Sumw2();
    if (Use["HMatrix"])       histHm      ->Sumw2();
    if (Use["Fisher"])        histFi      ->Sumw2();
    if (Use["FisherG"])       histFiG     ->Sumw2();
    if (Use["BoostedFisher"]) histFiB     ->Sumw2();
    if (Use["LD"])            histLD      ->Sumw2();
    if (Use["MLP"])           histNn      ->Sumw2();
    if (Use["MLPBFGS"])       histNnbfgs  ->Sumw2();
    if (Use["MLPBNN"])        histNnbnn   ->Sumw2();
    if (Use["CFMlpANN"])      histNnC     ->Sumw2();
    if (Use["TMlpANN"])       histNnT     ->Sumw2();
    if (Use["BDT"])           histBdt     ->Sumw2();
    if (Use["BDTD"])          histBdtD    ->Sumw2();
    if (Use["BDTG"])          histBdtG    ->Sumw2();
    if (Use["RuleFit"])       histRf      ->Sumw2();
    if (Use["SVM_Gauss"])     histSVMG    ->Sumw2();
    if (Use["SVM_Poly"])      histSVMP    ->Sumw2();
    if (Use["SVM_Lin"])       histSVML    ->Sumw2();
    if (Use["FDA_MT"])        histFDAMT   ->Sumw2();
    if (Use["FDA_GA"])        histFDAGA   ->Sumw2();
    if (Use["Category"])      histCat     ->Sumw2();
    if (Use["Plugin"])        histPBdt    ->Sumw2();

    // PDEFoam also returns per-event error, fill in histogram, and also fill significance
    if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
    }

    // Book example histogram for probability (the other methods are done similarly)
    TH1F *probHistFi(0), *rarityHistFi(0);
    if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
    }

    // Prepare the input tree (this must be replaced by your own data source);
    // here the input is a chain of "baby" ntuples for the sample currently being processed.

    TChain *ch = new TChain("Events");

    if( strcmp( samples.at(i) , "DY" ) == 0 ){
      ch -> Add( Form("%s/DYToMuMuM20_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/DYToMuMuM10To20_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/DYToEEM20_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/DYToEEM10To20_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/DYToTauTauM20_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/DYToTauTauM10To20_PU_testFinal_baby.root",babyPath) );
    }
    if( strcmp( samples.at(i) , "WJetsFO3" ) == 0 ){
      ch -> Add( Form("%s/WJetsToLNu_FOv3_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/WToLNu_FOv3_testFinal_baby.root",babyPath) );
    }
    else if( strcmp( samples.at(i) , "Higgs130" ) == 0 ){
      ch -> Add( Form("%s/HToWWTo2L2NuM130_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWToLNuTauNuM130_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWTo2Tau2NuM130_PU_testFinal_baby.root",babyPath) );
    }
    else if( strcmp( samples.at(i) , "Higgs160" ) == 0 ){
      ch -> Add( Form("%s/HToWWTo2L2NuM160_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWToLNuTauNuM160_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWTo2Tau2NuM160_PU_testFinal_baby.root",babyPath) );
    }
    else if( strcmp( samples.at(i) , "Higgs200" ) == 0 ){
      ch -> Add( Form("%s/HToWWTo2L2NuM200_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWToLNuTauNuM200_PU_testFinal_baby.root",babyPath) );
      ch -> Add( Form("%s/HToWWTo2Tau2NuM200_PU_testFinal_baby.root",babyPath) );
    }
    else{
      ch -> Add( Form("%s/%s_PU_testFinal_baby.root",babyPath,samples.at(i)) );
    }

    // --- Event loop

    // Prepare the event tree
    // - here the variable names have to correspond to your tree
    // - you can use the same variables as above which is slightly faster,
    //   but of course you can use different ones and copy the values inside the event loop
    //
  
    TTree *theTree     = (TTree*) ch;
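    // TChain inherits from TTree, so the chain can be used directly as a TTree below.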

    std::cout << "--- Using input files: -------------------" <<  std::endl;

    TObjArray *listOfFiles = ch->GetListOfFiles();
    TIter fileIter(listOfFiles);
    TChainElement* currentFile = 0;
    
    while((currentFile = (TChainElement*)fileIter.Next())) {
      std::cout << currentFile->GetTitle() << std::endl;
    }

    Float_t lephard_pt_;
    Float_t lepsoft_pt_;
    Float_t lepsoft_fr_;
    Float_t dil_dphi_;
    Float_t dil_mass_;
    Float_t event_type_;
    Float_t met_projpt_;
    Int_t   jets_num_;
    Int_t   extralep_num_;
    Int_t   lowptbtags_num_;
    Int_t   softmu_num_;
    Float_t event_scale1fb_;
    Float_t met_pt_;
    Int_t   lepsoft_passTighterId_;
    Float_t mt_lephardmet_;
    Float_t mt_lepsoftmet_;
    Float_t mthiggs_;
    Float_t dphi_lephardmet_;
    Float_t dphi_lepsoftmet_;
    Float_t lepsoft_fbrem_;
    Float_t lepsoft_eOverPIn_;
    Float_t lepsoft_q_;
    Float_t lepsoft_dPhiIn_;

    theTree->SetBranchAddress( "lephard_pt_"             ,   &lephard_pt_              ); 
    theTree->SetBranchAddress( "lepsoft_pt_"             ,   &lepsoft_pt_              ); 
    theTree->SetBranchAddress( "lepsoft_fr_"             ,   &lepsoft_fr_              ); 
    theTree->SetBranchAddress( "dil_dphi_"               ,   &dil_dphi_                ); 
    theTree->SetBranchAddress( "dil_mass_"               ,   &dil_mass_                ); 
    theTree->SetBranchAddress( "event_type_"             ,   &event_type_              ); 
    theTree->SetBranchAddress( "met_projpt_"             ,   &met_projpt_              ); 
    theTree->SetBranchAddress( "jets_num_"               ,   &jets_num_                ); 
    theTree->SetBranchAddress( "extralep_num_"           ,   &extralep_num_            ); 
    theTree->SetBranchAddress( "lowptbtags_num_"         ,   &lowptbtags_num_          ); 
    theTree->SetBranchAddress( "softmu_num_"             ,   &softmu_num_              ); 
    theTree->SetBranchAddress( "event_scale1fb_"         ,   &event_scale1fb_          ); 
    theTree->SetBranchAddress( "lepsoft_passTighterId_"  ,   &lepsoft_passTighterId_   );
    theTree->SetBranchAddress( "met_pt_"                 ,   &met_pt_                  );
    theTree->SetBranchAddress( "mt_lephardmet_"          ,   &mt_lephardmet_           );
    theTree->SetBranchAddress( "mt_lepsoftmet_"          ,   &mt_lepsoftmet_           );
    theTree->SetBranchAddress( "mthiggs_"                ,   &mthiggs_                 );
    theTree->SetBranchAddress( "dphi_lephardmet_"        ,   &dphi_lephardmet_         );
    theTree->SetBranchAddress( "dphi_lepsoftmet_"        ,   &dphi_lepsoftmet_         );
    theTree->SetBranchAddress( "lepsoft_fbrem_"          ,   &lepsoft_fbrem_           );
    theTree->SetBranchAddress( "lepsoft_eOverPIn_"       ,   &lepsoft_eOverPIn_        );
    theTree->SetBranchAddress( "lepsoft_q_"              ,   &lepsoft_q_               );
    theTree->SetBranchAddress( "lepsoft_dPhiIn_"         ,   &lepsoft_dPhiIn_          );

    // Efficiency calculator for cut method
    Int_t    nSelCutsGA = 0;
    Double_t effS       = 0.7;
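    // For the Cuts methods, EvaluateMVA( name, effS ) returns a boolean: whether the event
    // passes the cut ensemble corresponding to the requested signal efficiency effS.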

    std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

    std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
    TStopwatch sw;
    sw.Start();

    int npass   = 0;
    float yield = 0.;
    
    for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

      //-------------------------------------------------------
      // event selection
      //-------------------------------------------------------

      if( dil_dphi_ > 1. ) continue;

      //em
      if( event_type_ > 0.5 && event_type_ < 2.5 ){
        if( met_projpt_ < 20. )   continue;
      }
      //ee/mm
      if( event_type_ < 0.5 || event_type_ > 2.5 ){
        if( met_projpt_ < 35. )   continue;
      }
      if( lephard_pt_ < 20.           )             continue;
      if( jets_num_ > 0               )             continue;
      if( extralep_num_ > 0           )             continue;
      if( lowptbtags_num_ > 0         )             continue;
      if( softmu_num_ > 0             )             continue;
      if( dil_mass_ < 12.             )             continue;
      if( lepsoft_passTighterId_ == 0 )             continue;
      //if( event_type_ < 1.5    )                    continue;
      //if( event_type > 1.5 && lepsoft_pt_ < 15. )   continue;

      //mH-dependent selection
      if( mH == 130 ){
        if( lepsoft_pt_ < 10.    )                  continue;      
        if( dil_mass_   > 90.    )                  continue;     
      }
      else if( mH == 160 ){
        if( lepsoft_pt_ < 20.    )                  continue;      
        if( dil_mass_   > 100.   )                  continue;     
      }
      else if( mH == 200 ){
        if( lepsoft_pt_ < 20.    )                  continue;      
        if( dil_mass_   > 130.   )                  continue;     
      }

      float weight = event_scale1fb_ * lepsoft_fr_ * 0.5;

      //--------------------------------------------------------
      // important: here we associate branches to MVA variables
      //--------------------------------------------------------

      lephard_pt        = lephard_pt_;
      lepsoft_pt        = lepsoft_pt_;
      dil_mass          = dil_mass_;
      dil_dphi          = dil_dphi_;
      event_type        = event_type_;
      met_pt            = met_pt_;
      met_projpt        = met_projpt_;
      mt_lephardmet     = mt_lephardmet_;
      mt_lepsoftmet     = mt_lepsoftmet_;
      mthiggs           = mthiggs_;
      dphi_lephardmet   = dphi_lephardmet_;
      dphi_lepsoftmet   = dphi_lepsoftmet_;
      lepsoft_fbrem     = lepsoft_fbrem_;
      lepsoft_eOverPIn  = lepsoft_eOverPIn_;
      lepsoft_qdphi     = lepsoft_q_ * lepsoft_dPhiIn_;
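      // The Reader evaluates each method using the current values at the addresses registered
      // with AddVariable, so these assignments must precede the EvaluateMVA calls below.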

      npass++;
      yield+=weight;

      //       var1 = userVar1 + userVar2;
      //       var2 = userVar1 - userVar2;

      // --- Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
        // Cuts is a special case: give the desired signal efficiency
        Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
        if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) , weight);
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) , weight);
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) , weight);
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) , weight);
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) , weight);
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) , weight);
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) , weight);
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) , weight);
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) , weight);
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) , weight);
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) , weight);
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) , weight);
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) , weight);
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) , weight);
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) , weight);
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) , weight);
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) , weight);
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) , weight);
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) , weight);
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) , weight);
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) , weight);
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) , weight);
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) , weight);
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) , weight);
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) , weight);
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) , weight);
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) , weight);
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) , weight);
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) , weight);
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) , weight);

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
        Double_t val = reader->EvaluateMVA( "PDEFoam method" );
        Double_t err = reader->GetMVAError();
        histPDEFoam   ->Fill( val );
        histPDEFoamErr->Fill( err );         
        if (err>1.e-50) histPDEFoamSig->Fill( val/err , weight);
      }         
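      // GetMVAError returns the PDEFoam per-event uncertainty; val/err above is filled as a
      // crude per-event significance.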

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
        probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) , weight);
        rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) , weight);
      }
    }

    std::cout << npass << " events passing selection, yield " << yield << std::endl;
 
    // Get elapsed time
    sw.Stop();
    std::cout << "--- End of event loop: "; sw.Print();

    // Get efficiency for cuts classifier
    if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                 << " (for a required signal efficiency of " << effS << ")" << std::endl;

    if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
        std::vector<Double_t> cutsMin;
        std::vector<Double_t> cutsMax;
        mcuts->GetCuts( 0.7, cutsMin, cutsMax );
        std::cout << "--- -------------------------------------------------------------" << std::endl;
        std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
        for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
          std::cout << "... Cut: " 
                    << cutsMin[ivar] 
                    << " < \"" 
                    << mcuts->GetInputVar(ivar)
                    << "\" <= " 
                    << cutsMax[ivar] << std::endl;
        }
        std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
    }

    // --- Write histograms
    cout << "dir " << dir << endl;
    char* mydir = outdir;
    TFile *target  = new TFile( Form("%s/%s.root",mydir,samples.at(i) ) ,"RECREATE" );
    cout << "Writing to file " << Form("%s/%s.root",mydir,samples.at(i) ) << endl;

    if (Use["Likelihood"   ])   histLk     ->Write();
    if (Use["LikelihoodD"  ])   histLkD    ->Write();
    if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
    if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
    if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
    if (Use["PDERS"        ])   histPD     ->Write();
    if (Use["PDERSD"       ])   histPDD    ->Write();
    if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
    if (Use["KNN"          ])   histKNN    ->Write();
    if (Use["HMatrix"      ])   histHm     ->Write();
    if (Use["Fisher"       ])   histFi     ->Write();
    if (Use["FisherG"      ])   histFiG    ->Write();
    if (Use["BoostedFisher"])   histFiB    ->Write();
    if (Use["LD"           ])   histLD     ->Write();
    if (Use["MLP"          ])   histNn     ->Write();
    if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
    if (Use["MLPBNN"       ])   histNnbnn  ->Write();
    if (Use["CFMlpANN"     ])   histNnC    ->Write();
    if (Use["TMlpANN"      ])   histNnT    ->Write();
    if (Use["BDT"          ])   histBdt    ->Write();
    if (Use["BDTD"         ])   histBdtD   ->Write();
    if (Use["BDTG"         ])   histBdtG   ->Write(); 
    if (Use["RuleFit"      ])   histRf     ->Write();
    if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
    if (Use["SVM_Poly"     ])   histSVMP   ->Write();
    if (Use["SVM_Lin"      ])   histSVML   ->Write();
    if (Use["FDA_MT"       ])   histFDAMT  ->Write();
    if (Use["FDA_GA"       ])   histFDAGA  ->Write();
    if (Use["Category"     ])   histCat    ->Write();
    if (Use["Plugin"       ])   histPBdt   ->Write();

    // Write also error and significance histos
    if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

    // Write also probability hists
    if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
    target->Close();

    delete reader;
    
    std::cout << "==> TMVAClassificationApplication is done with sample " << samples.at(i) << endl << std::endl;
  } 
}
Example #4
void DYPtZ_HF_BDTCut( TString myMethodList = "" ) 
{   
#ifdef __CINT__
   gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 0;
   Use["CutsD"]           = 0;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   // 
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 0;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 0; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 0;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 0;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 0; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 0; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 0; // minimisation of user-defined function using Genetics Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 0; // Recommended ANN with BFGS training method and bayesian regulator
   Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine 
   Use["SVM"]             = 0;
   // 
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   // 
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 0;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start DYPtZ_HF_BDTCut" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod 
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t Hmass, Emumass;
   Float_t Hpt, Zpt;
   Float_t CSV0, CSV1;
   Float_t DeltaPhiHV, DetaJJ;
   Int_t   nJets, eventFlavor, Naj;
   Float_t BDTvalue, Trigweight, B2011PUweight, A2011PUweight, btag2CSF, MET;
   Float_t alpha_j, qtb1, jetPhi0, jetPhi1, jetEta0, jetEta1, Zphi, Hphi;
   Float_t Ht, EvntShpCircularity, jetCHF0, jetCHF1;
   Float_t EtaStandDev, lep_pfCombRelIso0, lep_pfCombRelIso1, EvntShpIsotropy;
   Float_t lep0pt, lep1pt, UnweightedEta, DphiJJ, RMS_eta, EvntShpSphericity;
   Float_t PtbalZH, EventPt, AngleEMU, Centrality, EvntShpAplanarity;
   Int_t   naJets, nSV;
   Float_t Mte, Mtmu, dPhiHMET, Dphiemu, delRjj, delRemu, DphiZMET, DeltaPhijetMETmin;
   Float_t MassEleb0, DphiEleMET, PtbalMETH, dphiEleMET, dEtaJJ, ScalarSumPt;
   Float_t ZmassSVD, AngleHemu, ProjVisT, topMass, topPt, ZmassSVDnegSol, Mt, Zmass, ZmassNegInclu;
   Float_t dphiEMU, dphiZMET;
	
	reader->AddVariable( "Hmass", &Hmass );
	//reader->AddVariable( "Naj", &Naj );
	reader->AddVariable( "CSV0", &CSV0 );
	reader->AddVariable( "Emumass", &Emumass );
	reader->AddVariable( "DeltaPhiHV", &DeltaPhiHV );
	reader->AddVariable( "Mt", &Mt );
	reader->AddVariable( "dPhiHMET", &dPhiHMET );
	reader->AddVariable( "dphiEMU := abs(Dphiemu)", &dphiEMU );
	reader->AddVariable( "dphiZMET:=abs(DphiZMET)", &dphiZMET );
	reader->AddVariable( "PtbalMETH", &PtbalMETH );
	reader->AddVariable( "EtaStandDev", &EtaStandDev );
	reader->AddVariable( "jetCHF0", &jetCHF0 );
	reader->AddVariable( "ProjVisT", &ProjVisT );

	
   // Spectator variables declared in the training have to be added to the reader, too
 
//   reader->AddSpectator( "UnweightedEta",   &UnweightedEta );
  // reader->AddSpectator( "lep0pt",   &lep0pt );

/*   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }
*/
   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile ); 
      }
   }
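   // With the settings above, booking e.g. the BDT reads the weight file
   // "weights/TMVAClassification_BDT.weights.xml" produced during training.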
   
	// Book output histograms
   	TH1F* hCHFb0_OpenSelection= new TH1F		("hCHFb0_OpenSelection", "charged Hadron Energy Fraction b1", 40, 0.0, 1.2);
   	TH1F* hCHFb1_OpenSelection= new TH1F		("hCHFb1_OpenSelection", "charged Hadron Energy Fraction b2", 40, 0.0, 1.2);
 	TH1F* hPtjj_OpenSelection= new TH1F		("hPtjj_OpenSelection","Pt of two b jets with highest CSV ", 50, 0.0, 400);
 	TH1F* hPtmumu_OpenSelection= new TH1F	("hPtmumu_OpenSelection","Pt of two muons with highest pt ", 50, 0.0, 400);
	TH1F* hPtbalZH_OpenSelection= new TH1F	("hPtbalZH_OpenSelection", "Pt balance of Z and H", 40, -80, 80);
	TH1F* hPtmu0_OpenSelection= new TH1F		("hPtmu0_OpenSelection","Pt of muon with highest pt ", 30, 0.0, 300);
	TH1F* hPtmu1_OpenSelection= new TH1F		("hPtmu1_OpenSelection","Pt of muon with second highest pt ", 30, 0.0, 300);
	TH1F* hPFRelIsomu0_OpenSelection= new TH1F		("hPFRelIsomu0_OpenSelection", "PF Rel Iso of muon with highest Pt", 40, 0., 0.2);
	TH1F* hPFRelIsomu1_OpenSelection= new TH1F		("hPFRelIsomu1_OpenSelection", "PF Rel Iso of muon with second highest Pt", 40, 0., 0.2);
	TH1F* hCSV0_OpenSelection= new TH1F		("hCSV0_OpenSelection","Jet with highest CSV ",			40, 0, 1.5);
	TH1F* hCSV1_OpenSelection= new TH1F		("hCSV1_OpenSelection","Jet with second highest CSV ",		40, 0, 1.5);
	TH1F* hdphiVH_OpenSelection= new TH1F	("hdphiVH_OpenSelection","Delta phi between Z and Higgs ", 50, -0.1, 4.5);
	TH1F* hdetaJJ_OpenSelection= new TH1F	("hdetaJJ_OpenSelection","Delta eta between two jets ", 60, -4, 4);
	TH1F* hNjets_OpenSelection= new TH1F	("hNjets_OpenSelection", "Number of Jets",		13, -2.5, 10.5);
	TH1F* hMjj_OpenSelection	= new TH1F	("hMjj_OpenSelection",  "Invariant Mass of two Jets ",		50, 0, 300);
	TH1F* hMmumu_OpenSelection	= new TH1F	("hMmumu_OpenSelection",  "Invariant Mass of two muons ",	75, 0, 200);
    TH1F* hRMSeta_OpenSelection= new TH1F	("hRMSeta_OpenSelection", "RMS Eta",		30, 0, 3);
    TH1F* hStaDeveta_OpenSelection= new TH1F	("hStaDeveta_OpenSelection", "Standard Deviation Eta",		30, 0, 3);
	TH1F* hUnweightedEta_OpenSelection= new TH1F	("hUnweightedEta_OpenSelection",  "Unweighted Eta ",		50, 0, 15);		
	TH1F* hdphiJJ_vect_OpenSelection= new TH1F	("hdphiJJ_vect_OpenSelection", "Delta phi between two jets",  30, -3.5, 4);
	TH1F* hCircularity_OpenSelection= new TH1F("hCircularity_OpenSelection","EventShapeVariables circularity", 30, 0.0, 1.2);
	TH1F* hHt_OpenSelection= new TH1F("hHt_OpenSelection","scalar sum of pt of four particles", 50, 0.0, 500);
    TH1F* hCentrality_OpenSelection= new TH1F	("hCentrality_OpenSelection", "Centrality", 40, 0.0, 0.8);
    TH1F* hEventPt_OpenSelection= new TH1F	("hEventPt_OpenSelection", "Pt of HV system", 50, 0.0, 100);
    TH1F* hAngleEMU_OpenSelection= new TH1F	("hAngleEMU_OpenSelection", "AngleEMU between H and Z", 45, 0, 4.5);
    TH1F* hSphericity_OpenSelection= new TH1F	("hSphericity_OpenSelection", "EventShapeVariables sphericity", 50, 0.0, 1);
    TH1F* hAplanarity_OpenSelection= new TH1F	("hAplanarity_OpenSelection", "EventShapeVariables Aplanarity", 50, -0.1, .4);
    TH1F* hIsotropy_OpenSelection= new TH1F	("hIsotropy_OpenSelection",  "EventShapeVariables isotropy", 30, 0.0, 1.3);
    TH2F* hDphiDetajj_OpenSelection= new TH2F	("hDphiDetajj_OpenSelection", "#Delta#phi vs #Delta#eta JJ", 25, -5, 5, 25, -5, 5);
	TH1F* hMtmu_OpenSelection= new TH1F ("hMtmu_OpenSelection", "Mt with respect to Muon", 101, -0.1, 200);
	TH1F* hMte_OpenSelection= new TH1F ("hMte_OpenSelection", "Mt with respect to Electron", 101, -0.1, 200);
	TH1F* hdPhiHMET_OpenSelection= new TH1F ("hdPhiHMET_OpenSelection", "Delta phi between MET and Higgs", 50, -0.1, 4.5);
	TH1F* hDphiemu_OpenSelection= new TH1F ("hDphiemu_OpenSelection", "Delta phi between e and muon", 50, -3.5, 4.5);
	TH1F* hdelRjj_OpenSelection= new TH1F ("hdelRjj_OpenSelection", "Delta R jj", 55, 0, 5.5);
	TH1F* hdelRemu_OpenSelection= new TH1F ("hdelRemu_OpenSelection", "Delta R emu", 55, 0, 5.5);
	TH1F* hDphiZMET_OpenSelection= new TH1F ("hDphiZMET_OpenSelection", "Delta phi between Z and MET",  71, -3.5, 4);
	TH1F* hDeltaPhijetMETmin_OpenSelection= new TH1F ("hDeltaPhijetMETmin_OpenSelection", "Delta phi between MET and nearest jet", 50, -0.1, 4.5);
	TH1F* hAngleHemu_OpenSelection= new TH1F ("hAngleHemu_OpenSelection", "Angle between H and Z", 30, 0, 3.5);
	TH1F* hProjVisT_OpenSelection= new TH1F ("hProjVisT_OpenSelection", "Transverse component of projection of Z onto bisector", 80, 0, 200);
	TH1F* htopMass_OpenSelection= new TH1F ("htopMass_OpenSelection", "Top Mass single lepton", 100, 75, 375);
	TH1F* htopPt_OpenSelection= new TH1F ("htopPt_OpenSelection", "Pt of Top", 125, 0, 250);
	TH1F* hVMt_OpenSelection= new TH1F ("hVMt_OpenSelection", "VMt", 75, 0, 150);
	TH1F* hZmassSVD_OpenSelection= new TH1F ("hZmassSVD_OpenSelection", "Invariant Mass of two Leptons corrected SVD", 75, 0, 150);
	TH1F* hZmassSVDnegSol_OpenSelection= new TH1F ("hZmassSVDnegSol_OpenSelection", "Invariant Mass of two Leptons corrected SVD", 100, -50, 200);
	TH1F* hZmass_OpenSelection= new TH1F ("hZmass_OpenSelection", "Zmass ", 5, 0, 150);
	TH1F* hZmassNegInclu_OpenSelection= new TH1F ("hZmassNegInclu_OpenSelection", "Invariant Mass of two Leptons corrected SVD", 100, -50, 200);
	
	
	TTree *treeWithBDT = new TTree("treeWithBDT","Tree with BDT output");
	treeWithBDT->SetDirectory(0);
	treeWithBDT->Branch("nJets",&nJets, "nJets/I");
	treeWithBDT->Branch("naJets",&naJets, "naJets/I");
	treeWithBDT->Branch("eventFlavor",&eventFlavor, "eventFlavor/I");
	treeWithBDT->Branch("CSV0",&CSV0, "CSV0/F");
	treeWithBDT->Branch("CSV1",&CSV1, "CSV1/F");
	treeWithBDT->Branch("Emumass",&Emumass, "Emumass/F");
	treeWithBDT->Branch("Hmass",&Hmass, "Hmass/F");
	treeWithBDT->Branch("DeltaPhiHV",&DeltaPhiHV, "DeltaPhiHV/F");
	treeWithBDT->Branch("Hpt",&Hpt, "Hpt/F");
	treeWithBDT->Branch("Zpt",&Zpt, "Zpt/F");
	treeWithBDT->Branch("lep0pt",&lep0pt, "lep0pt/F");
	treeWithBDT->Branch("Ht",&Ht, "Ht/F");
	treeWithBDT->Branch("EtaStandDev",&EtaStandDev, "EtaStandDev/F");
	treeWithBDT->Branch("UnweightedEta",&UnweightedEta, "UnweightedEta/F");
	treeWithBDT->Branch("EvntShpCircularity",&EvntShpCircularity, "EvntShpCircularity/F");
	treeWithBDT->Branch("alpha_j",&alpha_j, "alpha_j/F");
	treeWithBDT->Branch("qtb1",&qtb1, "qtb1/F");
	treeWithBDT->Branch("nSV",&nSV, "nSV/I");
	treeWithBDT->Branch("Trigweight",&Trigweight, "Trigweight/F");
	treeWithBDT->Branch("B2011PUweight",&B2011PUweight, "B2011PUweight/F");
	treeWithBDT->Branch("A2011PUweight",&A2011PUweight, "A2011PUweight/F");
	treeWithBDT->Branch("btag2CSF",&btag2CSF, "btag2CSF/F");
	treeWithBDT->Branch("DetaJJ",&DetaJJ, "DetaJJ/F");
	treeWithBDT->Branch("jetCHF0",&jetCHF0, "jetCHF0/F");
	treeWithBDT->Branch("jetCHF1",&jetCHF1, "jetCHF1/F");
	treeWithBDT->Branch("jetPhi0",&jetPhi0, "jetPhi0/F");
	treeWithBDT->Branch("jetPhi1",&jetPhi1, "jetPhi1/F");
	treeWithBDT->Branch("jetEta0",&jetEta0, "jetEta0/F");
	treeWithBDT->Branch("jetEta1",&jetEta1, "jetEta1/F");
	treeWithBDT->Branch("lep1pt",&lep1pt, "lep1pt/F");
	treeWithBDT->Branch("lep_pfCombRelIso0",&lep_pfCombRelIso0, "lep_pfCombRelIso0/F");
	treeWithBDT->Branch("lep_pfCombRelIso1",&lep_pfCombRelIso1, "lep_pfCombRelIso1/F");
	treeWithBDT->Branch("DphiJJ",&DphiJJ, "DphiJJ/F");
	treeWithBDT->Branch("RMS_eta",&RMS_eta, "RMS_eta/F");
	treeWithBDT->Branch("PtbalZH",&PtbalZH, "PtbalZH/F");
	treeWithBDT->Branch("EventPt",&EventPt, "EventPt/F");
	treeWithBDT->Branch("AngleEMU",&AngleEMU, "AngleEMU/F");
	treeWithBDT->Branch("Centrality",&Centrality, "Centrality/F");
	treeWithBDT->Branch("MET",&MET, "MET/F");
	treeWithBDT->Branch("EvntShpAplanarity",&EvntShpAplanarity, "EvntShpAplanarity/F");
	treeWithBDT->Branch("EvntShpSphericity",&EvntShpSphericity, "EvntShpSphericity/F");
	treeWithBDT->Branch("EvntShpIsotropy",&EvntShpIsotropy, "EvntShpIsotropy/F");
	treeWithBDT->Branch("Zphi",&Zphi, "Zphi/F");
	treeWithBDT->Branch("Hphi",&Hphi, "Hphi/F");
	treeWithBDT->Branch("Mte",&Mte, "Mte/F");
	treeWithBDT->Branch("Mtmu",&Mtmu, "Mtmu/F");
	treeWithBDT->Branch("dPhiHMET",&dPhiHMET, "dPhiHMET/F");
	treeWithBDT->Branch("Dphiemu",&Dphiemu, "Dphiemu/F");
	treeWithBDT->Branch("delRjj",&delRjj, "delRjj/F");
	treeWithBDT->Branch("delRemu",&delRemu, "delRemu/F");
	treeWithBDT->Branch("DphiZMET",&DphiZMET, "DphiZMET/F");
	treeWithBDT->Branch("DeltaPhijetMETmin",&DeltaPhijetMETmin, "DeltaPhijetMETmin/F");
	treeWithBDT->Branch("BDTvalue",&BDTvalue, "BDTvalue/F");
	treeWithBDT->Branch("AngleHemu",&AngleHemu, "AngleHemu/F");
	treeWithBDT->Branch("ProjVisT",&ProjVisT, "ProjVisT/F");
	treeWithBDT->Branch("topMass",&topMass, "topMass/F");
	treeWithBDT->Branch("topPt",&topPt, "topPt/F");
	treeWithBDT->Branch("ZmassSVD",&ZmassSVD, "ZmassSVD/F");
	treeWithBDT->Branch("ZmassSVDnegSol",&ZmassSVDnegSol, "ZmassSVDnegSol/F");
	treeWithBDT->Branch("Zmass",&Zmass, "Zmass/F");
	treeWithBDT->Branch("ZmassNegInclu",&ZmassNegInclu, "ZmassNegInclu/F");
	treeWithBDT->Branch("topMass",&topMass, "topMass/F");
	treeWithBDT->Branch("Mt",&Mt, "Mt/F");

	
   UInt_t nbin = 15;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);
   TH1F   *histMattBdt(0), *histTMVABdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
	if (Use["BDT"])     {
		histMattBdt     = new TH1F( "Matt_BDT",           "Matt_BDT",           15, -1.1, 0.35 );
		histTMVABdt     = new TH1F( "TMVA_BDT",           "TMVA_BDT",           36, -1.0, -0.1 );
	}
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error; book histograms for the response, its error, and the significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare the input tree (this must be replaced by your own data source);
   // here the input is the DY_PtZ sample ntuple.
   //
   TFile *input(0);
   TString fname = "/home/hep/wilken/taus/CMSSW_4_4_2_patch8/src/UserCode/wilken/V21/DY_PtZ.root"; 
   double lumi = 4.457;
   Double_t DY_PtZ_weight = lumi/(lumiZJH/2.0); // lumiZJH is expected to be defined elsewhere //WW_TuneZ2_7TeV_pythia6_tauola
  
   if (!gSystem->AccessPathName( fname )) 
      input = TFile::Open( fname ); // check if file in local directory exists
   else    
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
   
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("BDT_btree");
   
    theTree->SetBranchAddress( "Hmass", &Hmass );
    theTree->SetBranchAddress( "Emumass", &Emumass );
    theTree->SetBranchAddress( "Hpt", &Hpt );
	theTree->SetBranchAddress( "Zpt", &Zpt );
	theTree->SetBranchAddress( "CSV0", &CSV0 );
	theTree->SetBranchAddress( "CSV1", &CSV1 );
	theTree->SetBranchAddress( "DeltaPhiHV", &DeltaPhiHV );
	theTree->SetBranchAddress( "DetaJJ", &DetaJJ );
	theTree->SetBranchAddress( "UnweightedEta", &UnweightedEta );
	theTree->SetBranchAddress( "lep0pt", &lep0pt );
	theTree->SetBranchAddress( "Ht", &Ht );
	theTree->SetBranchAddress( "EvntShpCircularity", &EvntShpCircularity );
	theTree->SetBranchAddress( "nJets", &nJets );
	theTree->SetBranchAddress( "naJets",&naJets);
	theTree->SetBranchAddress( "nSV", &nSV );
	theTree->SetBranchAddress( "lep1pt", &lep1pt );
	theTree->SetBranchAddress( "lep0pt", &lep0pt );
	theTree->SetBranchAddress( "EtaStandDev", &EtaStandDev );
	theTree->SetBranchAddress( "UnweightedEta", &UnweightedEta );
	theTree->SetBranchAddress( "jetCHF0", &jetCHF0 );
	theTree->SetBranchAddress( "jetCHF1", &jetCHF1 );
	theTree->SetBranchAddress( "lep_pfCombRelIso0", &lep_pfCombRelIso0 );
	theTree->SetBranchAddress( "lep_pfCombRelIso1", &lep_pfCombRelIso1 );
	theTree->SetBranchAddress( "DphiJJ", &DphiJJ );
	theTree->SetBranchAddress( "RMS_eta", &RMS_eta );
	theTree->SetBranchAddress( "PtbalZH", &PtbalZH );
	theTree->SetBranchAddress( "EventPt", &EventPt );
	theTree->SetBranchAddress( "AngleEMU", &AngleEMU );
	theTree->SetBranchAddress( "Centrality", &Centrality );
	theTree->SetBranchAddress( "EvntShpAplanarity", &EvntShpAplanarity );
	theTree->SetBranchAddress( "EvntShpSphericity", &EvntShpSphericity );
	theTree->SetBranchAddress( "EvntShpIsotropy", &EvntShpIsotropy );
	theTree->SetBranchAddress( "Trigweight", &Trigweight );
	theTree->SetBranchAddress( "B2011PUweight", &B2011PUweight );
	theTree->SetBranchAddress( "A2011PUweight", &A2011PUweight );
	theTree->SetBranchAddress( "btag2CSF", &btag2CSF );
	theTree->SetBranchAddress( "MET", &MET );
	theTree->SetBranchAddress( "Mte"      ,  &Mte     );
	theTree->SetBranchAddress( "Mtmu"      ,  &Mtmu     );
	theTree->SetBranchAddress( "dPhiHMET"      ,  &dPhiHMET     );
	theTree->SetBranchAddress( "DeltaPhijetMETmin"      ,  &DeltaPhijetMETmin     );
	theTree->SetBranchAddress( "delRjj"      ,  &delRjj     );
	theTree->SetBranchAddress( "delRemu"      ,  &delRemu     );
	theTree->SetBranchAddress( "DphiZMET"      ,  &DphiZMET     );
	theTree->SetBranchAddress( "Dphiemu"      ,  &Dphiemu     );
	theTree->SetBranchAddress( "DeltaPhijetMETmin"      ,  &DeltaPhijetMETmin     );
	theTree->SetBranchAddress("AngleHemu",&AngleHemu);
	theTree->SetBranchAddress("ProjVisT",&ProjVisT);
	theTree->SetBranchAddress("topMass",&topMass);
	theTree->SetBranchAddress("topPt",&topPt);
	theTree->SetBranchAddress("ZmassSVD",&ZmassSVD);
	theTree->SetBranchAddress("ZmassSVDnegSol",&ZmassSVDnegSol);
	theTree->SetBranchAddress("Zmass",&Zmass);
	theTree->SetBranchAddress("ZmassNegInclu",&ZmassNegInclu);
	theTree->SetBranchAddress("topMass",&topMass);
	theTree->SetBranchAddress("Mt",&Mt);
	
	theTree->SetBranchAddress( "eventFlavor", &eventFlavor );
	

   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

	TTree* TMVATree = (TTree*)input->Get("TMVA_tree");
	int NumInTree = theTree->GetEntries();
	int NumberTMVAtree = TMVATree->GetEntries();
	DY_PtZ_weight = DY_PtZ_weight* ( NumInTree /float(NumberTMVAtree)) ;
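	// DY_PtZ_weight is scaled by the ratio of entries in BDT_btree to entries in TMVA_tree,
	// i.e. the fraction of the sample that appears in this tree.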
	

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   int Nevents = 0, NpassBDT = 0;
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
      Nevents++;
      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

      // The Reader uses the values stored at the addresses registered with AddVariable;
      // derived inputs declared with "abs(...)" expressions are therefore computed here.
      dphiEMU  = TMath::Abs( Dphiemu );
      dphiZMET = TMath::Abs( DphiZMET );

 //     var1 = userVar1 + userVar2;
   //   var2 = userVar1 - userVar2;

      // --- Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
	   if (Use["BDT"          ]) {
	   BDTvalue = reader->EvaluateMVA( "BDT method"           );
		   histMattBdt    ->Fill( BDTvalue,DY_PtZ_weight*Trigweight*B2011PUweight );
		   histTMVABdt    ->Fill( BDTvalue,DY_PtZ_weight*Trigweight*B2011PUweight );
	   }
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   
      // std::cout << "Ht is " << Ht << std::endl;
      if (BDTvalue > -1.50) {
         NpassBDT++;
         hMjj_OpenSelection->Fill(Hmass, DY_PtZ_weight*Trigweight*B2011PUweight);
         hMmumu_OpenSelection->Fill(Emumass, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPtjj_OpenSelection->Fill(Hpt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPtmumu_OpenSelection->Fill(Zpt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCSV0_OpenSelection->Fill(CSV0, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCSV1_OpenSelection->Fill(CSV1, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdphiVH_OpenSelection->Fill(DeltaPhiHV, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdetaJJ_OpenSelection->Fill(DetaJJ, DY_PtZ_weight*Trigweight*B2011PUweight);
         hUnweightedEta_OpenSelection->Fill(UnweightedEta, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPtmu0_OpenSelection->Fill(lep0pt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hHt_OpenSelection->Fill(Ht, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCircularity_OpenSelection->Fill(EvntShpCircularity, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCHFb0_OpenSelection->Fill(jetCHF0, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCHFb1_OpenSelection->Fill(jetCHF1, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPtbalZH_OpenSelection->Fill(PtbalZH, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPtmu1_OpenSelection->Fill(lep1pt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPFRelIsomu0_OpenSelection->Fill(lep_pfCombRelIso0, DY_PtZ_weight*Trigweight*B2011PUweight);
         hPFRelIsomu1_OpenSelection->Fill(lep_pfCombRelIso1, DY_PtZ_weight*Trigweight*B2011PUweight);
         hNjets_OpenSelection->Fill(nJets, DY_PtZ_weight*Trigweight*B2011PUweight);
         hRMSeta_OpenSelection->Fill(RMS_eta, DY_PtZ_weight*Trigweight*B2011PUweight);
         hStaDeveta_OpenSelection->Fill(EtaStandDev, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdphiJJ_vect_OpenSelection->Fill(DphiJJ, DY_PtZ_weight*Trigweight*B2011PUweight);
         hCentrality_OpenSelection->Fill(Centrality, DY_PtZ_weight*Trigweight*B2011PUweight);
         hEventPt_OpenSelection->Fill(EventPt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hAngleEMU_OpenSelection->Fill(AngleEMU, DY_PtZ_weight*Trigweight*B2011PUweight);
         hSphericity_OpenSelection->Fill(EvntShpSphericity, DY_PtZ_weight*Trigweight*B2011PUweight);
         hAplanarity_OpenSelection->Fill(EvntShpAplanarity, DY_PtZ_weight*Trigweight*B2011PUweight);
         hIsotropy_OpenSelection->Fill(EvntShpIsotropy, DY_PtZ_weight*Trigweight*B2011PUweight);
         hDphiDetajj_OpenSelection->Fill(DphiJJ, DetaJJ, DY_PtZ_weight*Trigweight*B2011PUweight);
         hMte_OpenSelection->Fill(Mte, DY_PtZ_weight*Trigweight*B2011PUweight);
         hMtmu_OpenSelection->Fill(Mtmu, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdPhiHMET_OpenSelection->Fill(dPhiHMET, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdelRjj_OpenSelection->Fill(delRjj, DY_PtZ_weight*Trigweight*B2011PUweight);
         hdelRemu_OpenSelection->Fill(delRemu, DY_PtZ_weight*Trigweight*B2011PUweight);
         hDphiZMET_OpenSelection->Fill(DphiZMET, DY_PtZ_weight*Trigweight*B2011PUweight);
         hDphiemu_OpenSelection->Fill(Dphiemu, DY_PtZ_weight*Trigweight*B2011PUweight);
         hDeltaPhijetMETmin_OpenSelection->Fill(DeltaPhijetMETmin, DY_PtZ_weight*Trigweight*B2011PUweight);
         hAngleHemu_OpenSelection->Fill(AngleHemu, DY_PtZ_weight*Trigweight*B2011PUweight);
         hProjVisT_OpenSelection->Fill(ProjVisT, DY_PtZ_weight*Trigweight*B2011PUweight);
         htopMass_OpenSelection->Fill(topMass, DY_PtZ_weight*Trigweight*B2011PUweight);
         htopPt_OpenSelection->Fill(topPt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hVMt_OpenSelection->Fill(Mt, DY_PtZ_weight*Trigweight*B2011PUweight);
         hZmassSVD_OpenSelection->Fill(ZmassSVD, DY_PtZ_weight*Trigweight*B2011PUweight);
         hZmassSVDnegSol_OpenSelection->Fill(ZmassSVDnegSol, DY_PtZ_weight*Trigweight*B2011PUweight);
         hZmass_OpenSelection->Fill(Zmass, DY_PtZ_weight*Trigweight*B2011PUweight);
         hZmassNegInclu_OpenSelection->Fill(ZmassNegInclu, DY_PtZ_weight*Trigweight*B2011PUweight);
      }
      treeWithBDT->Fill();

   } // end event loop
   
   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();
std::cout << "Number of Events: "<< Nevents << " Events passed BDT " << NpassBDT<< endl;
   // Get efficiency for cuts classifier
   
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }
	

   // --- Write histograms
   TFile *target  = new TFile( "BDTCut_DYPtZ_HF.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
	if (Use["BDT"          ])  {
		histMattBdt    ->Write();
		histTMVABdt    ->Write();
	}
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write(); 
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   
   hCHFb0_OpenSelection->Write();
   hCHFb1_OpenSelection->Write();
   hPtbalZH_OpenSelection->Write();
   hPtmu1_OpenSelection->Write();
   hPFRelIsomu0_OpenSelection->Write();
   hPFRelIsomu1_OpenSelection->Write();
   hMjj_OpenSelection->Write();
   hMmumu_OpenSelection->Write();
   hPtjj_OpenSelection->Write();
   hPtmumu_OpenSelection->Write();
   hCSV0_OpenSelection->Write();
   hCSV1_OpenSelection->Write();
   hdphiVH_OpenSelection->Write();
   hdetaJJ_OpenSelection->Write();
   hUnweightedEta_OpenSelection->Write();
   hPtmu0_OpenSelection->Write();
   hCircularity_OpenSelection->Write();
   hHt_OpenSelection->Write();
   hNjets_OpenSelection->Write();
   hRMSeta_OpenSelection->Write();
   hStaDeveta_OpenSelection->Write();
   hdphiJJ_vect_OpenSelection->Write();
   hCentrality_OpenSelection->Write();
   hEventPt_OpenSelection->Write();
   hAngleEMU_OpenSelection->Write();
   hSphericity_OpenSelection->Write();
   hAplanarity_OpenSelection->Write();
   hIsotropy_OpenSelection->Write();
   hDphiDetajj_OpenSelection->Write();
   hMtmu_OpenSelection->Write();
   hMte_OpenSelection->Write();
   hdPhiHMET_OpenSelection->Write();
   hDphiemu_OpenSelection->Write();
   hdelRjj_OpenSelection->Write();
   hdelRemu_OpenSelection->Write();
   hDphiZMET_OpenSelection->Write();
   hDeltaPhijetMETmin_OpenSelection->Write();
   hAngleHemu_OpenSelection->Write();
   hProjVisT_OpenSelection->Write();
   htopMass_OpenSelection->Write();
   htopPt_OpenSelection->Write();
   hVMt_OpenSelection->Write();
   hZmassSVD_OpenSelection->Write();
   hZmassSVDnegSol_OpenSelection->Write();
   hZmass_OpenSelection->Write();
   hZmassNegInclu_OpenSelection->Write();

   treeWithBDT->Write();

   
   target->Close();

  
   delete reader;

	hCHFb0_OpenSelection->Delete();
	hCHFb1_OpenSelection->Delete();
	hPtbalZH_OpenSelection->Delete();
	hPtmu1_OpenSelection->Delete();
	hPFRelIsomu0_OpenSelection->Delete();
	hPFRelIsomu1_OpenSelection->Delete();
	hMjj_OpenSelection->Delete();
	hMmumu_OpenSelection->Delete();
	hPtjj_OpenSelection->Delete();
	hPtmumu_OpenSelection->Delete();
	hCSV0_OpenSelection->Delete();
	hCSV1_OpenSelection->Delete();
	hdphiVH_OpenSelection->Delete();
	hdetaJJ_OpenSelection->Delete();
	hUnweightedEta_OpenSelection->Delete();
	hPtmu0_OpenSelection->Delete();
	hCircularity_OpenSelection->Delete();
	hHt_OpenSelection->Delete();
	hNjets_OpenSelection->Delete();
	hRMSeta_OpenSelection->Delete();
	hStaDeveta_OpenSelection->Delete();
	hdphiJJ_vect_OpenSelection->Delete();
	hCentrality_OpenSelection->Delete();
	hEventPt_OpenSelection->Delete();
	hAngleEMU_OpenSelection->Delete();
	hSphericity_OpenSelection->Delete();
	hAplanarity_OpenSelection->Delete();
	hIsotropy_OpenSelection->Delete();
	hDphiDetajj_OpenSelection->Delete();
	hMtmu_OpenSelection->Delete();
	hMte_OpenSelection->Delete();
	hdPhiHMET_OpenSelection->Delete();
	hDphiemu_OpenSelection->Delete();
	hdelRjj_OpenSelection->Delete();
	hdelRemu_OpenSelection->Delete();
	hDphiZMET_OpenSelection->Delete();
	hDeltaPhijetMETmin_OpenSelection->Delete();
	hAngleHemu_OpenSelection->Delete();
	hProjVisT_OpenSelection->Delete();
	htopMass_OpenSelection->Delete();
	htopPt_OpenSelection->Delete();
	hVMt_OpenSelection->Delete();
	hZmassSVD_OpenSelection->Delete();
	hZmassSVDnegSol_OpenSelection->Delete();
	hZmass_OpenSelection->Delete();
	hZmassNegInclu_OpenSelection->Delete();
	

	if (Use["BDT"          ])  {
		histMattBdt    ->Delete();
		histTMVABdt    ->Delete();
	}
	
	

treeWithBDT->Delete();

	
    
   std::cout << "==> DYPtZ_HF_BDTCut is done!" << endl << std::endl;
   
   gROOT->ProcessLine(".q");
} 
void ZTMVAClassificationApplication( string filename, TString myMethodList = "" ) 
{   
#ifdef __CINT__
  gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif
  //---------------------------------------------------------------

  // This loads the library
  TMVA::Tools::Instance();

  // Default MVA methods to be trained + tested
  std::map<std::string,int> Use;

  // --- Cut optimisation
  Use["Cuts"]            = 0;
  Use["CutsD"]           = 0;
  Use["CutsPCA"]         = 0;
  Use["CutsGA"]          = 0;
  Use["CutsSA"]          = 0;
  // 
  // --- 1-dimensional likelihood ("naive Bayes estimator")
  Use["Likelihood"]      = 0;
  Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
  Use["LikelihoodPCA"]   = 0; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
  Use["LikelihoodKDE"]   = 0;
  Use["LikelihoodMIX"]   = 0;
  //
  // --- Multidimensional likelihood and Nearest-Neighbour methods
  Use["PDERS"]           = 0;
  Use["PDERSD"]          = 0;
  Use["PDERSPCA"]        = 0;
  Use["PDEFoam"]         = 0;
  Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
  Use["KNN"]             = 0; // k-nearest neighbour method
  //
  // --- Linear Discriminant Analysis
  Use["LD"]              = 0; // Linear Discriminant identical to Fisher
  Use["Fisher"]          = 0;
  Use["FisherG"]         = 0;
  Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
  Use["HMatrix"]         = 0;
  //
  // --- Function Discriminant analysis
  Use["FDA_GA"]          = 0; // minimisation of user-defined function using Genetics Algorithm
  Use["FDA_SA"]          = 0;
  Use["FDA_MC"]          = 0;
  Use["FDA_MT"]          = 0;
  Use["FDA_GAMT"]        = 0;
  Use["FDA_MCMT"]        = 0;
  //
  // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
  Use["MLP"]             = 1; // Recommended ANN
  Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
  Use["MLPBNN"]          = 0; // Recommended ANN with BFGS training method and bayesian regulator
  Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
  Use["TMlpANN"]         = 0; // ROOT's own ANN
  //
  // --- Support Vector Machine 
  Use["SVM"]             = 0;
  // 
  // --- Boosted Decision Trees
  Use["BDT"]             = 1; // uses Adaptive Boost
  Use["BDTG"]            = 1; // uses Gradient Boost
  Use["BDTB"]            = 0; // uses Bagging
  Use["BDTD"]            = 1; // decorrelation + Adaptive Boost
  // 
  // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
  Use["RuleFit"]         = 0;
  // ---------------------------------------------------------------
  Use["Plugin"]          = 0;
  Use["Category"]        = 0;
  Use["SVM_Gauss"]       = 0;
  Use["SVM_Poly"]        = 0;
  Use["SVM_Lin"]         = 0;

  std::cout << std::endl;
  std::cout << "==> Start TMVAClassificationApplication" << std::endl;

  // Select methods (don't look at this code - not of interest)
  if (myMethodList != "") {
    for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

    std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
    for (UInt_t i=0; i<mlist.size(); i++) {
       std::string regMethod(mlist[i]);

       if (Use.find(regMethod) == Use.end()) {
          std::cout << "Method \"" << regMethod 
                    << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
          for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
             std::cout << it->first << " ";
          }
          std::cout << std::endl;
          return;
       }
       Use[regMethod] = 1;
    }
  }
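  // myMethodList is a comma-separated subset of the keys above; for example (with a
  // hypothetical input file name)
  //   ZTMVAClassificationApplication("ntuple_mvaVars_vetoes.root", "BDT,BDTG,MLP");
  // evaluates only those three classifiers. With an empty list, the methods enabled
  // by default above (MLP, BDT, BDTG, BDTD) are used.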

  // --------------------------------------------------------------------------------------------------

  // --- Create the Reader object

  TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

  Float_t B_s0_ln_FDCHI2; reader->AddVariable("B_s0_ln_FDCHI2", &B_s0_ln_FDCHI2 );
  Float_t B_s0_ln_IPCHI2; reader->AddVariable("B_s0_ln_IPCHI2", &B_s0_ln_IPCHI2 );
  Float_t B_s0_ln_EVCHI2; reader->AddVariable("B_s0_ln_EVCHI2", &B_s0_ln_EVCHI2 );
  Float_t B_s0_PT_fiveGeV;reader->AddVariable("B_s0_PT_fiveGeV",&B_s0_PT_fiveGeV);
  Float_t B_s0_Eta;       reader->AddVariable("B_s0_Eta",       &B_s0_Eta       );
  Float_t minK_PT_GeV;    reader->AddVariable("minK_PT_GeV",    &minK_PT_GeV    );
  Float_t minK_ln_IPCHI2; reader->AddVariable("minK_ln_IPCHI2", &minK_ln_IPCHI2 );
  
  Float_t Category_cat1, Category_cat2, Category_cat3;
  if (Use["Category"]){
    // Add artificial spectators for distinguishing categories
    reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
    reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
    reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
  }

  // --- Book the MVA methods

  TString dir    = "weights/";
  TString prefix = "TMVAClassification";

  // Book method(s)
  for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
    if (it->second) {
       TString methodName = TString(it->first) + TString(" method");
       TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
       reader->BookMVA( methodName, weightfile ); 
    }
  }
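  // Each enabled method is booked from weights/TMVAClassification_<MethodName>.weights.xml,
  // i.e. the XML weight file produced by the corresponding TMVA training job.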
  
  // Book output histograms
  UInt_t nbin = 100;
  TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
  TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
  TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
  TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
  TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

  if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
  if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
  if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
  if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
  if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
  if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
  if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
  if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
  if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
  if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
  if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
  if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
  if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
  if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
  if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
  if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
  if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
  if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
  if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
  if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
  if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
  if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
  if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
  if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
  if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
  if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
  if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
  if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
  if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
  if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

  // PDEFoam also returns a per-event error; fill it into a histogram, and fill the significance as well
  if (Use["PDEFoam"]) {
    histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
    histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
    histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
  }

  // Book example histogram for probability (the other methods are done similarly)
  TH1F *probHistFi(0), *rarityHistFi(0);
  if (Use["Fisher"]) {
    probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
    rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
  }

  //TFile * input = new TFile("../cloosepid.root"); // this is the signal 
  //TFile * input_Background = new TFile("Z4430Files/merged_ntuple_jpsi_s17.root"); // this is the background
  //TFile * input = new TFile("../output/MCBsphif0_after_transform.root"); // this is the signal
  TFile * input_Background; 
  input_Background = new TFile(filename.c_str());
  //std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
  std::cout << "--- TMVAClassificationApp    : Using input file: " << input_Background->GetName() << std::endl;
  
  // --- Event loop



  //save the results here
  /*
  TTree* results= new TTree("results", "results");
  Float_t Method_Likelihood; Float_t bdt;
  results->Branch("Method_Likelihood",&Method_Likelihood ,"Method_Likelihood/D" );
  results->Branch("BDTG method", &bdt, "BDTG method" );       
  */

 

  std::cout << "--- Select signal sample" << std::endl;
  //TTree* theTree = (TTree*)input->Get("MCtree");
  TTree* theTree = (TTree*)input_Background->Get("DecayTree");
 
  //if(mode<3) TCut cut = TCut("time1>0. && abs(phi_mass-1019.455)<15");
//   else TCut cut = TCut("time1>0. && abs(phi_mass-1019.455)<12 && abs(phi1_mass-1019.455)<12");
  //else TCut cut = TCut("time1>0. && abs(phi_mass-1019.455)<15 && abs(phi1_mass-1019.455)<15");
  //TFile* f_out  =new TFile("../output/MCBsphif0_after_bdt.root","RECREATE");
  TFile* f_out;
  // Build the output file name from the input name by swapping the label
  string oldLabel = "mvaVars_vetoes";
  string newLabel = "mva";
  size_t labelPos = filename.find(oldLabel);
  if (labelPos != string::npos) filename.replace(labelPos, oldLabel.length(), newLabel);
  f_out = new TFile(filename.c_str(),"RECREATE");

  //TTree* smalltree = theTree->CopyTree(cut);
  TTree*  newtree = theTree->CloneTree(-1);
  //TTree* smalltree = theTree->CloneTree(-1);
  //TTree*  newtree = theTree->CloneTree(-1);
  float bdtg;
  TBranch*  b_bdtg = newtree->Branch("bdtg", &bdtg,"bdtg/F");  
  float bdt;
  TBranch*  b_bdt = newtree->Branch("bdt", &bdt,"bdt/F");  
  float bdtd;
  TBranch*  b_bdtd = newtree->Branch("bdtd", &bdtd,"bdtd/F");  
  float mlp;
  TBranch*  b_mlp = newtree->Branch("mlp", &mlp,"mlp/F");  
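  // The new branches above are filled individually with TBranch::Fill() inside the
  // event loop (rather than with newtree->Fill(), which would append whole new
  // entries to the already-cloned tree); each branch only receives entries when its
  // method is enabled.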
 

  //   Float_t userptsum, userpionpt, userptj, userdmj , uservchi2dof;
  // Float_t usermaxdphi; Float_t userptAsym;
  theTree->SetBranchAddress("B_s0_ln_FDCHI2", &B_s0_ln_FDCHI2 );
  theTree->SetBranchAddress("B_s0_ln_IPCHI2", &B_s0_ln_IPCHI2 );
  theTree->SetBranchAddress("B_s0_ln_EVCHI2", &B_s0_ln_EVCHI2 );
  theTree->SetBranchAddress("B_s0_PT_fiveGeV",&B_s0_PT_fiveGeV);
  theTree->SetBranchAddress("B_s0_Eta",       &B_s0_Eta       );
  theTree->SetBranchAddress("minK_PT_GeV",    &minK_PT_GeV    );
  theTree->SetBranchAddress("minK_ln_IPCHI2", &minK_ln_IPCHI2 );
  // Efficiency calculator for cut method
  Int_t    nSelCutsGA = 0;
  Double_t effS       = 0.7;

  std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests
  Long64_t num_entries = newtree->GetEntries();
  std::cout << "--- Processing: " << num_entries << " events" << std::endl;
  TStopwatch sw;
  sw.Start();
  for (Long64_t ievt=0; ievt<num_entries;ievt++) {

    // if (ievt%10000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

    newtree->GetEntry(ievt);
   
    //    var1 = userVar1 + userVar2;
    //       var2 = userVar1 - userVar2;
    
    // --- Return the MVA outputs and fill into histograms

    if (Use["CutsGA"]) {
       // Cuts is a special case: give the desired signal efficiency
       Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
       if (passed) nSelCutsGA++;
    }

    if (Use["Likelihood"   ])   {

	histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
	//	Method_Likelihood =  reader->EvaluateMVA( "Likelihood method"    ) ;
	//std::cout << Method_Likelihood << std::endl;
	//	results->Fill();
    }
    if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
    if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
    if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
    if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
    if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
    if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
    if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
    if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
    if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
    if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
    if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
    if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
    if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
    if (Use["MLP"          ]) {
	mlp =  reader->EvaluateMVA( "MLP method"           ) ;
      b_mlp->Fill();
      histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
    }
    if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
    if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
    if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
    if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
    if (Use["BDT"          ])  {
       histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
       bdt = reader->EvaluateMVA( "BDT method"           ) ;
 	b_bdt->Fill();
    }
    if (Use["BDTD"         ])  {
      histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      bdtd =  reader->EvaluateMVA( "BDTD method"          );
	b_bdtd->Fill();
    }
    if (Use["BDTG"         ])  {
      histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      bdtg =  reader->EvaluateMVA( "BDTG method"        );
	b_bdtg->Fill();
	//cout <<  reader->EvaluateMVA( "BDTG method" )  <<endl;
    }
    if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
    if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
    if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
    if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
    if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
    if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
    if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
    if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

    // Retrieve also per-event error
    if (Use["PDEFoam"]) {
       Double_t val = reader->EvaluateMVA( "PDEFoam method" );
       Double_t err = reader->GetMVAError();
       histPDEFoam   ->Fill( val );
       histPDEFoamErr->Fill( err );         
       if (err>1.e-50) histPDEFoamSig->Fill( val/err );
    }         
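    // The significance histogram is filled only when the PDEFoam error is
    // non-negligible, which avoids dividing by (near-)zero errors.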

    // Retrieve probability instead of MVA output
    if (Use["Fisher"])   {
       probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
       rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
    }
  }

  // Get elapsed time
  sw.Stop();
  std::cout << "--- End of event loop: "; sw.Print();

  // Get efficiency for cuts classifier
  if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                              << " (for a required signal efficiency of " << effS << ")" << std::endl;

  if (Use["CutsGA"]) {

    // test: retrieve cuts for particular signal efficiency
    // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
    TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

    if (mcuts) {      
       std::vector<Double_t> cutsMin;
       std::vector<Double_t> cutsMax;
       mcuts->GetCuts( 0.7, cutsMin, cutsMax );
       std::cout << "--- -------------------------------------------------------------" << std::endl;
       std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
       for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
          std::cout << "... Cut: " 
                    << cutsMin[ivar] 
                    << " < \"" 
                    << mcuts->GetInputVar(ivar)
                    << "\" <= " 
                    << cutsMax[ivar] << std::endl;
       }
       std::cout << "--- -------------------------------------------------------------" << std::endl;
    }
  }

  newtree->Write();
  f_out->Close();
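
  // At this point the cloned tree, augmented with the bdt, bdtg, bdtd and mlp
  // branches, has been saved to the renamed "mva" output file; the MVA control
  // histograms are written separately to TMVApp.root below.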


  // --- Write histograms

  TFile *target  = new TFile( "TMVApp.root","RECREATE" );
  if (Use["Likelihood"   ])   histLk     ->Write();
  if (Use["LikelihoodD"  ])   histLkD    ->Write();
  if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
  if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
  if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
  if (Use["PDERS"        ])   histPD     ->Write();
  if (Use["PDERSD"       ])   histPDD    ->Write();
  if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
  if (Use["KNN"          ])   histKNN    ->Write();
  if (Use["HMatrix"      ])   histHm     ->Write();
  if (Use["Fisher"       ])   histFi     ->Write();
  if (Use["FisherG"      ])   histFiG    ->Write();
  if (Use["BoostedFisher"])   histFiB    ->Write();
  if (Use["LD"           ])   histLD     ->Write();
  if (Use["MLP"          ])   histNn     ->Write();
  if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
  if (Use["MLPBNN"       ])   histNnbnn  ->Write();
  if (Use["CFMlpANN"     ])   histNnC    ->Write();
  if (Use["TMlpANN"      ])   histNnT    ->Write();
  if (Use["BDT"          ])   histBdt    ->Write();
  if (Use["BDTD"         ])   histBdtD   ->Write();
  if (Use["BDTG"         ])   histBdtG   ->Write(); 
  if (Use["RuleFit"      ])   histRf     ->Write();
  if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
  if (Use["SVM_Poly"     ])   histSVMP   ->Write();
  if (Use["SVM_Lin"      ])   histSVML   ->Write();
  if (Use["FDA_MT"       ])   histFDAMT  ->Write();
  if (Use["FDA_GA"       ])   histFDAGA  ->Write();
  if (Use["Category"     ])   histCat    ->Write();
  if (Use["Plugin"       ])   histPBdt   ->Write();


  // results->Write();
  // Write also error and significance histos
  if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

  // Write also probability hists
  if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
  target->Close();

  std::cout << "--- Created_prob.root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;
  delete reader;
  
  std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
} 
void TMVAClassificationApplicationLambda( TString myMethodList = "" ) 
{   
#ifdef __CINT__
   gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   // 
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetics Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and bayesian regulator
   Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine 
   Use["SVM"]             = 1;
   // 
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   // 
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod 
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   //Float_t var1, var2;
   //Float_t var3, var4;

   Float_t la_agl, la_dlos, la_dau1_dzos, la_dau1_dxyos, la_dau2_dzos, la_dau2_dxyos;
   Float_t la_vtxChi2, la_dau1_nhit, la_dau2_nhit;

   reader->AddVariable( "la_agl", &la_agl );
   reader->AddVariable( "la_dlos", &la_dlos );
   reader->AddVariable( "la_dau1_dzos", &la_dau1_dzos );
   reader->AddVariable( "la_dau2_dzos",&la_dau2_dzos);
   reader->AddVariable( "la_dau1_dxyos",                &la_dau1_dxyos );
   
   reader->AddVariable( "la_dau2_dxyos",&la_dau2_dxyos);
   reader->AddVariable( "la_vtxChi2",&la_vtxChi2);
   reader->AddVariable( "la_dau1_nhit",&la_dau1_nhit);
   reader->AddVariable( "la_dau2_nhit",&la_dau2_nhit);


   // Spectator variables declared in the training have to be added to the reader, too
   Float_t la_mass;
   reader->AddSpectator( "la_mass",   &la_mass );
   //reader->AddSpectator( "spec2 := var1*3",   &spec2 );
/*
   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }
*/
   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile ); 
      }
   }
   
   // Book output histograms
   UInt_t nbin = 100;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error; fill it into a histogram, and fill the significance as well
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //   
   TFile *input(0);
   TString fname = "./tmva_example.root";   
   if (!gSystem->AccessPathName( fname )) 
      input = TFile::Open( fname ); // use the file in the local directory if it exists
   else    
      input = TFile::Open( "~/2014Research/ROOT_file/V0reco_PbPb/MCPbPb_central1.root" ); // otherwise fall back to the full path
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
   
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("ana/v0_Lambda");
   //Float_t userVar1, userVar2;
   theTree->SetBranchAddress( "la_agl", &la_agl );
   theTree->SetBranchAddress( "la_dlos", &la_dlos );
   theTree->SetBranchAddress( "la_dau1_dzos", &la_dau1_dzos );
   theTree->SetBranchAddress( "la_dau1_dxyos",                &la_dau1_dxyos );
   theTree->SetBranchAddress( "la_dau2_dzos",&la_dau2_dzos);
   theTree->SetBranchAddress( "la_dau2_dxyos",&la_dau2_dxyos);
   theTree->SetBranchAddress( "la_vtxChi2",&la_vtxChi2);
   theTree->SetBranchAddress( "la_dau1_nhit",&la_dau1_nhit);
   theTree->SetBranchAddress( "la_dau2_nhit",&la_dau2_nhit);

   theTree->SetBranchAddress( "la_mass", &la_mass );




   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();

   TFile *target  = new TFile( "TMVAppLambda.root","RECREATE" );

   TNtuple *n1 =  new TNtuple( "n1","n1","Lam_mass:MVA");
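   // Each row of n1 holds the Lambda-candidate mass together with one MVA response;
   // if more than one of BDT/BDTD/BDTG is enabled, every enabled method adds its own
   // row per event, so their responses share this single ntuple.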
   
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);
      
      // --- Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      
      if (Use["BDT"          ]) {  

          double Lam_mass = la_mass;
          
          histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
          

          double MVA = 0.0;
          MVA = reader->EvaluateMVA("BDT method");


          n1->Fill(Lam_mass,MVA);
          
          
         // cout << "la mass: " << temp << endl
       }

      if (Use["BDTD"         ])  {
  
          double Lam_mass = la_mass;
          
          histBdtD    ->Fill( reader->EvaluateMVA( "BDTD method"           ) );
          

          double MVA = 0.0;
          MVA = reader->EvaluateMVA("BDTD method");


          n1->Fill(Lam_mass,MVA);

      }

      if (Use["BDTG"         ]) {

          double Lam_mass = la_mass;
          
          histBdtG    ->Fill( reader->EvaluateMVA( "BDTG method"           ) );
          

          double MVA = 0.0;
          MVA = reader->EvaluateMVA("BDTG method");


          n1->Fill(Lam_mass,MVA);



      }

      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   // --- Write histograms



   

   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   
   if (Use["BDT"          ]){   
      histBdt    ->Write();
      n1->Write();
    }

   if (Use["BDTD"         ]){
      histBdtD   ->Write();
      n1->Write();

   }

   if (Use["BDTG"         ])  {

      histBdtG   ->Write();
      n1->Write();
   } 
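   // Note that n1->Write() is called once per enabled BDT variant above, so the
   // output file can contain several cycles of the "n1" ntuple; the highest cycle
   // number corresponds to the last write.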
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;
  
   delete reader;
    
   std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
} 
void rezamyTMVAClassificationApplication1systematic( TString myMethodList = "" ) 
{   
#ifdef __CINT__
   gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 0;
   Use["CutsD"]           = 0;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   // 
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 0;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 0; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 0;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 0;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 0; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 0; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 0; // minimisation of user-defined function using Genetics Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 0; // Recommended ANN with BFGS training method and bayesian regulator
   Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine 
   Use["SVM"]             = 0;
   // 
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   // 
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 0;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod 
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   Float_t ptphoton, etaphoton, ptmuon, etamuon, ptjet, etajet, masstop, mtw;
   Float_t deltaRphotonjet, deltaRphotonmuon, ht, costopphoton, deltaphiphotonmet, cvsdiscriminant;
   Float_t jetmultiplicity, bjetmultiplicity, leptoncharge;
   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
  reader->AddVariable ("ptphoton", &ptphoton);
//  reader->AddVariable ("etaphoton", &etaphoton);
  reader->AddVariable ("ptmuon", &ptmuon);
//  reader->AddVariable ("etamuon", &etamuon);
  reader->AddVariable ("ptjet", &ptjet);
//  reader->AddVariable ("etajet", &etajet);
//  reader->AddVariable ("masstop", &masstop);
//  reader->AddVariable ("mtw", &mtw);
  reader->AddVariable ("deltaRphotonjet", &deltaRphotonjet);
  reader->AddVariable ("deltaRphotonmuon", &deltaRphotonmuon);
//  reader->AddVariable ("ht", &ht);
//  reader->AddVariable ("photonmuonmass", &photonmuonmass);
  reader->AddVariable ("costopphoton", &costopphoton);
//  reader->AddVariable ("topphotonmass", &topphotonmass);
//reader->AddVariable ("pttop", &pttop);
//reader->AddVariable ("etatop", &etatop);
  reader->AddVariable ("jetmultiplicity", &jetmultiplicity);
//  reader->AddVariable ("bjetmultiplicity", &bjetmultiplicity);
  reader->AddVariable ("deltaphiphotonmet", &deltaphiphotonmet);
  reader->AddVariable ("cvsdiscriminant", &cvsdiscriminant);
//  reader->AddVariable ("leptoncharge", &leptoncharge);


/*
   // Spectator variables declared in the training have to be added to the reader, too
   reader->AddSpectator( "spec1 := var1*2",   &spec1 );
   reader->AddSpectator( "spec2 := var1*3",   &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }
*/
   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVA";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile ); 
      }
   }
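
   // Note the "TMVA" prefix used here: the BDT weights are expected at
   // weights/TMVA_BDT.weights.xml, unlike the "TMVAClassification" prefix used by
   // the functions above.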
   
   // Book output histograms
   UInt_t nbin = 40;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error; fill it into a histogram, and fill the significance as well
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test in this example.
   //   
   TFile *input(0);
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
std::vector<string> samples_;
std::vector<string> datasamples_;
std::vector<TH1F*> datahists;
std::vector<TH1F*> revDATAhists;

float scales[] = {0.0978,1.491,0.0961,0.0253,0.0224,0.0145,0.0125,0.0160,0.0158,0.0341,0.0341,0.0341,0.020,0.0017,0.0055,0.0032,0.00084,0.02,19.145*0.0169,19.145*0.0169,19.145*0.0169,19.145*0.0169,19.145*0.0169};
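// Presumably per-sample normalisation weights, indexed in parallel with the samples_
// list filled below; the five identical 19.145*0.0169 entries at the end line up
// with the five SIGNAL.root samples.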
//samples_.push_back("WJET.root");
samples_.push_back("ZJET.root");
samples_.push_back("PHJET200400.root");
samples_.push_back("WPHJET.root");
samples_.push_back("T-W-CH.root");
samples_.push_back("TBAR-W-CH.root");
samples_.push_back("T-S-CH.root");
samples_.push_back("TBAR-S-CH.root");
samples_.push_back("T-T-CH.root");
samples_.push_back("TBAR-T-CH.root");
samples_.push_back("TTBAR1.root");
samples_.push_back("TTBAR2.root");
samples_.push_back("TTBAR3.root");
samples_.push_back("TTG.root");
samples_.push_back("WWG.root");
samples_.push_back("WW.root");
samples_.push_back("WZ.root");
samples_.push_back("ZZ.root");
samples_.push_back("ZGAMMA.root");
samples_.push_back("SIGNAL.root");
samples_.push_back("SIGNAL.root");
samples_.push_back("SIGNAL.root");
samples_.push_back("SIGNAL.root");
samples_.push_back("SIGNAL.root");


datasamples_.push_back("REALDATA1.root");
datasamples_.push_back("REALDATA2.root");
datasamples_.push_back("REALDATA3.root");

std::vector<string> datasamplesreverse_;
datasamplesreverse_.push_back("etarev/REALDATA1.root");
datasamplesreverse_.push_back("etarev/REALDATA2.root");
datasamplesreverse_.push_back("etarev/REALDATA3.root");

std::vector<string> systematics;
//systematics.push_back("__JES__plus");
//systematics.push_back("__JES__minus");
//systematics.push_back("__JER__plus");
//systematics.push_back("__JER__minus");
systematics.push_back("__PU__plus");
systematics.push_back("__PU__minus");
systematics.push_back("__TRIG__plus");
systematics.push_back("__TRIG__minus");
systematics.push_back("__BTAG__plus");
systematics.push_back("__BTAG__minus");
systematics.push_back("__MISSTAG__plus");
systematics.push_back("__MISSTAG__minus");
systematics.push_back("__MUON__plus");
systematics.push_back("__MUON__minus");
systematics.push_back("__PHOTON__plus");
systematics.push_back("__PHOTON__minus");
systematics.push_back("");

map<string, double> eventweight;   // per-event weight for each systematic variation (key = systematic name, "" = nominal)
TList* hList = new TList();      // list of histograms to store
std::vector<TFile*> files;
for(unsigned int idx=0; idx<samples_.size(); ++idx){
files.push_back(new TFile(samples_[idx].c_str()));
}

std::vector<TFile*> datafiles;
for(unsigned int idx=0; idx<datasamples_.size(); ++idx){
datafiles.push_back(new TFile(datasamples_[idx].c_str()));
}
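// Note: the TFile handles collected in "files" and "datafiles" above are not used again below --
// each sample is re-opened with TFile::Open inside the loops. Note also that the fallback download
// of tmva_class_example.root (inherited from the TMVA example) would not contain the
// "analyzestep2/atq" tree read here, so the local sample files must exist.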


for(unsigned int phi=0; phi<systematics.size(); ++phi){
std::vector<TH1F*> hists;

TH1F *wphjethist(0), *zjethist(0), *phjethist(0), *wjethist(0);
TH1F *twchhist(0), *tbarwhist(0), *tschhist(0), *tbarschhist(0), *ttchhist(0), *tbartchhist(0);
TH1F *tt1hist(0), *tt2hist(0), *tt3hist(0), *ttphhist(0);
TH1F *wwphhist(0), *wwhist(0), *wzhist(0), *zzhist(0), *zgammahist(0);
TH1F *signalhist5(0), *signalhist10(0), *signalhist20(0), *signalhist30(0), *signalhist40(0);

TH1F *data1histrev(0), *data2histrev(0) ,*data3histrev(0);

wphjethist = new TH1F( std::string("BDT__wphjethist").append(systematics[phi]).c_str(), std::string("BDT__wphjethist").append(systematics[phi]).c_str()  , nbin,  -1, 1    );
zjethist = new TH1F( std::string("BDT__zjethist").append(systematics[phi]).c_str(), std::string("BDT__zjethist").append(systematics[phi]).c_str(),  nbin,  -1, 1    );
phjethist = new TH1F( std::string("BDT__phjethist").append(systematics[phi]).c_str(),std::string("BDT__phjethist").append(systematics[phi]).c_str() , nbin,  -1, 1    );
//wjethist = new TH1F( std::string("BDT__wjethist").append(systematics[phi]).c_str(),std::string("BDT__wjethist").append(systematics[phi]).c_str() , nbin, -0.8, 0.8 );
twchhist = new TH1F( std::string("BDT__twchhist").append(systematics[phi]).c_str(),std::string("BDT__twchhist").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
tbarwhist = new TH1F( std::string("BDT__tbarwhist").append(systematics[phi]).c_str(),std::string("BDT__tbarwhist").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
tschhist = new TH1F( std::string("BDT__tschhist").append(systematics[phi]).c_str(), std::string("BDT__tschhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
tbarschhist = new TH1F( std::string("BDT__tbarschhist").append(systematics[phi]).c_str(),std::string("BDT__tbarschhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
ttchhist = new TH1F( std::string("BDT__ttchhist").append(systematics[phi]).c_str(),std::string("BDT__ttchhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
tbartchhist = new TH1F( std::string("BDT__tbartchhist").append(systematics[phi]).c_str(), std::string("BDT__tbartchhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
tt1hist = new TH1F( std::string("BDT__tt1hist").append(systematics[phi]).c_str(), std::string("BDT__tt1hist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
tt2hist = new TH1F( std::string("BDT__tt2hist").append(systematics[phi]).c_str(), std::string("BDT__tt2hist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
tt3hist = new TH1F( std::string("BDT__tt3hist").append(systematics[phi]).c_str(),std::string("BDT__tt3hist").append(systematics[phi]).c_str() , nbin,  -1, 1    );
ttphhist = new TH1F( std::string("BDT__ttphhist").append(systematics[phi]).c_str(),std::string("BDT__ttphhist").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
wwphhist = new TH1F( std::string("BDT__wwphhist").append(systematics[phi]).c_str(),std::string("BDT__wwphhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
wwhist = new TH1F( std::string("BDT__wwhist").append(systematics[phi]).c_str(),std::string("BDT__wwhist").append(systematics[phi]).c_str()  ,nbin,  -1, 1    );
wzhist = new TH1F( std::string("BDT__wzhist").append(systematics[phi]).c_str(),std::string("BDT__wzhist").append(systematics[phi]).c_str(), nbin,  -1, 1    );
zzhist = new TH1F( std::string("BDT__zzhist").append(systematics[phi]).c_str(),std::string("BDT__zzhist").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
zgammahist = new TH1F( std::string("BDT__zgammahist").append(systematics[phi]).c_str(),std::string("BDT__zgammahist").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
signalhist5 = new TH1F( std::string("BDT__signal5").append(systematics[phi]).c_str(),std::string("BDT__signal5").append(systematics[phi]).c_str()  ,nbin,  -1, 1    );
signalhist10 = new TH1F( std::string("BDT__signal10").append(systematics[phi]).c_str(),std::string("BDT__signal10").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
signalhist20 = new TH1F( std::string("BDT__signal20").append(systematics[phi]).c_str(),std::string("BDT__signal20").append(systematics[phi]).c_str() ,nbin,  -1, 1    );
signalhist30 = new TH1F( std::string("BDT__signal30").append(systematics[phi]).c_str(),std::string("BDT__signal30").append(systematics[phi]).c_str() ,nbin, -1, 1   );
signalhist40 = new TH1F( std::string("BDT__signal40").append(systematics[phi]).c_str(),std::string("BDT__signal40").append(systematics[phi]).c_str() ,nbin, -1, 1   );



hists.push_back(zjethist);
hists.push_back(phjethist);
hists.push_back(wphjethist);
//hists.push_back(wjethist);
hists.push_back(twchhist);
hists.push_back(tbarwhist);
hists.push_back(tschhist);
hists.push_back(tbarschhist);
hists.push_back(ttchhist);
hists.push_back(tbartchhist);
hists.push_back(tt1hist);
hists.push_back(tt2hist);
hists.push_back(tt3hist);
hists.push_back(ttphhist);
hists.push_back(wwphhist);
hists.push_back(wwhist);
hists.push_back(wzhist);
hists.push_back(zzhist);
hists.push_back(zgammahist);
hists.push_back(signalhist5);
hists.push_back(signalhist10);
hists.push_back(signalhist20);
hists.push_back(signalhist30);
hists.push_back(signalhist40);
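// The push_back order above must match samples_ (with WJET commented out in both places),
// so that hists[idx], samples_[idx] and scales[idx] all refer to the same sample.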


for(unsigned int idx=0; idx<samples_.size(); ++idx){
   TFile *input(0);

   TString fname =samples_[idx];
   if (!gSystem->AccessPathName( fname )) input = TFile::Open( fname ); // check if file in local directory exists
   else    
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
  
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
  //Double_t  myptphoton,myetaphoton,myptmuon,myetamuon,myptjet,myetajet,mymasstop,mymtw,mydeltaRphotonjet,mydeltaRphotonmuon,myht,mycostopphoton,mydeltaphiphotonmet,mycvsdiscriminant,myjetmultiplicity,mybjetmultiplicity,myleptoncharge;


std::vector<double> *myptphoton=0;
std::vector<double> *myetaphoton=0;
std::vector<double> *myptmuon=0;
std::vector<double> *myetamuon=0;
std::vector<double> *myptjet=0;
std::vector<double> *myetajet=0;
std::vector<double> *mymasstop=0;
//std::vector<double> *mymtw=0;
std::vector<double> *mydeltaRphotonjet=0;
std::vector<double> *mydeltaRphotonmuon=0;
//std::vector<double> *myht=0;
std::vector<double> *mycostopphoton=0;
std::vector<double> *mydeltaphiphotonmet=0;
std::vector<double> *mycvsdiscriminant=0;
std::vector<double> *myjetmultiplicity=0;
//std::vector<double> *mybjetmultiplicity=0;
//std::vector<double> *myleptoncharge=0;
std::vector<double> *myweight=0;
std::vector<double> *mybtagSF=0;
std::vector<double> *mybtagSFup=0;
std::vector<double> *mybtagSFdown=0;
std::vector<double> *mymistagSFup=0;
std::vector<double> *mymistagSFdown=0;
std::vector<double> *mytriggerSF=0;
std::vector<double> *mytriggerSFup=0;
std::vector<double> *mytriggerSFdown=0;
std::vector<double> *myphotonSF=0;
std::vector<double> *myphotonSFup=0;
std::vector<double> *myphotonSFdown=0;
std::vector<double> *mypileupSF=0;
std::vector<double> *mypileupSFup=0;
std::vector<double> *mypileupSFdown=0;
std::vector<double> *mymuonSFup=0;
std::vector<double> *mymuonSFdown=0;
std::vector<double> *mymuonSF=0;

   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("analyzestep2/atq");
//   Int_t myjetmultiplicity, mybjetmultiplicity , myleptoncharge;
//   Float_t userVar1, userVar2;

   theTree->SetBranchAddress("ptphoton", &myptphoton  );
   theTree->SetBranchAddress( "etaphoton", &myetaphoton );
   theTree->SetBranchAddress( "ptmuon", &myptmuon );
   theTree->SetBranchAddress( "etamuon", &myetamuon );
   theTree->SetBranchAddress( "ptjet", &myptjet );
   theTree->SetBranchAddress( "etajet", &myetajet );
   theTree->SetBranchAddress( "masstop", &mymasstop );
//   theTree->SetBranchAddress( "mtw", &mymtw );
   theTree->SetBranchAddress( "deltaRphotonjet", &mydeltaRphotonjet );
   theTree->SetBranchAddress( "deltaRphotonmuon", &mydeltaRphotonmuon );
//   theTree->SetBranchAddress( "ht", &myht );
   theTree->SetBranchAddress( "costopphoton", &mycostopphoton );
   theTree->SetBranchAddress( "jetmultiplicity", &myjetmultiplicity );
//   theTree->SetBranchAddress( "bjetmultiplicity", &mybjetmultiplicity );
   theTree->SetBranchAddress( "deltaphiphotonmet", &mydeltaphiphotonmet );
   theTree->SetBranchAddress( "cvsdiscriminant", &mycvsdiscriminant );
//   theTree->SetBranchAddress( "leptoncharge", &myleptoncharge );
   theTree->SetBranchAddress( "weight", &myweight);
   theTree->SetBranchAddress( "btagSF", &mybtagSF);
   theTree->SetBranchAddress( "btagSFup", &mybtagSFup);
   theTree->SetBranchAddress( "btagSFdown", &mybtagSFdown);
   theTree->SetBranchAddress( "mistagSFup", &mymistagSFup);
   theTree->SetBranchAddress( "mistagSFdown", &mymistagSFdown);
   theTree->SetBranchAddress( "triggerSF", &mytriggerSF);
   theTree->SetBranchAddress( "triggerSFup", &mytriggerSFup);
   theTree->SetBranchAddress( "triggerSFdown", &mytriggerSFdown);
   theTree->SetBranchAddress( "photonSF", &myphotonSF);
   theTree->SetBranchAddress( "photonSFup", &myphotonSFup);
   theTree->SetBranchAddress( "photonSFdown", &myphotonSFdown);
   theTree->SetBranchAddress( "muonSF", &mymuonSF);
   theTree->SetBranchAddress( "muonSFup", &mymuonSFup);
   theTree->SetBranchAddress( "muonSFdown", &mymuonSFdown);
   theTree->SetBranchAddress( "pileupSF", &mypileupSF);
   theTree->SetBranchAddress( "pileupSFup", &mypileupSFup);
   theTree->SetBranchAddress( "pileupSFdown", &mypileupSFdown);



 // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

//   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
//   std::cout << "--- ... Processing event: " << ievt << std::endl;
double finalweight;

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);
//for (int l=0;l<sizeof(myptphoton);l++){
//std::cout << "--- ... reza: " << myptphoton[l] <<std::endl;
//}
//std::cout << "--- ......................."<< (*mycvsdiscriminant)[0]<<std::endl;
      // --- Return the MVA outputs and fill into histograms
ptphoton=(float)(*myptphoton)[0];
etaphoton=(float)(*myetaphoton )[0];
ptmuon=(float)(*myptmuon )[0];
etamuon=(float)(*myetamuon )[0];
ptjet=(float)(*myptjet )[0];
etajet=(float)(*myetajet )[0];
masstop=(float)(*mymasstop )[0];
//mtw=(float)(*mymtw )[0];
deltaRphotonjet=(float)(*mydeltaRphotonjet )[0];
deltaRphotonmuon=(float)(*mydeltaRphotonmuon )[0];
//ht=(float)(*myht )[0];
costopphoton=(float)(*mycostopphoton )[0];
jetmultiplicity=(float)(*myjetmultiplicity )[0];
//bjetmultiplicity=(float)(*mybjetmultiplicity )[0];
deltaphiphotonmet=(float)(*mydeltaphiphotonmet )[0];
cvsdiscriminant=(float)(*mycvsdiscriminant)[0];
//leptoncharge=(float)(*myleptoncharge )[0];
finalweight=(*myweight)[0];
//cout<<(*myweight)[0]<<endl;
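// Build the per-event weight for every systematic variation: the nominal weight is rescaled by
// the ratio of the shifted scale factor to the nominal one; the "" entry keeps the nominal weight.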
eventwight["__PU__plus"]=(*myweight)[0]*(*mypileupSFup)[0]/(*mypileupSF)[0];
eventwight["__PU__minus"]=(*myweight)[0]*(*mypileupSFdown)[0]/(*mypileupSF)[0];
eventwight["__TRIG__plus"]=(*myweight)[0]*(*mytriggerSFup)[0]/(*mytriggerSF)[0];
eventwight["__TRIG__minus"]=(*myweight)[0]*(*mytriggerSFdown)[0]/(*mytriggerSF)[0];
eventwight["__BTAG__plus"]=(*myweight)[0]*(*mybtagSFup)[0]/(*mybtagSF)[0];
eventwight["__BTAG__minus"]=(*myweight)[0]*(*mybtagSFdown)[0]/(*mybtagSF)[0];
eventwight["__MISSTAG__plus"]=(*myweight)[0]*(*mymistagSFup)[0]/(*mybtagSF)[0];
eventwight["__MISSTAG__minus"]=(*myweight)[0]*(*mymistagSFdown)[0]/(*mybtagSF)[0];
eventwight["__MUON__plus"]=(*myweight)[0]*(*mymuonSFup)[0]/(*mymuonSF)[0];
eventwight["__MUON__minus"]=(*myweight)[0]*(*mymuonSFdown)[0]/(*mymuonSF)[0];
eventwight["__PHOTON__plus"]=(*myweight)[0]*(*myphotonSFup)[0]/(*myphotonSF)[0];
eventwight["__PHOTON__minus"]=(*myweight)[0]*(*myphotonSFdown)[0]/(*myphotonSF)[0];
eventwight[""]=(*myweight)[0];

finalweight=eventwight[systematics[phi].c_str()];
if (samples_[idx]=="SIGNAL.root") finalweight=(*myweight)[0];
//if (samples_[idx]=="WPHJET")  finalweight=(*mypileupSF)[0]*(*mytriggerSF)[0]*(*mybtagSF)[0]*(*mymuonSF)[0]*(*myphotonSF)[0];
//if (finalweight<0) finalweight=30;
//cout<<"negative event weight"<<finalweight<<"            "<<ptphoton<<endl;
      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

hists[idx] ->Fill( reader->EvaluateMVA( "BDT method"           ),finalweight* scales[idx] );

//cout<<reader->EvaluateMVA( "BDT method")<<"                            "<<finalweight<<endl;   
//cout<<(*myweight)[0]<<endl;
      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
//delete finalweight;
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

delete myptphoton;
delete myetaphoton;
delete myptmuon;
delete myetamuon;
delete myptjet;
delete myetajet;
delete mymasstop;
//delete mymtw;
delete mydeltaRphotonjet;
delete mydeltaRphotonmuon;
//delete myht;
delete mycostopphoton;
delete mydeltaphiphotonmet;
delete mycvsdiscriminant;
delete myjetmultiplicity;
//delete mybjetmultiplicity;
//delete myleptoncharge;
//delete myplot;
delete mybtagSF;
delete mybtagSFup;
delete mybtagSFdown;
delete mymistagSFup;
delete mytriggerSF;
delete mytriggerSFup;
delete mytriggerSFdown;
delete myphotonSF;
delete myphotonSFup;
delete myphotonSFdown;
delete mypileupSF;
delete mypileupSFup;
delete mypileupSFdown;
delete input;
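// Post-processing of this sample's histogram:
//  - the five SIGNAL copies are normalised to fixed yields of 5, 10, 20, 30 and 40 events;
//  - WPHJET is rescaled to 3173 events (presumably its estimated yield);
//  - the three TTBAR parts are summed into the TTBAR3 histogram, and only histograms other than
//    TTBAR1/TTBAR2 are added to the output list.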
if (idx==samples_.size()-5) hists[idx]->Scale(5/hists[idx]->Integral());
if (idx==samples_.size()-4) hists[idx]->Scale(10/hists[idx]->Integral());
if (idx==samples_.size()-3) hists[idx]->Scale(20/hists[idx]->Integral());
if (idx==samples_.size()-2) hists[idx]->Scale(30/hists[idx]->Integral());
if (idx==samples_.size()-1) hists[idx]->Scale(40/hists[idx]->Integral());

if (samples_[idx]=="WPHJET.root") hists[idx]->Scale(3173/hists[idx]->Integral());
if (samples_[idx]=="TTBAR2.root") hists[idx]->Add(hists[idx-1]);
if (samples_[idx]=="TTBAR3.root") hists[idx]->Add(hists[idx-1]);
if (!(samples_[idx]=="TTBAR1.root" || samples_[idx]=="TTBAR2.root")) hList->Add(hists[idx]);
}
}

TH1F *data1hist(0), *data2hist(0) ,*data3hist(0);
data1hist = new TH1F( "mu_BDT__data1hist",           "mu_BDT__data1hist",           nbin, -1, 1 );
data2hist = new TH1F( "mu_BDT__data2hist",           "mu_BDT__data2hist",           nbin, -1, 1 );
data3hist = new TH1F( "BDT__DATA",           "BDT__DATA",           nbin, -1, 1 );

datahists.push_back(data1hist);
datahists.push_back(data2hist);
datahists.push_back(data3hist);



for(unsigned int idx=0; idx<datasamples_.size(); ++idx){
   TFile *input(0);
   TString fname =datasamples_[idx];
   if (!gSystem->AccessPathName( fname )) input = TFile::Open( fname ); // check if file in local directory exists
   else    
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
   
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
   
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //

std::vector<double> *myptphoton=0;
std::vector<double> *myetaphoton=0;
std::vector<double> *myptmuon=0;
std::vector<double> *myetamuon=0;
std::vector<double> *myptjet=0;
std::vector<double> *myetajet=0;
std::vector<double> *mymasstop=0;
//std::vector<double> *mymtw=0;
std::vector<double> *mydeltaRphotonjet=0;
std::vector<double> *mydeltaRphotonmuon=0;
//std::vector<double> *myht=0;
std::vector<double> *mycostopphoton=0;
std::vector<double> *mydeltaphiphotonmet=0;
std::vector<double> *mycvsdiscriminant=0;
std::vector<double> *myjetmultiplicity=0;
//std::vector<double> *mybjetmultiplicity=0;
//std::vector<double> *myleptoncharge=0;

   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("analyzestep2/atq");
//   Int_t myjetmultiplicity, mybjetmultiplicity , myleptoncharge;
//   Float_t userVar1, userVar2;

   theTree->SetBranchAddress("ptphoton", &myptphoton  );
   theTree->SetBranchAddress( "etaphoton", &myetaphoton );
   theTree->SetBranchAddress( "ptmuon", &myptmuon );
   theTree->SetBranchAddress( "etamuon", &myetamuon );
   theTree->SetBranchAddress( "ptjet", &myptjet );
   theTree->SetBranchAddress( "etajet", &myetajet );
   theTree->SetBranchAddress( "masstop", &mymasstop );
//   theTree->SetBranchAddress( "mtw", &mymtw );
   theTree->SetBranchAddress( "deltaRphotonjet", &mydeltaRphotonjet );
   theTree->SetBranchAddress( "deltaRphotonmuon", &mydeltaRphotonmuon );
//   theTree->SetBranchAddress( "ht", &myht );
   theTree->SetBranchAddress( "costopphoton", &mycostopphoton );
   theTree->SetBranchAddress( "jetmultiplicity", &myjetmultiplicity );
//   theTree->SetBranchAddress( "bjetmultiplicity", &mybjetmultiplicity );
   theTree->SetBranchAddress( "deltaphiphotonmet", &mydeltaphiphotonmet );
   theTree->SetBranchAddress( "cvsdiscriminant", &mycvsdiscriminant );
//   theTree->SetBranchAddress( "leptoncharge", &myleptoncharge );


 // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
//   std::cout << "--- ... Processing event: " << ievt << std::endl;

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);
//for (int l=0;l<sizeof(myptphoton);l++){
//std::cout << "--- ... reza: " << myptphoton[l] <<std::endl;
//}
//std::cout << "--- ......................."<< (*mycvsdiscriminant)[0]<<std::endl;
      // --- Return the MVA outputs and fill into histograms
ptphoton=(float)(*myptphoton)[0];
etaphoton=(float)(*myetaphoton )[0];
ptmuon=(float)(*myptmuon )[0];
etamuon=(float)(*myetamuon )[0];
ptjet=(float)(*myptjet )[0];
etajet=(float)(*myetajet )[0];
masstop=(float)(*mymasstop )[0];
//mtw=(float)(*mymtw )[0];
deltaRphotonjet=(float)(*mydeltaRphotonjet )[0];
deltaRphotonmuon=(float)(*mydeltaRphotonmuon )[0];
//ht=(float)(*myht )[0];
costopphoton=(float)(*mycostopphoton )[0];
jetmultiplicity=(float)(*myjetmultiplicity )[0];
//bjetmultiplicity=(float)(*mybjetmultiplicity )[0];
deltaphiphotonmet=(float)(*mydeltaphiphotonmet )[0];
cvsdiscriminant=(float)(*mycvsdiscriminant)[0];
//leptoncharge=(float)(*myleptoncharge )[0];




      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

datahists[idx] ->Fill( reader->EvaluateMVA( "BDT method"           ) );
   }
   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

delete myptphoton;
delete myetaphoton;
delete myptmuon;
delete myetamuon;
delete myptjet;
delete myetajet;
delete mymasstop;
//delete mymtw;
delete mydeltaRphotonjet;
delete mydeltaRphotonmuon;
//delete myht;
delete mycostopphoton;
delete mydeltaphiphotonmet;
delete mycvsdiscriminant;
delete myjetmultiplicity;
//delete mybjetmultiplicity;
//delete myleptoncharge;
//delete myplot;
delete input;
}

for(unsigned int idx=1; idx<datasamples_.size(); ++idx){
datahists[idx]->Add(datahists[idx-1]);
}
hList->Add(datahists[2]);
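// The three data files have been accumulated into datahists[2] ("BDT__DATA"); only this combined
// histogram is written out.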


TH1F *data1histrev(0), *data2histrev(0) ,*data3histrev(0);

data1histrev = new TH1F( "BDT__data1histrev",           "BDT__data1histrev",           nbin, -1, 1 );
data2histrev = new TH1F( "BDT__data2histrev",           "BDT__data2histrev",           nbin, -1, 1 );
data3histrev = new TH1F( "BDT__wjet",           "BDT__wjet",           nbin, -1, 1 );

revDATAhists.push_back(data1histrev);
revDATAhists.push_back(data2histrev);
revDATAhists.push_back(data3histrev);


for(unsigned int idx=0; idx<datasamplesreverse_.size(); ++idx){
   TFile *input(0);

   TString fname =datasamplesreverse_[idx];
   if (!gSystem->AccessPathName( fname )) input = TFile::Open( fname ); // check if file in local directory exists
   else
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server

   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;

   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
//   Double_t  myptphoton,myetaphoton,myptmuon,myetamuon,myptjet,myetajet,mymasstop,mymtw,mydeltaRphotonjet,mydeltaRphotonmuon,myht,mycostopphoton,mydeltaphiphotonmet,mycvsdiscriminant,myjetmultiplicity,mybjetmultiplicity,myleptoncharge;
   
std::vector<double> *myptphoton=0;
std::vector<double> *myetaphoton=0;
std::vector<double> *myptmuon=0;
std::vector<double> *myetamuon=0;
std::vector<double> *myptjet=0;
std::vector<double> *myetajet=0;
std::vector<double> *mymasstop=0;
//std::vector<double> *mymtw=0;
std::vector<double> *mydeltaRphotonjet=0;
std::vector<double> *mydeltaRphotonmuon=0;
//std::vector<double> *myht=0;
std::vector<double> *mycostopphoton=0;
std::vector<double> *mydeltaphiphotonmet=0;
std::vector<double> *mycvsdiscriminant=0;
std::vector<double> *myjetmultiplicity=0;
//std::vector<double> *mybjetmultiplicity=0;
//std::vector<double> *myleptoncharge=0;
   TTree* theTree = (TTree*)input->Get("analyzestep2/atq");
   theTree->SetBranchAddress("ptphoton", &myptphoton  );
   theTree->SetBranchAddress( "etaphoton", &myetaphoton );
   theTree->SetBranchAddress( "ptmuon", &myptmuon );
   theTree->SetBranchAddress( "etamuon", &myetamuon );
   theTree->SetBranchAddress( "ptjet", &myptjet );
   theTree->SetBranchAddress( "etajet", &myetajet );
   theTree->SetBranchAddress( "masstop", &mymasstop );
//   theTree->SetBranchAddress( "mtw", &mymtw );
   theTree->SetBranchAddress( "deltaRphotonjet", &mydeltaRphotonjet );
   theTree->SetBranchAddress( "deltaRphotonmuon", &mydeltaRphotonmuon );
//   theTree->SetBranchAddress( "ht", &myht );
    theTree->SetBranchAddress( "costopphoton", &mycostopphoton );
    theTree->SetBranchAddress( "jetmultiplicity", &myjetmultiplicity );
//   theTree->SetBranchAddress( "bjetmultiplicity", &mybjetmultiplicity );
    theTree->SetBranchAddress( "deltaphiphotonmet", &mydeltaphiphotonmet );
    theTree->SetBranchAddress( "cvsdiscriminant", &mycvsdiscriminant );
//   theTree->SetBranchAddress( "leptoncharge", &myleptoncharge );
   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
//   std::cout << "--- ... Processing event: " << ievt << std::endl;
      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);
//for (int l=0;l<sizeof(myptphoton);l++){
//std::cout << "--- ... reza: " << myptphoton[l] <<std::endl;
//}
//std::cout << "--- ......................."<< (*mycvsdiscriminant)[0]<<std::endl;
      // --- Return the MVA outputs and fill into histograms
ptphoton=(float)(*myptphoton)[0];
etaphoton=(float)(*myetaphoton)[0];
ptmuon=(float)(*myptmuon)[0];
etamuon=(float)(*myetamuon)[0];
ptjet=(float)(*myptjet)[0];
etajet=(float)(*myetajet)[0];
masstop=(float)(*mymasstop)[0];
//mtw=(float)(*mymtw)[0];
deltaRphotonjet=(float)(*mydeltaRphotonjet)[0];
deltaRphotonmuon=(float)(*mydeltaRphotonmuon)[0];
//ht=(float)(*myht)[0];
costopphoton=(float)(*mycostopphoton)[0];
jetmultiplicity=(float)(*myjetmultiplicity)[0];
//bjetmultiplicity=(float)(*mybjetmultiplicity)[0];
deltaphiphotonmet=(float)(*mydeltaphiphotonmet)[0];
cvsdiscriminant=(float)(*mycvsdiscriminant)[0];
//leptoncharge=(float)(*myleptoncharge)[0];
      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

revDATAhists[idx]->Fill( reader->EvaluateMVA( "BDT method" ) );
   }
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

delete myptphoton;
delete myetaphoton;
delete myptmuon;
delete myetamuon;
delete myptjet;
delete myetajet;
delete mymasstop;
//delete mymtw;
delete mydeltaRphotonjet;
delete mydeltaRphotonmuon;
//delete myht;
delete mycostopphoton;
delete mydeltaphiphotonmet;
delete mycvsdiscriminant;
delete myjetmultiplicity;
////delete mybjetmultiplicity;
////delete myleptoncharge;
////delete myplot;
//
delete input;
}
for(unsigned int idx=1; idx<datasamplesreverse_.size(); ++idx){
revDATAhists[idx]->Add(revDATAhists[idx-1]);
 } 
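// The reversed-eta ("etarev") data histograms are summed, normalised to 620.32 events and stored
// as "BDT__wjet" -- presumably a data-driven estimate of the W+jets contribution.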
revDATAhists[2]->Scale(620.32/revDATAhists[2]->Integral());
hList->Add(revDATAhists[2]);
cout<<revDATAhists[2]->Integral()<<endl;
TFile *target  = new TFile( "MVApp.root","RECREATE" );
hList->Write();
target->Close();

} 
void TMVAClassificationApplication( TString decay_mode, TString myMethodList = "" ) 
{   
#ifdef __CINT__
   gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   // 
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetics Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and bayesian regulator
   Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine 
   Use["SVM"]             = 1;
   // 
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   // 
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod 
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t met, mT, mT2W;


   reader->AddVariable("met", &met);
   reader->AddVariable("mT", &mT);
   reader->AddVariable("mT2W",&mT2W);
   

   // Spectator variables declared in the training have to be added to the reader, too
/*   Float_t spec1,spec2;
   reader->AddSpectator( "spec1 := var1*2",   &spec1 );
   reader->AddSpectator( "spec2 := var1*3",   &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
    // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }
*/
   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile ); 
      }
   }
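   // With the defaults above this expects weight files such as
   // "weights/TMVAClassification_BDT.weights.xml", as produced by the corresponding training macro.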
   
   // Book output histograms
   UInt_t nbin = 100;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error: book histograms for the error and the significance as well
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }



   TString fname; 
   Double_t event_weight; // event weights are: cross-section * efficiency (preselection)

   if (decay_mode == "TT_1Lep") {fname  = "Example_Rootfiles/ttbar_1l/output/ttbar_1l.root"; 		event_weight = 0.153;}
   if (decay_mode == "TT_2Lep") {fname  = "Example_Rootfiles/ttbar_2l/output/ttbar_2l.root"; 		event_weight = 0.129;}
   if (decay_mode == "WJets")   {fname  = "Example_Rootfiles/wjets_all/output/wjets_all.root"; 		event_weight = 0.67;}  
   if (decay_mode == "Others")  {fname  = "Example_Rootfiles/others_all/output/others_all.root"; 	event_weight = 0.106;}

   if (decay_mode == "T2tt_R1") { fname  = "Example_Rootfiles/t2tt_all/R1/output/t2tt_all_R1.root"; 	event_weight = 1592./24845.;}
   if (decay_mode == "T2tt_R2") { fname  = "Example_Rootfiles/t2tt_all/R2/output/t2tt_all_R2.root"; 	event_weight = 1018./24994.;}
   if (decay_mode == "T2tt_R3") { fname  = "Example_Rootfiles/t2tt_all/R3/output/t2tt_all_R3.root"; 	event_weight = 306./25006.;}
   if (decay_mode == "T2tt_R4") { fname  = "Example_Rootfiles/t2tt_all/R4/output/t2tt_all_R4.root"; 	event_weight = 87./25094.;}
   if (decay_mode == "T2tt_R5") { fname  = "Example_Rootfiles/t2tt_all/R5/output/t2tt_all_R5.root"; 	event_weight = 28./25075.;}
   if (decay_mode == "T2tt_R6") { fname  = "Example_Rootfiles/t2tt_all/R6/output/t2tt_all_R6.root"; 	event_weight = 9./24863.;}



   TFile *input = TFile::Open( fname );


   std::cout << "--- TMVAClassification       : Using input file: " << input->GetName() << std::endl;
   
   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree *theTree     = (TTree*)input->Get("BDTtree");

   Float_t met, mT, mT2W;
   Int_t event;

   theTree->SetBranchAddress("met",   &met);
   theTree->SetBranchAddress("mT",    &mT);
   theTree->SetBranchAddress("mT2W",  &mT2W);
   theTree->SetBranchAddress("event", &event);



   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   Int_t totevt;

    if (string(decay_mode).find("T2tt") != std::string::npos) totevt = 50000;  // Takes too long (for exercise) to run over all the signal events 
    else totevt = theTree->GetEntries();
 
   std::cout << "--- Processing: " << totevt << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<totevt; ievt++) {

      theTree->GetEntry(ievt);
      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      if ( event%2 == 1 ) continue; // skip odd-numbered events (presumably reserved for training), keeping only the independent even-numbered events

    
 
      // --- Return the MVA outputs and fill into histograms

      // this gives you the mva value per event 
      // cout << reader->EvaluateMVA("BDT method") << endl;

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ), event_weight ); 
      //if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           )); 
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );         
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }         

      // Retrieve probability instead of MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {      
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: " 
                      << cutsMin[ivar] 
                      << " < \"" 
                      << mcuts->GetInputVar(ivar)
                      << "\" <= " 
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   // --- Write histograms

   TFile *target  = new TFile( "results_"+decay_mode+".root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write(); 
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"results_"+decay_mode+".root\" containing the MVA output histograms" << std::endl;
  
   delete reader;
    
   std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
}